From d78c157d60da890827b424212fd5861b66182608 Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Thu, 17 Aug 2023 17:35:50 +0200
Subject: [PATCH 001/216] Add `defn.FunctionNOf.{apply,unapply}`

This provides a variant of `defn.FunctionOf` that only deals with proper
`FunctionN` and `ContextFunctionN` types. This avoids some overhead.

A difference between the two `unapply`s is that this one does not dealias
the type; it needs to be dealiased at the call site.

Part of #18305
---
 .../src/dotty/tools/dotc/cc/Synthetics.scala  |  4 +--
 .../dotty/tools/dotc/core/Definitions.scala   | 25 ++++++++++++++++---
 .../dotty/tools/dotc/core/TypeErasure.scala   |  2 +-
 .../src/dotty/tools/dotc/core/Types.scala     |  2 +-
 .../tools/dotc/transform/PickleQuotes.scala   |  2 +-
 .../dotc/transform/SpecializeFunctions.scala  |  2 +-
 .../tools/dotc/transform/TreeChecker.scala    |  4 +--
 .../dotty/tools/dotc/typer/Applications.scala | 14 +++++------
 .../dotty/tools/dotc/typer/ProtoTypes.scala   |  6 ++---
 .../tools/dotc/typer/QuotesAndSplices.scala   |  4 +--
 .../dotty/tools/dotc/typer/Synthesizer.scala  |  2 +-
 11 files changed, 42 insertions(+), 25 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala
index 1e7c8d641238..c4c52513fb49 100644
--- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala
+++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala
@@ -174,9 +174,9 @@ object Synthetics:
     val (et: ExprType) = symd.info: @unchecked
     val (enclThis: ThisType) = symd.owner.thisType: @unchecked
     def mapFinalResult(tp: Type, f: Type => Type): Type =
-      val defn.FunctionOf(args, res, isContextual) = tp: @unchecked
+      val defn.FunctionNOf(args, res, isContextual) = tp: @unchecked
       if defn.isFunctionNType(res) then
-        defn.FunctionOf(args, mapFinalResult(res, f), isContextual)
+        defn.FunctionNOf(args, mapFinalResult(res, f), isContextual)
       else
         f(tp)
     val resType1 =
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index b4df6bcd4ca5..fcd55cd71f8c 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -1111,10 +1111,9 @@ class Definitions {
   object FunctionOf {
     def apply(args: List[Type], resultType: Type, isContextual: Boolean = false)(using Context): Type =
       val mt = MethodType.companion(isContextual, false)(args, resultType)
-      if mt.hasErasedParams then
-        RefinedType(PolyFunctionClass.typeRef, nme.apply, mt)
-      else
-        FunctionType(args.length, isContextual).appliedTo(args ::: resultType :: Nil)
+      if mt.hasErasedParams then RefinedType(PolyFunctionClass.typeRef, nme.apply, mt)
+      else FunctionNOf(args, resultType, isContextual)
+
     def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean)] = {
       ft.dealias match
        case PolyFunctionOf(mt: MethodType) =>
@@ -1129,6 +1128,24 @@ class Definitions {
     }
   }

+  object FunctionNOf {
+    /** Create a `FunctionN` or `ContextFunctionN` type applied to the arguments and result type */
+    def apply(args: List[Type], resultType: Type, isContextual: Boolean = false)(using Context): Type =
+      FunctionType(args.length, isContextual).appliedTo(args ::: resultType :: Nil)
+
+    /** Matches a (possibly aliased) `FunctionN[...]` or `ContextFunctionN[...]`.
+     *  Extracts the list of function argument types, the result type and whether function is contextual.
+ */ + def unapply(tpe: Type)(using Context): Option[(List[Type], Type, Boolean)] = { + val tsym = tpe.typeSymbol + if isFunctionSymbol(tsym) && tpe.isRef(tsym) then + val targs = tpe.argInfos + if (targs.isEmpty) None + else Some(targs.init, targs.last, tsym.name.isContextFunction) + else None + } + } + object RefinedFunctionOf { /** Matches a refined `PolyFunction`/`FunctionN[...]`/`ContextFunctionN[...]`. * Extracts the method type type and apply info. diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 94c7b2993b97..3e67135a842b 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -933,7 +933,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst case tp: TermRef => sigName(underlyingOfTermRef(tp)) case ExprType(rt) => - sigName(defn.FunctionOf(Nil, rt)) + sigName(defn.FunctionNOf(Nil, rt)) case tp: TypeVar if !tp.isInstantiated => tpnme.Uninstantiated case tp @ defn.PolyFunctionOf(_) => diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 15b7695202e1..cfa3551c0cf0 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -1890,7 +1890,7 @@ object Types { case res: MethodType => res.toFunctionType(isJava) case res => res } - defn.FunctionOf( + defn.FunctionNOf( mt.paramInfos.mapConserve(_.translateFromRepeated(toArray = isJava)), result1, isContextual) if mt.hasErasedParams then diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala index b368e47bf0b3..791d461add7a 100644 --- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala @@ -326,7 +326,7 @@ object PickleQuotes { defn.QuotedExprClass.typeRef.appliedTo(defn.AnyType)), args => val cases = holeContents.zipWithIndex.map { case (splice, idx) => - val defn.FunctionOf(argTypes, defn.FunctionOf(quotesType :: _, _, _), _) = splice.tpe: @unchecked + val defn.FunctionNOf(argTypes, defn.FunctionNOf(quotesType :: _, _, _), _) = splice.tpe: @unchecked val rhs = { val spliceArgs = argTypes.zipWithIndex.map { (argType, i) => args(1).select(nme.apply).appliedTo(Literal(Constant(i))).asInstance(argType) diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala index c50eaddd3213..9d757dc9713c 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala @@ -88,7 +88,7 @@ class SpecializeFunctions extends MiniPhase { // Need to cast to regular function, since specialized apply methods // are not members of ContextFunction0. The cast will be eliminated in // erasure. 
- qual.cast(defn.FunctionOf(Nil, res)) + qual.cast(defn.FunctionNOf(Nil, res)) case _ => qual qual1.select(specializedApply) diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index f84f628fc981..dd32dde93f95 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -749,9 +749,9 @@ object TreeChecker { if isTerm then defn.QuotedExprClass.typeRef.appliedTo(tree1.typeOpt) else defn.QuotedTypeClass.typeRef.appliedTo(tree1.typeOpt) val contextualResult = - defn.FunctionOf(List(defn.QuotesClass.typeRef), expectedResultType, isContextual = true) + defn.FunctionNOf(List(defn.QuotesClass.typeRef), expectedResultType, isContextual = true) val expectedContentType = - defn.FunctionOf(argQuotedTypes, contextualResult) + defn.FunctionNOf(argQuotedTypes, contextualResult) assert(content.typeOpt =:= expectedContentType, i"unexpected content of hole\nexpected: ${expectedContentType}\nwas: ${content.typeOpt}") tree1 diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 921e3ca86fe4..aa5665a5c891 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -1724,7 +1724,7 @@ trait Applications extends Compatibility { def apply(t: Type) = t match { case t @ AppliedType(tycon, args) => def mapArg(arg: Type, tparam: TypeParamInfo) = - if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionOf(arg :: Nil, defn.UnitType) + if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionNOf(arg :: Nil, defn.UnitType) else arg mapOver(t.derivedAppliedType(tycon, args.zipWithConserve(tycon.typeParams)(mapArg))) case _ => mapOver(t) @@ -1951,7 +1951,7 @@ trait Applications extends Compatibility { /** The shape of given tree as a type; cannot handle named arguments. 
*/ def typeShape(tree: untpd.Tree): Type = tree match { case untpd.Function(args, body) => - defn.FunctionOf( + defn.FunctionNOf( args.map(Function.const(defn.AnyType)), typeShape(body), isContextual = untpd.isContextualClosure(tree)) case Match(EmptyTree, _) => @@ -1991,8 +1991,8 @@ trait Applications extends Compatibility { def paramCount(ref: TermRef) = val formals = ref.widen.firstParamTypes if formals.length > idx then - formals(idx) match - case defn.FunctionOf(args, _, _) => args.length + formals(idx).dealias match + case defn.FunctionNOf(args, _, _) => args.length case _ => -1 else -1 @@ -2077,8 +2077,8 @@ trait Applications extends Compatibility { else resolveMapped(alts1, _.widen.appliedTo(targs1.tpes), pt1) case pt => - val compat0 = pt match - case defn.FunctionOf(args, resType, _) => + val compat0 = pt.dealias match + case defn.FunctionNOf(args, resType, _) => narrowByTypes(alts, args, resType) case _ => Nil @@ -2266,7 +2266,7 @@ trait Applications extends Compatibility { false val commonFormal = if (isPartial) defn.PartialFunctionOf(commonParamTypes.head, WildcardType) - else defn.FunctionOf(commonParamTypes, WildcardType, isContextual = untpd.isContextualClosure(arg)) + else defn.FunctionNOf(commonParamTypes, WildcardType, isContextual = untpd.isContextualClosure(arg)) overload.println(i"pretype arg $arg with expected type $commonFormal") if (commonParamTypes.forall(isFullyDefined(_, ForceDegree.flipBottom))) withMode(Mode.ImplicitsEnabled) { diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 7303124b0cd4..14f97e324b86 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -383,9 +383,9 @@ object ProtoTypes { def allArgTypesAreCurrent()(using Context): Boolean = state.typedArg.size == args.length - private def isUndefined(tp: Type): Boolean = tp match { + private def isUndefined(tp: Type): Boolean = tp.dealias match { case _: WildcardType => true - case defn.FunctionOf(args, result, _) => args.exists(isUndefined) || isUndefined(result) + case defn.FunctionNOf(args, result, _) => args.exists(isUndefined) || isUndefined(result) case _ => false } @@ -424,7 +424,7 @@ object ProtoTypes { case ValDef(_, tpt, _) if !tpt.isEmpty => typer.typedType(tpt).typeOpt case _ => WildcardType } - targ = arg.withType(defn.FunctionOf(paramTypes, WildcardType)) + targ = arg.withType(defn.FunctionNOf(paramTypes, WildcardType)) case Some(_) if !force => targ = arg.withType(WildcardType) case _ => diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index 28afccd1ca43..a172eb290f7a 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -122,7 +122,7 @@ trait QuotesAndSplices { for arg <- typedArgs if arg.symbol.is(Mutable) do // TODO support these patterns. 
Possibly using scala.quoted.util.Var report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val patType = if tree.args.isEmpty then pt else defn.FunctionOf(argTypes, pt) + val patType = if tree.args.isEmpty then pt else defn.FunctionNOf(argTypes, pt) val pat = typedPattern(tree.body, defn.QuotedExprClass.typeRef.appliedTo(patType))(using quotePatternSpliceContext) val baseType = pat.tpe.baseType(defn.QuotedExprClass) val argType = if baseType.exists then baseType.argTypesHi.head else defn.NothingType @@ -148,7 +148,7 @@ trait QuotesAndSplices { if isInBraces then // ${x}(...) match an application val typedArgs = args.map(arg => typedExpr(arg)) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val splice1 = typedSplicePattern(splice, defn.FunctionOf(argTypes, pt)) + val splice1 = typedSplicePattern(splice, defn.FunctionNOf(argTypes, pt)) untpd.cpy.Apply(tree)(splice1.select(nme.apply), typedArgs).withType(pt) else // $x(...) higher-order quasipattern if args.isEmpty then diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index cbb13a841946..c15a6da0b701 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -105,7 +105,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case AppliedType(_, funArgs @ fun :: tupled :: Nil) => def functionTypeEqual(baseFun: Type, actualArgs: List[Type], actualRet: Type, expected: Type) = - expected =:= defn.FunctionOf(actualArgs, actualRet, + expected =:= defn.FunctionNOf(actualArgs, actualRet, defn.isContextFunctionType(baseFun)) val arity: Int = if defn.isFunctionNType(fun) then From bbd8d81170780fee90d6c4f4aaa910eb0544872c Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 30 Aug 2023 11:38:46 +0200 Subject: [PATCH 002/216] Optimize `FunctionNOf.unapply` --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index fcd55cd71f8c..944712b6cd73 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1136,13 +1136,10 @@ class Definitions { /** Matches a (possibly aliased) `FunctionN[...]` or `ContextFunctionN[...]`. * Extracts the list of function argument types, the result type and whether function is contextual. 
*/
-    def unapply(tpe: Type)(using Context): Option[(List[Type], Type, Boolean)] = {
-      val tsym = tpe.typeSymbol
-      if isFunctionSymbol(tsym) && tpe.isRef(tsym) then
-        val targs = tpe.argInfos
-        if (targs.isEmpty) None
-        else Some(targs.init, targs.last, tsym.name.isContextFunction)
-      else None
+    def unapply(tpe: AppliedType)(using Context): Option[(List[Type], Type, Boolean)] = {
+      val targs = tpe.args
+      if targs.isEmpty || !isFunctionNType(tpe) then None
+      else Some(targs.init, targs.last, tpe.typeSymbol.name.isContextFunction)
     }
   }

From 1e7243a658816d050a6b7c31a1f44620fd94026f Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Thu, 31 Aug 2023 17:55:17 +0200
Subject: [PATCH 003/216] Remove unnecessary guard

---
 compiler/src/dotty/tools/dotc/core/Definitions.scala | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index 944712b6cd73..7b34eea94ec7 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -1137,9 +1137,8 @@ class Definitions {
      *  Extracts the list of function argument types, the result type and whether function is contextual.
      */
     def unapply(tpe: AppliedType)(using Context): Option[(List[Type], Type, Boolean)] = {
-      val targs = tpe.args
-      if targs.isEmpty || !isFunctionNType(tpe) then None
-      else Some(targs.init, targs.last, tpe.typeSymbol.name.isContextFunction)
+      if !isFunctionNType(tpe) then None
+      else Some(tpe.args.init, tpe.args.last, tpe.typeSymbol.name.isContextFunction)
     }
   }

From 69cc0ee479655fe720da438b34ad6892dae42392 Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Tue, 19 Sep 2023 10:52:37 +0200
Subject: [PATCH 004/216] Add `-experimental` compiler flag

When enabled, all top-level definitions are annotated as `@experimental`.
This implies that all experimental language features and definitions can
be used in this project.

Note that this does not change the strong guarantees on stability of
non-experimental code. The experimental features can only be used in an
experimental scope (transitively).

This flag does not affect the use of `ResearchPlugin`.
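
For illustration, a minimal sketch of a file that this flag makes
compile (hypothetical; it mirrors the `tests/pos` cases added below):

    //> using options -experimental
    import scala.language.experimental.erasedDefinitions

    // No explicit @experimental annotation is needed: under
    // -experimental, this top-level definition is annotated
    // automatically, so the experimental language feature can be used.
    erased def erasedFun(erased x: Int): Int = x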
--- .../src/dotty/tools/dotc/config/Feature.scala | 9 +++++++-- .../tools/dotc/config/ScalaSettings.scala | 1 + .../src/dotty/tools/dotc/plugins/Plugins.scala | 3 ++- .../dotty/tools/dotc/transform/PostTyper.scala | 11 +++++++++-- .../changed-features/compiler-plugins.md | 2 +- docs/_docs/reference/experimental/overview.md | 6 ++++++ .../other-new-features/experimental-defs.md | 6 ++++++ .../expeimental-flag-with-lang-feature-1.scala | 5 +++++ .../expeimental-flag-with-lang-feature-2.scala | 7 +++++++ tests/neg/expeimental-flag.scala | 18 ++++++++++++++++++ .../expeimental-flag-with-lang-feature.scala | 10 ++++++++++ tests/pos/expeimental-flag.scala | 18 ++++++++++++++++++ 12 files changed, 90 insertions(+), 6 deletions(-) create mode 100644 tests/neg/expeimental-flag-with-lang-feature-1.scala create mode 100644 tests/neg/expeimental-flag-with-lang-feature-2.scala create mode 100644 tests/neg/expeimental-flag.scala create mode 100644 tests/pos/expeimental-flag-with-lang-feature.scala create mode 100644 tests/pos/expeimental-flag.scala diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 5bcc139326f9..4486aaab7fc9 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -134,7 +134,12 @@ object Feature: def checkExperimentalFeature(which: String, srcPos: SrcPos, note: => String = "")(using Context) = if !isExperimentalEnabled then - report.error(em"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) + report.error( + em"""Experimental $which may only be used under experimental mode: + | 1. In a definition marked as @experimental + | 2. Compiling with the -experimental compiler flag + | 3. With a nightly or snapshot version of the compiler$note + """, srcPos) private def ccException(sym: Symbol)(using Context): Boolean = ccEnabled && defn.ccExperimental.contains(sym) @@ -159,7 +164,7 @@ object Feature: do checkExperimentalFeature(s"feature $setting", NoSourcePosition) def isExperimentalEnabled(using Context): Boolean = - Properties.experimental && !ctx.settings.YnoExperimental.value + (Properties.experimental || ctx.settings.experimental.value) && !ctx.settings.YnoExperimental.value /** Handle language import `import language..` if it is one * of the global imports `pureFunctions` or `captureChecking`. In this case diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 3dbfbfc6bab9..0e684a6a3e5b 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -119,6 +119,7 @@ trait CommonScalaSettings: val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language")) + val experimental: Setting[Boolean] = BooleanSetting("-experimental", "Annotate all top-level definitions with @experimental. 
This enables the use of experimental features anywhere in the project.") /* Coverage settings */ val coverageOutputDir = PathSetting("-coverage-out", "Destination for coverage classfiles and instrumentation data.", "", aliases = List("--coverage-out")) diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index c44fe4cf59b4..d3936e4280a9 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -10,6 +10,7 @@ import config.{ PathResolver, Feature } import dotty.tools.io._ import Phases._ import config.Printers.plugins.{ println => debug } +import config.Properties /** Support for run-time loading of compiler plugins. * @@ -126,7 +127,7 @@ trait Plugins { val updatedPlan = Plugins.schedule(plan, pluginPhases) // add research plugins - if (Feature.isExperimentalEnabled) + if Properties.experimental && !ctx.settings.YnoExperimental.value then plugins.collect { case p: ResearchPlugin => p }.foldRight(updatedPlan) { (plug, plan) => plug.init(options(plug), plan) } diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 964486632979..5420b120037c 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -379,6 +379,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => ) } case tree: ValDef => + annotateExperimental(tree.symbol) registerIfHasMacroAnnotations(tree) checkErasedDef(tree) val tree1 = cpy.ValDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) @@ -386,6 +387,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => checkStableSelection(tree.rhs) processValOrDefDef(super.transform(tree1)) case tree: DefDef => + annotateExperimental(tree.symbol) registerIfHasMacroAnnotations(tree) checkErasedDef(tree) annotateContextResults(tree) @@ -537,9 +539,14 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => report.error("`erased` definition cannot be implemented with en expression of type Null", tree.srcPos) private def annotateExperimental(sym: Symbol)(using Context): Unit = - if sym.is(Module) && sym.companionClass.hasAnnotation(defn.ExperimentalAnnot) then + def isTopLevelDefinitionInSource(sym: Symbol) = + !sym.is(Package) && !sym.name.isPackageObjectName && + (sym.owner.is(Package) || (sym.owner.isPackageObject && !sym.isConstructor)) + if !sym.hasAnnotation(defn.ExperimentalAnnot) + && (ctx.settings.experimental.value && isTopLevelDefinitionInSource(sym)) + || (sym.is(Module) && sym.companionClass.hasAnnotation(defn.ExperimentalAnnot)) + then sym.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) - sym.companionModule.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) private def scala2LibPatch(tree: TypeDef)(using Context) = val sym = tree.symbol diff --git a/docs/_docs/reference/changed-features/compiler-plugins.md b/docs/_docs/reference/changed-features/compiler-plugins.md index 82d38bd44d96..6be8a62c7ac4 100644 --- a/docs/_docs/reference/changed-features/compiler-plugins.md +++ b/docs/_docs/reference/changed-features/compiler-plugins.md @@ -18,7 +18,7 @@ For experimentation and research, Scala 3 introduces _research plugin_. Research are more powerful than Scala 2 analyzer plugins as they let plugin authors customize the whole compiler pipeline. 
One can easily replace the standard typer by a custom one or create a parser for a domain-specific language. However, research plugins are only -enabled for nightly or snaphot releases of Scala 3. +enabled with the `-experimental` compiler flag or in nightly/snapshot releases of Scala 3. Common plugins that add new phases to the compiler pipeline are called _standard plugins_ in Scala 3. In terms of features, they are similar to diff --git a/docs/_docs/reference/experimental/overview.md b/docs/_docs/reference/experimental/overview.md index 254f103896e4..f70cf32b9c24 100644 --- a/docs/_docs/reference/experimental/overview.md +++ b/docs/_docs/reference/experimental/overview.md @@ -21,6 +21,12 @@ They are enabled by importing the feature or using the `-language` compiler flag In general, experimental language features can be imported in an experimental scope (see [experimental definitions](../other-new-features/experimental-defs.md)). They can be imported at the top-level if all top-level definitions are `@experimental`. +### `-experimental` compiler flag + +This flag enables the use of any experimental language feature in the project. +It does this by adding an `@experimental` annotation to all top-level definitions. +Hence, dependent projects also have to be experimental. + ## Experimental language features supported by special compiler options Some experimental language features that are still in research and development can be enabled with special compiler options. These include diff --git a/docs/_docs/reference/other-new-features/experimental-defs.md b/docs/_docs/reference/other-new-features/experimental-defs.md index 88815ad1e136..b71b20ecc036 100644 --- a/docs/_docs/reference/other-new-features/experimental-defs.md +++ b/docs/_docs/reference/other-new-features/experimental-defs.md @@ -309,3 +309,9 @@ class MyExperimentalTests { ``` + +## `-experimental` compiler flag + +This flag enables the use of any experimental language feature in the project. +It does this by adding an `@experimental` annotation to all top-level definitions. +Hence, dependent projects also have to be experimental. 
diff --git a/tests/neg/expeimental-flag-with-lang-feature-1.scala b/tests/neg/expeimental-flag-with-lang-feature-1.scala new file mode 100644 index 000000000000..a5ece729fa3d --- /dev/null +++ b/tests/neg/expeimental-flag-with-lang-feature-1.scala @@ -0,0 +1,5 @@ +//> using options -Yno-experimental + +import scala.language.experimental.erasedDefinitions + +erased def erasedFun(erased x: Int): Int = x // error // error diff --git a/tests/neg/expeimental-flag-with-lang-feature-2.scala b/tests/neg/expeimental-flag-with-lang-feature-2.scala new file mode 100644 index 000000000000..3e0b9359711a --- /dev/null +++ b/tests/neg/expeimental-flag-with-lang-feature-2.scala @@ -0,0 +1,7 @@ +//> using options -Yno-experimental + +import scala.language.experimental.namedTypeArguments // error + +def namedTypeArgumentsFun[T, U]: Int = + namedTypeArgumentsFun[T = Int, U = Int] + namedTypeArgumentsFun[U = Int, T = Int] diff --git a/tests/neg/expeimental-flag.scala b/tests/neg/expeimental-flag.scala new file mode 100644 index 000000000000..8b2e729ea8da --- /dev/null +++ b/tests/neg/expeimental-flag.scala @@ -0,0 +1,18 @@ +//> using options -Yno-experimental + +import scala.annotation.experimental + +class Foo: + def foo: Int = experimentalDef // error + +class Bar: + def bar: Int = experimentalDef // error +object Bar: + def bar: Int = experimentalDef // error + +object Baz: + def bar: Int = experimentalDef // error + +def toplevelMethod: Int = experimentalDef // error + +@experimental def experimentalDef: Int = 1 diff --git a/tests/pos/expeimental-flag-with-lang-feature.scala b/tests/pos/expeimental-flag-with-lang-feature.scala new file mode 100644 index 000000000000..9cfb716b1015 --- /dev/null +++ b/tests/pos/expeimental-flag-with-lang-feature.scala @@ -0,0 +1,10 @@ +//> using options -experimental -Yno-experimental + +import scala.language.experimental.erasedDefinitions +import scala.language.experimental.namedTypeArguments + +erased def erasedFun(erased x: Int): Int = x + +def namedTypeArgumentsFun[T, U]: Int = + namedTypeArgumentsFun[T = Int, U = Int] + namedTypeArgumentsFun[U = Int, T = Int] diff --git a/tests/pos/expeimental-flag.scala b/tests/pos/expeimental-flag.scala new file mode 100644 index 000000000000..9d3daf12fddc --- /dev/null +++ b/tests/pos/expeimental-flag.scala @@ -0,0 +1,18 @@ +//> using options -experimental -Yno-experimental + +import scala.annotation.experimental + +class Foo: + def foo: Int = experimentalDef + +class Bar: + def bar: Int = experimentalDef +object Bar: + def bar: Int = experimentalDef + +object Baz: + def bar: Int = experimentalDef + +def toplevelMethod: Int = experimentalDef + +@experimental def experimentalDef: Int = 1 From 6e256b7711186acdbf27bdae584f1499322739d6 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 12 Oct 2023 11:00:34 +0100 Subject: [PATCH 005/216] Fix lubbing, push isSoft=false through --- .../src/dotty/tools/dotc/core/TypeComparer.scala | 2 +- tests/pos/i18626.min1.scala | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i18626.min1.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 8df809dc9ee6..9d2bf3e05ebc 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2388,7 +2388,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case Atoms.Range(lo2, hi2) => if hi1.subsetOf(lo2) then return tp2 if 
hi2.subsetOf(lo1) then return tp1 - if (hi1 & hi2).isEmpty then return orType(tp1, tp2) + if (hi1 & hi2).isEmpty then return orType(tp1, tp2, isSoft = isSoft) case none => case none => val t1 = mergeIfSuper(tp1, tp2, canConstrain) diff --git a/tests/pos/i18626.min1.scala b/tests/pos/i18626.min1.scala new file mode 100644 index 000000000000..ae895db4b29c --- /dev/null +++ b/tests/pos/i18626.min1.scala @@ -0,0 +1,14 @@ +sealed trait Animal +object Cat extends Animal +object Dog extends Animal + +type Mammal = Cat.type | Dog.type + +class Test: + def t1 = + val mammals: List[Mammal] = ??? + val result = mammals.head + val mammal: Mammal = result // was: Type Mismatch Error: + // Found: (result : Animal) + // Required: Mammal + () From 5290632f6619562e72345110a9db0cae47eec9c4 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 12 Oct 2023 10:55:17 +0100 Subject: [PATCH 006/216] Move hardenTypeVars into Constraint, so it can be reused --- compiler/src/dotty/tools/dotc/core/Constraint.scala | 3 +++ .../dotty/tools/dotc/core/OrderingConstraint.scala | 6 ++++++ .../src/dotty/tools/dotc/core/TypeComparer.scala | 13 +------------ 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala index c634f847e510..a205b1f3b1a1 100644 --- a/compiler/src/dotty/tools/dotc/core/Constraint.scala +++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala @@ -138,6 +138,9 @@ abstract class Constraint extends Showable { /** The same as this constraint, but with `tv` marked as hard. */ def withHard(tv: TypeVar)(using Context): This + /** Mark toplevel type vars in `tp` as hard. */ + def hardenTypeVars(tp: Type)(using Context): This + /** Gives for each instantiated type var that does not yet have its `inst` field * set, the instance value stored in the constraint. 
Storing instances in constraints * is done only in a temporary way for contexts that may be retracted diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index 0328cea9b3ca..50e9ac953954 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -751,6 +751,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current.checkWellFormed() end replace + def hardenTypeVars(tp: Type)(using Context): OrderingConstraint = tp.dealiasKeepRefiningAnnots match + case tp: TypeVar if contains(tp.origin) => withHard(tp) + case tp: TypeParamRef if contains(tp) => hardenTypeVars(typeVarOfParam(tp)) + case tp: AndOrType => hardenTypeVars(tp.tp1).hardenTypeVars(tp.tp2) + case _ => this + def remove(pt: TypeLambda)(using Context): This = { def removeFromOrdering(po: ParamOrdering) = { def removeFromBoundss(key: TypeLambda, bndss: Array[List[TypeParamRef]]): Array[List[TypeParamRef]] = { diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 9d2bf3e05ebc..25f35a9dd32d 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -501,17 +501,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false } - /** Mark toplevel type vars in `tp2` as hard in the current constraint */ - def hardenTypeVars(tp2: Type): Unit = tp2.dealiasKeepRefiningAnnots match - case tvar: TypeVar if constraint.contains(tvar.origin) => - constraint = constraint.withHard(tvar) - case tp2: TypeParamRef if constraint.contains(tp2) => - hardenTypeVars(constraint.typeVarOfParam(tp2)) - case tp2: AndOrType => - hardenTypeVars(tp2.tp1) - hardenTypeVars(tp2.tp2) - case _ => - val res = widenOK || joinOK || recur(tp11, tp2) && recur(tp12, tp2) || containsAnd(tp1) @@ -534,7 +523,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // is marked so that it converts all soft unions in its lower bound to hard unions // before it is instantiated. The reason is that the variable's instance type will // be a supertype of (decomposed and reconstituted) `tp1`. 
-        hardenTypeVars(tp2)
+        constraint = constraint.hardenTypeVars(tp2)

     res

From cc55175bd9b4fc04d47448c15d0a68401f01e3d2 Mon Sep 17 00:00:00 2001
From: Dale Wijnand
Date: Thu, 12 Oct 2023 11:01:46 +0100
Subject: [PATCH 007/216] Propagate hard vars when replacing Constraint params

---
 .../tools/dotc/core/OrderingConstraint.scala |  3 ++
 tests/pos/i18626.scala                       | 32 +++++++++++++++++++
 2 files changed, 35 insertions(+)
 create mode 100644 tests/pos/i18626.scala

diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
index 50e9ac953954..7b94eb3f67d3 100644
--- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
+++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
@@ -748,6 +748,9 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
       }
     if isRemovable(param.binder) then current = current.remove(param.binder)
     current.dropDeps(param)
+    replacedTypeVar match
+      case replacedTypeVar: TypeVar if isHard(replacedTypeVar) => current = current.hardenTypeVars(replacement)
+      case _ =>
     current.checkWellFormed()
   end replace
diff --git a/tests/pos/i18626.scala b/tests/pos/i18626.scala
new file mode 100644
index 000000000000..0fda265f3812
--- /dev/null
+++ b/tests/pos/i18626.scala
@@ -0,0 +1,32 @@
+trait Random[F1[_]]:
+  def element[T1](list: Seq[T1]): F1[T1] = ???
+
+trait Monad[F2[_]]:
+  def map[A1, B1](fa: F2[A1])(f: A1 => B1): F2[B1]
+
+object Monad:
+  extension [F3[_]: Monad, A3](fa: F3[A3])
+    def map[B3](f: A3 => B3): F3[B3] = ???
+
+sealed trait Animal
+object Cat extends Animal
+object Dog extends Animal
+
+type Mammal = Cat.type | Dog.type
+val mammals: List[Mammal] = ???
+
+class Work[F4[_]](random: Random[F4])(using mf: Monad[F4]):
+  def result1: F4[Mammal] =
+    mf.map(fa = random.element(mammals))(a => a)
+
+  def result2: F4[Mammal] = Monad.map(random.element(mammals))(a => a)
+
+  import Monad.*
+
+  def result3: F4[Mammal] = random
+    .element(mammals)
+    .map { a =>
+      a // was: Type Mismatch Error:
+        //        Found: (a : Animal)
+        //        Required: Cat.type | Dog.type
+}

From a17552564ab467b25a90e6b6405683d4b03ddec7 Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Tue, 22 Aug 2023 15:22:54 +0200
Subject: [PATCH 008/216] Handle dependent context functions

Add `FunctionTypeOfMethod` extractor that matches any kind of function
and returns its method type.

We use this extractor instead of `ContextFunctionType` to match all of
 * `ContextFunctionN[...]`
 * `ContextFunctionN[...] { def apply(using ...): R }` where `R` might be
   dependent on the parameters.
 * `PolyFunction { def apply(using ...): R }` where `R` might be dependent
   on the parameters. Currently this one would have at least one erased
   parameter.
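
For illustration, a sketch of the shapes this is meant to cover and of
how a caller uses the extractor (the example types are hypothetical):

    // All of the following carry an underlying method type:
    //   Int ?=> String                                // ContextFunction1[Int, String]
    //   (x: Int) ?=> List[x.type]                     // dependent context function
    //   PolyFunction { def apply(using x: Int): Int } // refined PolyFunction
    //
    // Matching recovers that method type uniformly:
    tp.dealias match
      case defn.FunctionTypeOfMethod(mt) if mt.isContextualMethod =>
        mt.resType // the result type, possibly dependent on the parameters
      case _ =>
        tp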
---
 compiler/src/dotty/tools/dotc/ast/TreeInfo.scala |  2 +-
 .../src/dotty/tools/dotc/core/Definitions.scala  | 16 ++++++++++++++++
 .../dotc/transform/ContextFunctionResults.scala  | 16 ++++++++--------
 3 files changed, 25 insertions(+), 9 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
index 4aaef28b9e1e..9751e8272858 100644
--- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
+++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
@@ -990,7 +990,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
   def isStructuralTermSelectOrApply(tree: Tree)(using Context): Boolean = {
     def isStructuralTermSelect(tree: Select) =
       def hasRefinement(qualtpe: Type): Boolean = qualtpe.dealias match
-        case defn.PolyFunctionOf(_) =>
+        case defn.FunctionTypeOfMethod(_) =>
           false
         case RefinedType(parent, rname, rinfo) =>
           rname == tree.name || hasRefinement(parent)
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index 22a49a760e57..dfa43f7407eb 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -1108,6 +1108,22 @@ class Definitions {
     //  - .linkedClass: the ClassSymbol of the enumeration (class E)
     sym.owner.linkedClass.typeRef

+  object FunctionTypeOfMethod {
+    /** Matches a `FunctionN[...]`/`ContextFunctionN[...]` or refined `PolyFunction`/`FunctionN[...]`/`ContextFunctionN[...]`.
+     *  Extracts the method type and apply info.
+     */
+    def unapply(ft: Type)(using Context): Option[MethodOrPoly] = {
+      ft match
+        case RefinedType(parent, nme.apply, mt: MethodOrPoly)
+          if parent.derivesFrom(defn.PolyFunctionClass) || isFunctionNType(parent) =>
+          Some(mt)
+        case FunctionOf(argTypes, resultType, isContextual) =>
+          val methodType = if isContextual then ContextualMethodType else MethodType
+          Some(methodType(argTypes, resultType))
+        case _ => None
+    }
+  }
+
   object FunctionOf {
     def apply(args: List[Type], resultType: Type, isContextual: Boolean = false)(using Context): Type =
       val mt = MethodType.companion(isContextual, false)(args, resultType)
diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala
index 01a77427698a..1b1d78182f0f 100644
--- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala
@@ -58,8 +58,8 @@ object ContextFunctionResults:
    */
   def contextResultsAreErased(sym: Symbol)(using Context): Boolean =
     def allErased(tp: Type): Boolean = tp.dealias match
-      case defn.ContextFunctionType(argTpes, resTpe) =>
-        argTpes.forall(_.hasAnnotation(defn.ErasedParamAnnot)) && allErased(resTpe)
+      case ft @ defn.FunctionTypeOfMethod(mt: MethodType) if mt.isContextualMethod =>
+        mt.nonErasedParamCount == 0 && allErased(mt.resType)
       case _ => true
     contextResultCount(sym) > 0 && allErased(sym.info.finalResultType)

@@ -68,13 +68,13 @@ object ContextFunctionResults:
    */
   def integrateContextResults(tp: Type, crCount: Int)(using Context): Type =
     if crCount == 0 then tp
-    else tp match
+    else tp.dealias match
      case ExprType(rt) =>
        integrateContextResults(rt, crCount)
      case tp: MethodOrPoly =>
        tp.derivedLambdaType(resType = integrateContextResults(tp.resType, crCount))
-      case defn.ContextFunctionType(argTypes, resType) =>
-        MethodType(argTypes, integrateContextResults(resType, crCount - 1))
+      case
defn.FunctionTypeOfMethod(mt) if mt.isContextualMethod => + mt.derivedLambdaType(resType = integrateContextResults(mt.resType, crCount - 1)) /** The total number of parameters of method `sym`, not counting * erased parameters, but including context result parameters. @@ -101,7 +101,7 @@ object ContextFunctionResults: def recur(tp: Type, n: Int): Type = if n == 0 then tp else tp match - case defn.ContextFunctionType(_, resTpe) => recur(resTpe, n - 1) + case defn.FunctionTypeOfMethod(mt) => recur(mt.resType, n - 1) recur(meth.info.finalResultType, depth) /** Should selection `tree` be eliminated since it refers to an `apply` @@ -115,8 +115,8 @@ object ContextFunctionResults: else tree match case Select(qual, name) => if name == nme.apply then - qual.tpe match - case defn.ContextFunctionType(_, _) => + qual.tpe.nn.dealias match + case defn.FunctionTypeOfMethod(mt) if mt.isContextualMethod => integrateSelect(qual, n + 1) case _ if defn.isContextFunctionClass(tree.symbol.maybeOwner) => // for TermRefs integrateSelect(qual, n + 1) From b0eaf17b8be525fe3fa91afc5b31039cc74a6d52 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 30 Aug 2023 10:42:20 +0200 Subject: [PATCH 009/216] Inline FunctionOf in FunctionTypeOfMethod and optimize --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index dfa43f7407eb..d7b1f290cb1c 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1117,10 +1117,14 @@ class Definitions { case RefinedType(parent, nme.apply, mt: MethodOrPoly) if parent.derivesFrom(defn.PolyFunctionClass) || isFunctionNType(parent) => Some(mt) - case FunctionOf(argTypes, resultType, isContextual) => - val methodType = if isContextual then ContextualMethodType else MethodType - Some(methodType(argTypes, resultType)) - case _ => None + case _ => + val tsym = ft.typeSymbol + val targs = ft.argInfos + if targs.nonEmpty && isFunctionSymbol(tsym) && ft.isRef(tsym) then + val isContextual = tsym.name.isContextFunction + val methodType = if isContextual then ContextualMethodType else MethodType + Some(methodType(targs.init, targs.last)) + else None } } From 94c01ffc784f3e74655ae7bb612d1a0312110b03 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 30 Aug 2023 11:05:13 +0200 Subject: [PATCH 010/216] Optimize `FunctionTypeOfMethod` --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index d7b1f290cb1c..ee7262fa389b 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1117,14 +1117,12 @@ class Definitions { case RefinedType(parent, nme.apply, mt: MethodOrPoly) if parent.derivesFrom(defn.PolyFunctionClass) || isFunctionNType(parent) => Some(mt) + case AppliedType(parent, targs) if targs.nonEmpty && isFunctionNType(ft) => + val isContextual = ft.typeSymbol.name.isContextFunction + val methodType = if isContextual then ContextualMethodType else MethodType + Some(methodType(targs.init, targs.last)) case _ => - val tsym = ft.typeSymbol - val targs = ft.argInfos - if targs.nonEmpty && isFunctionSymbol(tsym) && ft.isRef(tsym) then - val isContextual = 
tsym.name.isContextFunction
+            val methodType = if isContextual then ContextualMethodType else MethodType
+            Some(methodType(targs.init, targs.last))
+          else None
     }
   }

From 94c01ffc784f3e74655ae7bb612d1a0312110b03 Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Wed, 30 Aug 2023 11:05:13 +0200
Subject: [PATCH 010/216] Optimize `FunctionTypeOfMethod`

---
 compiler/src/dotty/tools/dotc/core/Definitions.scala | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index d7b1f290cb1c..ee7262fa389b 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -1117,14 +1117,12 @@ class Definitions {
         case RefinedType(parent, nme.apply, mt: MethodOrPoly)
           if parent.derivesFrom(defn.PolyFunctionClass) || isFunctionNType(parent) =>
           Some(mt)
+        case AppliedType(parent, targs) if targs.nonEmpty && isFunctionNType(ft) =>
+          val isContextual = ft.typeSymbol.name.isContextFunction
+          val methodType = if isContextual then ContextualMethodType else MethodType
+          Some(methodType(targs.init, targs.last))
         case _ =>
-          val tsym = ft.typeSymbol
-          val targs = ft.argInfos
-          if targs.nonEmpty && isFunctionSymbol(tsym) && ft.isRef(tsym) then
-            val isContextual = tsym.name.isContextFunction
-            val methodType = if isContextual then ContextualMethodType else MethodType
-            Some(methodType(targs.init, targs.last))
-          else None
+          None
     }
   }

From 527dd8e37522ece8a6090abc9ce60c9190d4771a Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Wed, 30 Aug 2023 11:11:37 +0200
Subject: [PATCH 011/216] Optimize FunctionTypeOfMethod RefinedType guard

---
 compiler/src/dotty/tools/dotc/core/Definitions.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index ee7262fa389b..e4f32ea97772 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -1115,7 +1115,7 @@ class Definitions {
     def unapply(ft: Type)(using Context): Option[MethodOrPoly] = {
       ft match
         case RefinedType(parent, nme.apply, mt: MethodOrPoly)
-          if parent.derivesFrom(defn.PolyFunctionClass) || isFunctionNType(parent) =>
+          if parent.derivesFrom(defn.PolyFunctionClass) || (mt.isInstanceOf[MethodType] && isFunctionNType(parent)) =>
           Some(mt)

From d5d8273fbeb6c8c303ac76d9c33f7c68511bd01b Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Thu, 31 Aug 2023 17:53:34 +0200
Subject: [PATCH 012/216] Remove unnecessary guard for AppliedType

---
 compiler/src/dotty/tools/dotc/core/Definitions.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index e4f32ea97772..846a1f68cb79 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -1117,7 +1117,7 @@ class Definitions {
         case RefinedType(parent, nme.apply, mt: MethodOrPoly)
           if parent.derivesFrom(defn.PolyFunctionClass) || (mt.isInstanceOf[MethodType] && isFunctionNType(parent)) =>
           Some(mt)
-        case AppliedType(parent, targs) if targs.nonEmpty && isFunctionNType(ft) =>
+        case AppliedType(parent, targs) if isFunctionNType(ft) =>
           val isContextual = ft.typeSymbol.name.isContextFunction
           val methodType = if isContextual then ContextualMethodType else MethodType
           Some(methodType(targs.init, targs.last))

From 1d2219e9599f3d5b6680ee8cd05ff70fb434072f Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Tue, 17 Oct 2023 16:00:04 +0200
Subject: [PATCH 013/216] Make i13044 more resilient

This test checks that we handle stack overflows correctly. Due to some
general improvements we are closer to not reaching the stack overflow.
Therefore, we make this test unroll a few more inlines to make sure that
the stack overflow is still reached.
---
 tests/neg/i13044.check | 8 ++++----
 tests/neg/i13044.scala | 17 ++++++++++++++++-
 2 files changed, 20 insertions(+), 5 deletions(-)

diff --git a/tests/neg/i13044.check b/tests/neg/i13044.check
index c5584aadf767..1fbe978a49b8 100644
--- a/tests/neg/i13044.check
+++ b/tests/neg/i13044.check
@@ -1,5 +1,5 @@
---- Error: tests/neg/i13044.scala:50:40 ---------------------------------------------------------------------------------
-50 | implicit def typeSchema: Schema[A] = Schema.gen // error // error
+-- Error: tests/neg/i13044.scala:65:40 ---------------------------------------------------------------------------------
+65 | implicit def typeSchema: Schema[A] = Schema.gen // error // error
   | ^^^^^^^^^^
   | given instance gen is declared as `inline`, but was not inlined
   |
@@ -71,8 +71,8 @@
 37 | inline given gen[A]: Schema[A] = derived
   | ^^^^^^^
   --------------------------------------------------------------------------------------------------------------------
---- Error: tests/neg/i13044.scala:50:40 ---------------------------------------------------------------------------------
-50 | implicit def typeSchema: Schema[A] = Schema.gen // error // error
+-- Error: tests/neg/i13044.scala:65:40 ---------------------------------------------------------------------------------
+65 | implicit def typeSchema: Schema[A] = Schema.gen // error // error
   | ^^^^^^^^^^
   | method recurse is declared as `inline`, but was not inlined
   |
diff --git a/tests/neg/i13044.scala b/tests/neg/i13044.scala
index 081b642c604c..42417a9096f9 100644
--- a/tests/neg/i13044.scala
+++ b/tests/neg/i13044.scala
@@ -37,7 +37,22 @@ trait SchemaDerivation {
   inline given gen[A]: Schema[A] = derived
 }

-case class H(i: Int)
+case class X15(i: Int)
+case class X14(i: X15)
+case class X13(i: X14)
+case class X12(i: X13)
+case class X11(i: X12)
+case class X10(i: X11)
+case class X9(i: X10)
+case class X8(i: X9)
+case class X7(i: X8)
+case class X6(i: X7)
+case class X5(i: X6)
+case class X4(i: X5)
+case class X3(i: X4)
+case class X2(i: X3)
+case class X1(i: X2)
+case class H(i: X1)
 case class G(h: H)
 case class F(g: G)
 case class E(f: Option[F])

From a1961676cb922f7bab7fff8263f24ff0a9a18048 Mon Sep 17 00:00:00 2001
From: Dale Wijnand
Date: Thu, 19 Oct 2023 14:47:57 +0100
Subject: [PATCH 014/216] Tweak convertible implicits fix

Rather than widen in viewExists, widen earlier, past type lambda
parameters. This allows `foo2` in `i16453b2` to be listed as a possible
implicit, as appropriate.
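
For illustration, the case this unblocks (adapted from the `i16453b2`
test added below; `Ctx`, `ct` and `foo2` are defined there):

    given foo2[A]: (Ctx => Int) = _ => 42
    // The implicit reference to foo2 is polymorphic in A. Widening its
    // underlying type past the type lambda parameter exposes the result
    // type `Ctx => Int`, which viewExists can convert (via `ct`) to the
    // expected `Ctx => Option[Int]`, so foo2 shows up in the error note.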
--- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 4 ++-- tests/neg/i16453b1.check | 8 ++++++++ tests/neg/i16453b1.scala | 12 ++++++++++++ tests/neg/i16453b2.check | 8 ++++++++ tests/neg/i16453b2.scala | 12 ++++++++++++ 5 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 tests/neg/i16453b1.check create mode 100644 tests/neg/i16453b1.scala create mode 100644 tests/neg/i16453b2.check create mode 100644 tests/neg/i16453b2.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5cba406a302e..ed742a847ec8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -850,7 +850,7 @@ trait Implicits: && !to.isError && !ctx.isAfterTyper && ctx.mode.is(Mode.ImplicitsEnabled) - && from.widen.isValueType + && from.isValueType && ( from.isValueSubType(to) || inferView(dummyTreeOfType(from), to) (using ctx.fresh.addMode(Mode.ImplicitExploration).setExploreTyperState()).isSuccess @@ -982,7 +982,7 @@ trait Implicits: .filter { imp => !isImplicitDefConversion(imp.underlying) && imp.symbol != defn.Predef_conforms - && viewExists(imp, fail.expectedType) + && viewExists(imp.underlying.resultType, fail.expectedType) } else Nil diff --git a/tests/neg/i16453b1.check b/tests/neg/i16453b1.check new file mode 100644 index 000000000000..07f23c95d9ad --- /dev/null +++ b/tests/neg/i16453b1.check @@ -0,0 +1,8 @@ +-- [E172] Type Error: tests/neg/i16453b1.scala:11:19 ------------------------------------------------------------------- +11 | val ko = get[Int] // error + | ^ + |No given instance of type Ctx => Option[Int] was found for parameter fn of method get + | + |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly. + |The following implicits in scope can be implicitly converted to Ctx => Option[Int]: + |- final lazy given val foo: Ctx => Int diff --git a/tests/neg/i16453b1.scala b/tests/neg/i16453b1.scala new file mode 100644 index 000000000000..4a99321a9421 --- /dev/null +++ b/tests/neg/i16453b1.scala @@ -0,0 +1,12 @@ +import scala.language.implicitConversions + +sealed trait Ctx +given ct[T]: Conversion[Ctx => T, Ctx => Option[T]] = fn => fn.andThen(Option.apply) + +def get[T](using fn: Ctx => Option[T]): Option[T] = ??? + +def Test = { + given foo: (Ctx => Int) = _ => 42 + val ok = get[Int](using summon[Ctx => Int]) + val ko = get[Int] // error +} diff --git a/tests/neg/i16453b2.check b/tests/neg/i16453b2.check new file mode 100644 index 000000000000..fc0a1fa37b82 --- /dev/null +++ b/tests/neg/i16453b2.check @@ -0,0 +1,8 @@ +-- [E172] Type Error: tests/neg/i16453b2.scala:11:19 ------------------------------------------------------------------- +11 | val ko = get[Int] // error + | ^ + |No given instance of type Ctx => Option[Int] was found for parameter fn of method get + | + |Note: implicit conversions are not automatically applied to arguments of using clauses. You will have to pass the argument explicitly. 
+ |The following implicits in scope can be implicitly converted to Ctx => Option[Int]: + |- final given def foo2[A]: Ctx => Int diff --git a/tests/neg/i16453b2.scala b/tests/neg/i16453b2.scala new file mode 100644 index 000000000000..6cafeb54b540 --- /dev/null +++ b/tests/neg/i16453b2.scala @@ -0,0 +1,12 @@ +import scala.language.implicitConversions + +sealed trait Ctx +given ct[T]: Conversion[Ctx => T, Ctx => Option[T]] = fn => fn.andThen(Option.apply) + +def get[T](using fn: Ctx => Option[T]): Option[T] = ??? + +def Test = { + given foo2[A]: (Ctx => Int) = _ => 42 + val ok = get[Int](using summon[Ctx => Int]) + val ko = get[Int] // error +} From a1961676cb922f7bab7fff8263f24ff0a9a18048 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 27 Oct 2023 14:00:35 +0100 Subject: [PATCH 015/216] Allow inferred parameter types always, when eta-expanding Rather than drawing an exemption based on fully defined types, report the right function arity. This leads eta-expansion to leave off the type, so then the parameter type is inferred. In the test case pos/i18453 this leads to the type var ?A being instantiated to X, rather than ?A & ?B being constrained against X & Y, which leads to ?A being instantiated to X & Y - and then failing to find a Box[X & Y] in scope. --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 6 +----- tests/neg/i5976.scala | 4 ++-- tests/pos/i18453.scala | 8 ++++++++ tests/pos/i18453.workaround.scala | 8 ++++++++ tests/semanticdb/metac.expect | 5 ++--- 5 files changed, 21 insertions(+), 10 deletions(-) create mode 100644 tests/pos/i18453.scala create mode 100644 tests/pos/i18453.workaround.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 6a726432a4b0..da7cc84c3eec 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4167,11 +4167,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val funExpected = functionExpected val arity = if funExpected then - if !isFullyDefined(pt, ForceDegree.none) && isFullyDefined(wtp, ForceDegree.none) then - // if method type is fully defined, but expected type is not, - // prioritize method parameter types as parameter types of the eta-expanded closure - 0 - else defn.functionArity(ptNorm) + defn.functionArity(ptNorm) else val nparams = wtp.paramInfos.length if nparams > 1 diff --git a/tests/neg/i5976.scala b/tests/neg/i5976.scala index 8a9c29b85ae1..ef2e743e39fe 100644 --- a/tests/neg/i5976.scala +++ b/tests/neg/i5976.scala @@ -1,7 +1,7 @@ object Test { def f(i: => Int) = i + i - val res = List(42).map(f) // error + val res = List(42).map(f) val g: (=> Int) => Int = f val h: Int => Int = g // error -} \ No newline at end of file +} diff --git a/tests/pos/i18453.scala b/tests/pos/i18453.scala new file mode 100644 index 000000000000..40dd14935a10 --- /dev/null +++ b/tests/pos/i18453.scala @@ -0,0 +1,8 @@ +trait Box[T] + +class Test: + def f[A, B](c: A => A & B)(using ba: Box[A]): Unit = ??? + + def g[X, Y](using bx: Box[X]): Unit = + def d(t: X): X & Y = t.asInstanceOf[X & Y] + f(d) diff --git a/tests/pos/i18453.workaround.scala b/tests/pos/i18453.workaround.scala new file mode 100644 index 000000000000..2c562279f0e8 --- /dev/null +++ b/tests/pos/i18453.workaround.scala @@ -0,0 +1,8 @@ +trait Box[T] + +class Test: + def f[A, B](c: A => A & B)(using ba: Box[A]): Unit = ??? 
+ + def g[X, Y](using bx: Box[X]): Unit = + def d(t: X): X & Y = t.asInstanceOf[X & Y] + f(u => d(u)) diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index e05a645c0141..ebe9a38917cf 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -1070,7 +1070,7 @@ Language => Scala Symbols => 181 entries Occurrences => 159 entries Diagnostics => 1 entries -Synthetics => 6 entries +Synthetics => 5 entries Symbols: _empty_/Enums. => final object Enums extends Object { self: Enums.type => +30 decls } @@ -1253,7 +1253,7 @@ _empty_/Enums.unwrap().(ev) => implicit given param ev: <:<[A, Option[B]] _empty_/Enums.unwrap().(opt) => param opt: Option[A] _empty_/Enums.unwrap().[A] => typeparam A _empty_/Enums.unwrap().[B] => typeparam B -local0 => param x: Option[B] +local0 => param x: A Occurrences: [0:7..0:12): Enums <- _empty_/Enums. @@ -1421,7 +1421,6 @@ Diagnostics: Synthetics: [52:9..52:13):Refl => *.unapply[Option[B]] -[52:31..52:50):identity[Option[B]] => *[Function1[A, Option[B]]] [54:14..54:18):Some => *.apply[Some[Int]] [54:14..54:34):Some(Some(1)).unwrap => *(given_<:<_T_T[Option[Int]]) [54:19..54:23):Some => *.apply[Int] From c17d94cd952868877f40a26d8925e88e1a281ee2 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 27 Oct 2023 21:40:34 -0700 Subject: [PATCH 016/216] Don't add explanation twice --- .../tools/dotc/reporting/MessageRendering.scala | 1 + .../src/dotty/tools/xsbt/DelegatingReporter.java | 16 ++++------------ 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala index f53359fb8b19..f01b0a735680 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala @@ -198,6 +198,7 @@ trait MessageRendering { end posStr /** Explanation rendered under "Explanation" header */ + @deprecated("Explanations are rendered with the message text", since="3.4") def explanation(m: Message)(using Context): String = { val sb = new StringBuilder( s"""| diff --git a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java index e6ddbc51ea32..577ef028f194 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java +++ b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java @@ -48,22 +48,14 @@ public void printSummary(Context ctx) { public void doReport(Diagnostic dia, Context ctx) { Severity severity = severityOf(dia.level()); Position position = positionOf(dia.pos().nonInlined()); - - StringBuilder rendered = new StringBuilder(); - rendered.append(messageAndPos(dia, ctx)); Message message = dia.msg(); - StringBuilder messageBuilder = new StringBuilder(); - messageBuilder.append(message.message()); + StringBuilder messageBuilder = new StringBuilder(message.message()); String diagnosticCode = String.valueOf(message.errorId().errorNumber()); - boolean shouldExplain = Diagnostic.shouldExplain(dia, ctx); List actions = CollectionConverters.asJava(message.actions(ctx)); - if (shouldExplain && !message.explanation().isEmpty()) { - rendered.append(explanation(message, ctx)); + if (Diagnostic.shouldExplain(dia, ctx) && !message.explanation().isEmpty()) messageBuilder.append(System.lineSeparator()).append(explanation(message, ctx)); - } - - delegate.log(new Problem(position, messageBuilder.toString(), severity, rendered.toString(), diagnosticCode, actions, - lookupVirtualFileId)); + 
Problem problem = new Problem(position, messageBuilder.toString(), severity, messageAndPos(dia, ctx), diagnosticCode, actions, lookupVirtualFileId);
+    delegate.log(problem);
   }
 
   public void reportBasicWarning(String message) {

From 5b5e67ee4a1953185ce4d503b9ae11a0ed45f516 Mon Sep 17 00:00:00 2001
From: Som Snytt
Date: Mon, 30 Oct 2023 11:56:56 -0700
Subject: [PATCH 017/216] Leverage string concat, blue explain is not
 deprecated

---
 .../dotty/tools/dotc/reporting/MessageRendering.scala | 1 -
 .../src/dotty/tools/xsbt/DelegatingReporter.java | 11 +++++++----
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
index f01b0a735680..f53359fb8b19 100644
--- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
@@ -198,7 +198,6 @@ trait MessageRendering {
   end posStr
 
   /** Explanation rendered under "Explanation" header */
-  @deprecated("Explanations are rendered with the message text", since="3.4")
   def explanation(m: Message)(using Context): String = {
     val sb = new StringBuilder(
       s"""|
diff --git a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java
index 577ef028f194..3bcff72601a7 100644
--- a/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java
+++ b/sbt-bridge/src/dotty/tools/xsbt/DelegatingReporter.java
@@ -49,12 +49,15 @@ public void doReport(Diagnostic dia, Context ctx) {
     Severity severity = severityOf(dia.level());
     Position position = positionOf(dia.pos().nonInlined());
     Message message = dia.msg();
-    StringBuilder messageBuilder = new StringBuilder(message.message());
+    String text;
+    if (Diagnostic.shouldExplain(dia, ctx) && !message.explanation().isEmpty())
+      text = message.message() + System.lineSeparator() + explanation(message, ctx);
+    else
+      text = message.message();
+    String rendered = messageAndPos(dia, ctx);
     String diagnosticCode = String.valueOf(message.errorId().errorNumber());
     List actions = CollectionConverters.asJava(message.actions(ctx));
-    if (Diagnostic.shouldExplain(dia, ctx) && !message.explanation().isEmpty())
-      messageBuilder.append(System.lineSeparator()).append(explanation(message, ctx));
-    Problem problem = new Problem(position, messageBuilder.toString(), severity, messageAndPos(dia, ctx), diagnosticCode, actions, lookupVirtualFileId);
+    Problem problem = new Problem(position, text, severity, rendered, diagnosticCode, actions, lookupVirtualFileId);
     delegate.log(problem);
   }
 

From ec75826f6ff78d0a04d3f93e6ca92bad9308f75d Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Fri, 20 Oct 2023 16:28:15 +0200
Subject: [PATCH 018/216] Fix callTrace of inlined methods

We need to keep the reference to the called method, not only the symbol
of the top-level class. This is important for the traces of the `assert`
method, which is defined in a different file.

This might also be useful for macro annotations.

This is also a solution to the awkward Select vs. Ident distinction used
to identify macros in `YCheckPositions`.
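
For illustration, a minimal sketch of the new call-trace construction (based
on the `PostTyper` hunk below; `call`, `pos`, `ctx`, `ref` and `withSpan` are
the names used there, and the commented-out line is the removed variant):

    // Before: the Inlined call trace kept only the enclosing top-level class,
    // i.e. an Ident of callSym.topLevelClass.typeRef:
    //   val callTrace = Inlines.inlineCallTrace(call.symbol, pos)
    // After: keep a reference to the inlined method itself, so traces of
    // methods defined in other files (such as `assert`) remain precise.
    val callTrace = ref(call.symbol)(using ctx.withSource(pos.source)).withSpan(pos.span)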
--- compiler/src/dotty/tools/dotc/ast/Trees.scala | 5 +++-- .../src/dotty/tools/dotc/inlines/Inlines.scala | 14 -------------- .../dotty/tools/dotc/transform/PickleQuotes.scala | 2 +- .../src/dotty/tools/dotc/transform/PostTyper.scala | 2 +- .../tools/dotc/transform/YCheckPositions.scala | 12 ++++++------ 5 files changed, 11 insertions(+), 24 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 6941596e1c9b..230e35c5f070 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -31,7 +31,7 @@ object Trees { /** Property key for backquoted identifiers and definitions */ val Backquoted: Property.StickyKey[Unit] = Property.StickyKey() - + val SyntheticUnit: Property.StickyKey[Unit] = Property.StickyKey() /** Trees take a parameter indicating what the type of their `tpe` field @@ -661,7 +661,8 @@ object Trees { * * @param call Info about the original call that was inlined * Until PostTyper, this is the full call, afterwards only - * a reference to the toplevel class from which the call was inlined. + * a reference to the method or the top-level class from + * which the call was inlined. * @param bindings Bindings for proxies to be used in the inlined code * @param expansion The inlined tree, minus bindings. * diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 25e9b1480370..bef56374b6f0 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -299,20 +299,6 @@ object Inlines: (new Reposition).transform(tree) end reposition - /** Leave only a call trace consisting of - * - a reference to the top-level class from which the call was inlined, - * - the call's position - * in the call field of an Inlined node. - * The trace has enough info to completely reconstruct positions. 
- * Note: For macros it returns a Select and for other inline methods it returns an Ident (this distinction is only temporary to be able to run YCheckPositions) - */ - def inlineCallTrace(callSym: Symbol, pos: SourcePosition)(using Context): Tree = { - assert(ctx.source == pos.source) - val topLevelCls = callSym.topLevelClass - if (callSym.is(Macro)) ref(topLevelCls.owner).select(topLevelCls.name)(using ctx.withOwner(topLevelCls.owner)).withSpan(pos.span) - else Ident(topLevelCls.typeRef).withSpan(pos.span) - } - private object Intrinsics: import dotty.tools.dotc.reporting.Diagnostic.Error private enum ErrorKind: diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala index b368e47bf0b3..01bb474cec35 100644 --- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala @@ -304,7 +304,7 @@ object PickleQuotes { def pickleAsTasty() = { val body1 = if body.isType then body - else Inlined(Inlines.inlineCallTrace(ctx.owner, quote.sourcePos), Nil, body) + else Inlined(ref(ctx.owner.topLevelClass.typeRef).withSpan(quote.span), Nil, body) val pickleQuote = PickledQuotes.pickleQuote(body1) val pickledQuoteStrings = pickleQuote match case x :: Nil => Literal(Constant(x)) diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index ff322dad9ab6..3b8376ab4970 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -367,7 +367,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => val pos = call.sourcePos CrossVersionChecks.checkExperimentalRef(call.symbol, pos) withMode(Mode.InlinedCall)(transform(call)) - val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) + val callTrace = ref(call.symbol)(using ctx.withSource(pos.source)).withSpan(pos.span) cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using inlineContext(tree))) case templ: Template => withNoCheckNews(templ.parents.flatMap(newPart)) { diff --git a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala index 3cf74ee3fdb3..15adad47a3fc 100644 --- a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala +++ b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala @@ -35,7 +35,7 @@ class YCheckPositions extends Phase { val currentSource = sources.head assert(tree.source == currentSource, i"wrong source set for $tree # ${tree.uniqueId} of ${tree.getClass}, set to ${tree.source} but context had $currentSource\n ${tree.symbol.flagsString}") - // Recursivlely check children while keeping track of current source + // Recursively check children while keeping track of current source reporting.trace(i"check pos ${tree.getClass} ${tree.source} ${sources.head} $tree") { tree match { case tree @ Inlined(_, bindings, expansion) if tree.inlinedFromOuterScope => @@ -46,7 +46,7 @@ class YCheckPositions extends Phase { sources = old case tree @ Inlined(call, bindings, expansion) => // bindings.foreach(traverse(_)) // TODO check inline proxies (see tests/tun/lst) - sources = call.symbol.topLevelClass.source :: sources + sources = call.symbol.source :: sources if (!isMacro(call)) // FIXME macro implementations can drop Inlined nodes. 
We should reinsert them after macro expansion based on the positions of the trees
            traverse(expansion)(using inlineContext(tree).withSource(sources.head))
          sources = sources.tail
@@ -61,10 +61,10 @@ class YCheckPositions extends Phase {
 
   private def isMacro(call: Tree)(using Context) =
     call.symbol.is(Macro) ||
-    (call.symbol.isClass && call.tpe.derivesFrom(defn.MacroAnnotationClass)) ||
-    // The call of a macro after typer is encoded as a Select while other inlines are Ident
-    // TODO remove this distinction once Inline nodes of expanded macros can be trusted (also in Inliner.inlineCallTrace)
-    (!(ctx.phase <= postTyperPhase) && call.isInstanceOf[Select])
+    (call.symbol.isClass && call.tpe.derivesFrom(defn.MacroAnnotationClass)) ||
+    // In 3.0-3.3, the call of a macro after typer is encoded as a Select while other inlines are Ident.
+    // In those versions we kept the reference to the top-level class instead of the method.
+    (!(ctx.phase <= postTyperPhase) && call.symbol.isClass && call.isInstanceOf[Select])
 }
 

From eb0c408050a86934516a896ebfc035ea40807bcc Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Fri, 20 Oct 2023 16:41:04 +0200
Subject: [PATCH 019/216] Enable some tests using the Scala 2 library TASTy

---
 .github/workflows/ci.yaml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index aae6ec8b9b0b..636b91084529 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -209,6 +209,10 @@ jobs:
         run: sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test"
         shell: cmd
 
+      - name: Test with Scala 2 library TASTy
+        run: sbt ";set ThisBuild/Build.useScala2LibraryTasty := true ;scala3-bootstrapped/testCompilation i5" # only test a subset of tests to avoid doubling the CI execution time
+        shell: cmd
+
       - name: Scala.js Test
         run: sbt ";sjsJUnitTests/test ;set sjsJUnitTests/scalaJSLinkerConfig ~= switchToESModules ;sjsJUnitTests/test ;sjsCompilerTests/test"
         shell: cmd

From 38b0168cc2cbba99e39bab89e459bfea51bda783 Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Thu, 2 Nov 2023 09:44:43 +0100
Subject: [PATCH 020/216] Use `compiletime.uninitialized` in compiler

---
 .../tools/backend/jvm/BCodeHelpers.scala | 3 +-
 .../dotty/tools/backend/jvm/GenBCode.scala | 13 ++---
 .../backend/jvm/GeneratedClassHandler.scala | 3 +-
 .../tools/backend/sjs/JSDefinitions.scala | 3 +-
 compiler/src/dotty/tools/dotc/Bench.scala | 3 +-
 compiler/src/dotty/tools/dotc/Run.scala | 4 +-
 .../src/dotty/tools/dotc/ast/Positioned.scala | 4 +-
 compiler/src/dotty/tools/dotc/ast/tpd.scala | 3 +-
 .../tools/dotc/core/ConstraintRunInfo.scala | 3 +-
 .../src/dotty/tools/dotc/core/Contexts.scala | 2 +-
 .../dotty/tools/dotc/core/Definitions.scala | 5 +-
 .../dotty/tools/dotc/core/Denotations.scala | 6 ++-
 .../tools/dotc/core/OrderingConstraint.scala | 4 +-
 .../src/dotty/tools/dotc/core/Phases.scala | 51 ++++++++++---------
 .../src/dotty/tools/dotc/core/Scopes.scala | 1 +
 .../tools/dotc/core/SymDenotations.scala | 3 +-
 .../src/dotty/tools/dotc/core/Symbols.scala | 4 +-
 .../dotty/tools/dotc/core/TyperState.scala | 19 +++----
 .../src/dotty/tools/dotc/core/Types.scala | 28 +++++-----
 .../dotc/core/classfile/ClassfileParser.scala | 10 ++--
 .../classfile/ClassfileTastyUUIDParser.scala | 3 +-
 .../dotc/core/tasty/PositionUnpickler.scala | 8 +--
 .../tools/dotc/core/tasty/TreeUnpickler.scala | 3 +-
 .../tools/dotc/parsing/CharArrayReader.scala | 4 +-
 .../dotc/parsing/xml/MarkupParsers.scala | 3 +-
 .../dotc/parsing/xml/SymbolicXMLBuilder.scala | 3 +-
.../dotty/tools/dotc/plugins/Plugins.scala | 6 ++- .../tools/dotc/sbt/ExtractDependencies.scala | 10 ++-- .../tools/dotc/transform/CapturedVars.scala | 4 +- .../dotty/tools/dotc/transform/CtxLazy.scala | 4 +- .../tools/dotc/transform/Dependencies.scala | 5 +- .../dotty/tools/dotc/transform/Flatten.scala | 4 +- .../dotc/transform/ForwardDepChecks.scala | 8 +-- .../dotc/transform/Instrumentation.scala | 12 +++-- .../tools/dotc/transform/LambdaLift.scala | 4 +- .../dotty/tools/dotc/transform/LazyVals.scala | 3 +- .../tools/dotc/transform/MegaPhase.scala | 8 +-- .../dotty/tools/dotc/transform/Memoize.scala | 3 +- .../dotc/transform/OverridingPairs.scala | 6 ++- .../dotty/tools/dotc/transform/TailRec.scala | 4 +- .../transform/sjs/ExplicitJSClasses.scala | 3 +- .../dotty/tools/dotc/typer/Implicits.scala | 3 +- .../dotty/tools/dotc/typer/ImportInfo.scala | 6 ++- .../src/dotty/tools/dotc/typer/Namer.scala | 3 +- .../tools/dotc/typer/VarianceChecker.scala | 4 +- .../tools/dotc/util/GenericHashMap.scala | 8 +-- .../tools/dotc/util/GenericHashSet.scala | 8 +-- .../src/dotty/tools/dotc/util/IntMap.scala | 4 +- .../tools/dotc/util/PerfectHashing.scala | 8 +-- .../dotty/tools/dotc/util/SourceFile.scala | 3 +- .../tools/repl/CollectTopLevelImports.scala | 4 +- compiler/src/dotty/tools/repl/Rendering.scala | 5 +- .../src/dotty/tools/repl/ReplDriver.scala | 9 ++-- .../tools/dotc/reporting/TestReporter.scala | 7 +-- .../tools/vulpix/RunnerOrchestration.scala | 5 +- 55 files changed, 216 insertions(+), 141 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index c36c8c546635..3085c9411222 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -9,6 +9,7 @@ import scala.tools.asm import scala.tools.asm.AnnotationVisitor import scala.tools.asm.ClassWriter import scala.collection.mutable +import scala.compiletime.uninitialized import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.ast.tpd @@ -576,7 +577,7 @@ trait BCodeHelpers extends BCodeIdiomatic { /* builder of mirror classes */ class JMirrorBuilder extends JCommonBuilder { - private var cunit: CompilationUnit = _ + private var cunit: CompilationUnit = uninitialized def getCurrentCUnit(): CompilationUnit = cunit; /* Generate a mirror class for a top-level module. 
A mirror class is a class diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 8c1691e88b80..6dcaaa23c61d 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -9,6 +9,7 @@ import Contexts._ import Symbols._ import dotty.tools.io._ import scala.collection.mutable +import scala.compiletime.uninitialized class GenBCode extends Phase { self => @@ -25,7 +26,7 @@ class GenBCode extends Phase { self => private val entryPoints = new mutable.HashSet[String]() def registerEntryPoint(s: String): Unit = entryPoints += s - private var _backendInterface: DottyBackendInterface = _ + private var _backendInterface: DottyBackendInterface = uninitialized def backendInterface(using ctx: Context): DottyBackendInterface = { if _backendInterface eq null then // Enforce usage of FreshContext so we would be able to modify compilation unit between runs @@ -36,7 +37,7 @@ class GenBCode extends Phase { self => _backendInterface } - private var _codeGen: CodeGen = _ + private var _codeGen: CodeGen = uninitialized def codeGen(using Context): CodeGen = { if _codeGen eq null then val int = backendInterface @@ -45,28 +46,28 @@ class GenBCode extends Phase { self => _codeGen } - private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = _ + private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = uninitialized def bTypes(using Context): BTypesFromSymbols[DottyBackendInterface] = { if _bTypes eq null then _bTypes = BTypesFromSymbols(backendInterface, frontendAccess) _bTypes } - private var _frontendAccess: PostProcessorFrontendAccess | Null = _ + private var _frontendAccess: PostProcessorFrontendAccess | Null = uninitialized def frontendAccess(using Context): PostProcessorFrontendAccess = { if _frontendAccess eq null then _frontendAccess = PostProcessorFrontendAccess.Impl(backendInterface, entryPoints) _frontendAccess.nn } - private var _postProcessor: PostProcessor | Null = _ + private var _postProcessor: PostProcessor | Null = uninitialized def postProcessor(using Context): PostProcessor = { if _postProcessor eq null then _postProcessor = new PostProcessor(frontendAccess, bTypes) _postProcessor.nn } - private var _generatedClassHandler: GeneratedClassHandler | Null = _ + private var _generatedClassHandler: GeneratedClassHandler | Null = uninitialized def generatedClassHandler(using Context): GeneratedClassHandler = { if _generatedClassHandler eq null then _generatedClassHandler = GeneratedClassHandler(postProcessor) diff --git a/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala index fc02d9597efe..f17f9d620d90 100644 --- a/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala +++ b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala @@ -15,6 +15,7 @@ import dotty.tools.dotc.core.Phases import dotty.tools.dotc.core.Decorators.em import scala.language.unsafeNulls +import scala.compiletime.uninitialized /** * Interface to handle post-processing and classfile writing (see [[PostProcessor]]) of generated @@ -185,7 +186,7 @@ final private class CompilationUnitInPostProcess(private var classes: List[Gener } /** the main async task submitted onto the scheduler */ - var task: Future[Unit] = _ + var task: Future[Unit] = uninitialized val bufferedReporting = new PostProcessorFrontendAccess.BufferingBackendReporting() } \ No newline at end of file diff --git 
a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala index 5336d60129ac..49755a8ee83e 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala @@ -12,6 +12,7 @@ import Symbols._ import StdNames._ import dotty.tools.dotc.config.SJSPlatform +import scala.compiletime.uninitialized object JSDefinitions { /** The Scala.js-specific definitions for the current context. */ @@ -249,7 +250,7 @@ final class JSDefinitions()(using Context) { @threadUnsafe lazy val Selectable_reflectiveSelectableFromLangReflectiveCallsR = SelectableModule.requiredMethodRef("reflectiveSelectableFromLangReflectiveCalls") def Selectable_reflectiveSelectableFromLangReflectiveCalls(using Context) = Selectable_reflectiveSelectableFromLangReflectiveCallsR.symbol - private var allRefClassesCache: Set[Symbol] = _ + private var allRefClassesCache: Set[Symbol] = uninitialized def allRefClasses(using Context): Set[Symbol] = { if (allRefClassesCache == null) { val baseNames = List("Object", "Boolean", "Character", "Byte", "Short", diff --git a/compiler/src/dotty/tools/dotc/Bench.scala b/compiler/src/dotty/tools/dotc/Bench.scala index 5f5e9fc799b5..40ad66a0a9e6 100644 --- a/compiler/src/dotty/tools/dotc/Bench.scala +++ b/compiler/src/dotty/tools/dotc/Bench.scala @@ -6,6 +6,7 @@ import reporting.Reporter import io.AbstractFile import scala.annotation.internal.sharable +import scala.compiletime.uninitialized /** A main class for running compiler benchmarks. Can instantiate a given * number of compilers and run each (sequentially) a given number of times @@ -17,7 +18,7 @@ object Bench extends Driver: @sharable private var numCompilers = 1 @sharable private var waitAfter = -1 @sharable private var curCompiler = 0 - @sharable private var times: Array[Int] = _ + @sharable private var times: Array[Int] = uninitialized override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = var reporter: Reporter = emptyReporter diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index d85e03e92d60..7b09e7ef833a 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -65,7 +65,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint private var myUnits: List[CompilationUnit] = Nil private var myUnitsCached: List[CompilationUnit] = Nil - private var myFiles: Set[AbstractFile] = _ + private var myFiles: Set[AbstractFile] = uninitialized // `@nowarn` annotations by source file, populated during typer private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty @@ -511,7 +511,7 @@ object Run { var currentCompletedSubtraversalCount: Int = 0 // completed subphases in the current phase var seenPhaseCount: Int = 0 // how many phases we've seen so far - private var currPhase: Phase = uninitialized // initialized by enterPhase + private var currPhase: Phase = uninitialized // initialized by enterPhase private var subPhases: SubPhases = uninitialized // initialized by enterPhase private var currPhaseName: String = uninitialized // initialized by enterPhase private var nextPhaseName: String = uninitialized // initialized by enterPhase diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala index dd783be7a9e1..980042f8292e 100644 --- 
a/compiler/src/dotty/tools/dotc/ast/Positioned.scala +++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala @@ -13,12 +13,14 @@ import ast.Trees.mods import annotation.constructorOnly import annotation.internal.sharable +import scala.compiletime.uninitialized + /** A base class for things that have positions (currently: modifiers and trees) */ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable { import Positioned.{ids, nextId, debugId} - private var mySpan: Span = _ + private var mySpan: Span = uninitialized private var mySource: SourceFile = src diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index e0b282e2d8db..7625a61d48aa 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -18,6 +18,7 @@ import typer.ConstFold import scala.annotation.tailrec import scala.collection.mutable.ListBuffer +import scala.compiletime.uninitialized /** Some creators for typed trees */ object tpd extends Trees.Instance[Type] with TypedTreeInfo { @@ -1309,7 +1310,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { trait TreeProvider { protected def computeRootTrees(using Context): List[Tree] - private var myTrees: List[Tree] | Null = _ + private var myTrees: List[Tree] | Null = uninitialized /** Get trees defined by this provider. Cache them if -Yretain-trees is set. */ def rootTrees(using Context): List[Tree] = diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala index d2b1246a8149..412b4228677d 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala @@ -3,10 +3,11 @@ package core import Contexts._ import config.Printers.{default, typr} +import scala.compiletime.uninitialized trait ConstraintRunInfo { self: Run => private var maxSize = 0 - private var maxConstraint: Constraint | Null = _ + private var maxConstraint: Constraint | Null = uninitialized def recordConstraintSize(c: Constraint, size: Int): Unit = if (size > maxSize) { maxSize = size diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index a2adc0058938..b6474ca7973c 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -558,7 +558,7 @@ object Contexts { private var _owner: Symbol = uninitialized final def owner: Symbol = _owner - private var _tree: Tree[?]= _ + private var _tree: Tree[?] = uninitialized final def tree: Tree[?] 
= _tree private var _scope: Scope = uninitialized diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 97b1b7db1b21..205d43cd07ca 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -20,6 +20,7 @@ import cc.{CaptureSet, RetainingType} import ast.tpd.ref import scala.annotation.tailrec +import scala.compiletime.uninitialized object Definitions { @@ -44,7 +45,7 @@ object Definitions { class Definitions { import Definitions._ - private var initCtx: Context = _ + private var initCtx: Context = uninitialized private given currentContext[Dummy_so_its_a_def]: Context = initCtx private def newPermanentSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) = @@ -2001,7 +2002,7 @@ class Definitions { class PerRun[T](generate: Context ?=> T) { private var current: RunId = NoRunId - private var cached: T = _ + private var cached: T = uninitialized def apply()(using Context): T = { if (current != ctx.runId) { cached = generate diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 640ba8015be7..1969c87d7a86 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -24,6 +24,8 @@ import util.common._ import typer.ProtoTypes.NoViewsAllowed import collection.mutable.ListBuffer +import scala.compiletime.uninitialized + /** Denotations represent the meaning of symbols and named types. * The following diagram shows how the principal types of denotations * and their denoting entities relate to each other. Lines ending in @@ -121,8 +123,8 @@ object Denotations { /** Map `f` over all single denotations and aggregate the results with `g`. 
*/ def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T - private var cachedPrefix: Type = _ - private var cachedAsSeenFrom: AsSeenFromResult = _ + private var cachedPrefix: Type = uninitialized + private var cachedAsSeenFrom: AsSeenFromResult = uninitialized private var validAsSeenFrom: Period = Nowhere type AsSeenFromResult <: PreDenotation diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index 0328cea9b3ca..0f05778266fd 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -14,6 +14,8 @@ import annotation.tailrec import annotation.internal.sharable import cc.{CapturingType, derivedCapturingType} +import scala.compiletime.uninitialized + object OrderingConstraint { /** If true, use reverse dependencies in `replace` to avoid checking the bounds @@ -881,7 +883,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, i += 1 } - private var myUninstVars: mutable.ArrayBuffer[TypeVar] | Null = _ + private var myUninstVars: mutable.ArrayBuffer[TypeVar] | Null = uninitialized /** The uninstantiated typevars of this constraint */ def uninstVars: collection.Seq[TypeVar] = { diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 9ba8332a8634..dd354b66cb4a 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -21,6 +21,7 @@ import typer.ImportInfo.withRootImports import ast.{tpd, untpd} import scala.annotation.internal.sharable import scala.util.control.NonFatal +import scala.compiletime.uninitialized object Phases { @@ -205,30 +206,30 @@ object Phases { if nextDenotTransformerId(i) == phase.id then nextDenotTransformerId(i) = nextDenotTransformerId(phase.id + 1) - private var myParserPhase: Phase = _ - private var myTyperPhase: Phase = _ - private var myPostTyperPhase: Phase = _ - private var mySbtExtractDependenciesPhase: Phase = _ - private var myPicklerPhase: Phase = _ - private var myInliningPhase: Phase = _ - private var myStagingPhase: Phase = _ - private var mySplicingPhase: Phase = _ - private var myFirstTransformPhase: Phase = _ - private var myCollectNullableFieldsPhase: Phase = _ - private var myRefChecksPhase: Phase = _ - private var myPatmatPhase: Phase = _ - private var myElimRepeatedPhase: Phase = _ - private var myElimByNamePhase: Phase = _ - private var myExtensionMethodsPhase: Phase = _ - private var myExplicitOuterPhase: Phase = _ - private var myGettersPhase: Phase = _ - private var myErasurePhase: Phase = _ - private var myElimErasedValueTypePhase: Phase = _ - private var myLambdaLiftPhase: Phase = _ - private var myCountOuterAccessesPhase: Phase = _ - private var myFlattenPhase: Phase = _ - private var myGenBCodePhase: Phase = _ - private var myCheckCapturesPhase: Phase = _ + private var myParserPhase: Phase = uninitialized + private var myTyperPhase: Phase = uninitialized + private var myPostTyperPhase: Phase = uninitialized + private var mySbtExtractDependenciesPhase: Phase = uninitialized + private var myPicklerPhase: Phase = uninitialized + private var myInliningPhase: Phase = uninitialized + private var myStagingPhase: Phase = uninitialized + private var mySplicingPhase: Phase = uninitialized + private var myFirstTransformPhase: Phase = uninitialized + private var myCollectNullableFieldsPhase: Phase = uninitialized + private var myRefChecksPhase: Phase = 
uninitialized + private var myPatmatPhase: Phase = uninitialized + private var myElimRepeatedPhase: Phase = uninitialized + private var myElimByNamePhase: Phase = uninitialized + private var myExtensionMethodsPhase: Phase = uninitialized + private var myExplicitOuterPhase: Phase = uninitialized + private var myGettersPhase: Phase = uninitialized + private var myErasurePhase: Phase = uninitialized + private var myElimErasedValueTypePhase: Phase = uninitialized + private var myLambdaLiftPhase: Phase = uninitialized + private var myCountOuterAccessesPhase: Phase = uninitialized + private var myFlattenPhase: Phase = uninitialized + private var myGenBCodePhase: Phase = uninitialized + private var myCheckCapturesPhase: Phase = uninitialized final def parserPhase: Phase = myParserPhase final def typerPhase: Phase = myTyperPhase @@ -389,7 +390,7 @@ object Phases { def printingContext(ctx: Context): Context = ctx private var myPeriod: Period = Periods.InvalidPeriod - private var myBase: ContextBase = _ + private var myBase: ContextBase = uninitialized private var myErasedTypes = false private var myFlatClasses = false private var myRefChecked = false diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index 99076b422358..7762ce8da339 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -19,6 +19,7 @@ import printing.Printer import SymDenotations.NoDenotation import collection.mutable +import scala.compiletime.uninitialized object Scopes { diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 395164b1dd48..c2e0e6f717dd 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -27,6 +27,7 @@ import transform.TypeUtils._ import cc.{CapturingType, derivedCapturingType} import scala.annotation.internal.sharable +import scala.compiletime.uninitialized object SymDenotations { @@ -2431,7 +2432,7 @@ object SymDenotations { initPrivateWithin: Symbol) extends ClassDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin) { - private var packageObjsCache: List[ClassDenotation] = _ + private var packageObjsCache: List[ClassDenotation] = uninitialized private var packageObjsRunId: RunId = NoRunId private var ambiguityWarningIssued: Boolean = false diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 1e21c1a743f4..9c7c6a9a18bf 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -33,6 +33,8 @@ import scala.annotation.internal.sharable import config.Printers.typr import dotty.tools.dotc.classpath.FileUtils.isScalaBinary +import scala.compiletime.uninitialized + object Symbols { implicit def eqSymbol: CanEqual[Symbol, Symbol] = CanEqual.derived @@ -88,7 +90,7 @@ object Symbols { ctx.settings.YcheckInitGlobal.value /** The last denotation of this symbol */ - private var lastDenot: SymDenotation = _ + private var lastDenot: SymDenotation = uninitialized private var checkedPeriod: Period = Nowhere private[core] def invalidateDenotCache(): Unit = { checkedPeriod = Nowhere } diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index d2df2a2aebef..54ddd4c0eb6b 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala +++ 
b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -14,6 +14,7 @@ import util.{Stats, SimpleIdentityMap} import Decorators._ import scala.annotation.internal.sharable +import scala.compiletime.uninitialized object TyperState { @sharable private var nextId: Int = 0 @@ -44,19 +45,19 @@ object TyperState { class TyperState() { import TyperState.LevelMap - private var myId: Int = _ + private var myId: Int = uninitialized def id: Int = myId - private var previous: TyperState | Null = _ + private var previous: TyperState | Null = uninitialized - private var myReporter: Reporter = _ + private var myReporter: Reporter = uninitialized def reporter: Reporter = myReporter /** A fresh type state with the same constraint as this one and the given reporter */ def setReporter(reporter: Reporter): this.type = { myReporter = reporter; this } - private var myConstraint: Constraint = _ + private var myConstraint: Constraint = uninitialized def constraint: Constraint = myConstraint def constraint_=(c: Constraint)(using Context): Unit = { @@ -66,9 +67,9 @@ class TyperState() { c.checkConsistentVars() } - private var previousConstraint: Constraint = _ + private var previousConstraint: Constraint = uninitialized - private var myIsCommittable: Boolean = _ + private var myIsCommittable: Boolean = uninitialized def isCommittable: Boolean = myIsCommittable @@ -79,7 +80,7 @@ class TyperState() { def isGlobalCommittable: Boolean = isCommittable && (previous == null || previous.uncheckedNN.isGlobalCommittable) - private var isCommitted: Boolean = _ + private var isCommitted: Boolean = uninitialized /** The set of uninstantiated type variables which have this state as their owning state. * @@ -87,11 +88,11 @@ class TyperState() { * if `tstate.isCommittable` then * `tstate.ownedVars.contains(tvar)` iff `tvar.owningState.get eq tstate` */ - private var myOwnedVars: TypeVars = _ + private var myOwnedVars: TypeVars = uninitialized def ownedVars: TypeVars = myOwnedVars def ownedVars_=(vs: TypeVars): Unit = myOwnedVars = vs - private var upLevels: LevelMap = _ + private var upLevels: LevelMap = uninitialized /** Initializes all fields except reporter, isCommittable, which need to be * set separately. diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index eb878b430183..f26cf432277f 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2164,7 +2164,7 @@ object Types { /** A trait for references in CaptureSets. These can be NamedTypes, ThisTypes or ParamRefs */ trait CaptureRef extends SingletonType: - private var myCaptureSet: CaptureSet | Null = _ + private var myCaptureSet: CaptureSet | Null = uninitialized private var myCaptureSetRunId: Int = NoRunId private var mySingletonCaptureSet: CaptureSet.Const | Null = null @@ -2285,7 +2285,7 @@ object Types { private var lastSymbol: Symbol | Null = null private var checkedPeriod: Period = Nowhere private var myStableHash: Byte = 0 - private var mySignature: Signature = _ + private var mySignature: Signature = uninitialized private var mySignatureRunId: Int = NoRunId // Invariants: @@ -2941,7 +2941,7 @@ object Types { type ThisName = TypeName private var myCanDropAliasPeriod: Period = Nowhere - private var myCanDropAlias: Boolean = _ + private var myCanDropAlias: Boolean = uninitialized /** Given an alias type `type A = B` where a recursive comparison with `B` yields * `false`, can we conclude that the comparison is definitely false? 
@@ -3405,7 +3405,7 @@ object Types { abstract case class AndType(tp1: Type, tp2: Type) extends AndOrType { def isAnd: Boolean = true private var myBaseClassesPeriod: Period = Nowhere - private var myBaseClasses: List[ClassSymbol] = _ + private var myBaseClasses: List[ClassSymbol] = uninitialized /** Base classes are the merge of the operand base classes. */ override final def baseClasses(using Context): List[ClassSymbol] = { if (myBaseClassesPeriod != ctx.period) { @@ -3498,7 +3498,7 @@ object Types { def isAnd: Boolean = false def isSoft: Boolean private var myBaseClassesPeriod: Period = Nowhere - private var myBaseClasses: List[ClassSymbol] = _ + private var myBaseClasses: List[ClassSymbol] = uninitialized /** Base classes are the intersection of the operand base classes. */ override final def baseClasses(using Context): List[ClassSymbol] = { if (myBaseClassesPeriod != ctx.period) { @@ -3527,7 +3527,7 @@ object Types { myFactorCount else 1 - private var myJoin: Type = _ + private var myJoin: Type = uninitialized private var myJoinPeriod: Period = Nowhere /** Replace or type by the closest non-or type above it */ @@ -3541,7 +3541,7 @@ object Types { myJoin } - private var myUnion: Type = _ + private var myUnion: Type = uninitialized private var myUnionPeriod: Period = Nowhere override def widenUnionWithoutNull(using Context): Type = @@ -3556,8 +3556,8 @@ object Types { myUnion private var atomsRunId: RunId = NoRunId - private var myAtoms: Atoms = _ - private var myWidened: Type = _ + private var myAtoms: Atoms = uninitialized + private var myWidened: Type = uninitialized private def computeAtoms()(using Context): Atoms = val tp1n = tp1.normalized @@ -3797,11 +3797,11 @@ object Types { // (1) mySignatureRunId != NoRunId => mySignature != null // (2) myJavaSignatureRunId != NoRunId => myJavaSignature != null - private var mySignature: Signature = _ + private var mySignature: Signature = uninitialized private var mySignatureRunId: Int = NoRunId - private var myJavaSignature: Signature = _ + private var myJavaSignature: Signature = uninitialized private var myJavaSignatureRunId: Int = NoRunId - private var myScala2Signature: Signature = _ + private var myScala2Signature: Signature = uninitialized private var myScala2SignatureRunId: Int = NoRunId /** If `isJava` is false, the Scala signature of this method. Otherwise, its Java signature. 
@@ -4993,7 +4993,7 @@ object Types { def underlying(using Context): Type = bound private var myReduced: Type | Null = null - private var reductionContext: util.MutableMap[Type, Type] = _ + private var reductionContext: util.MutableMap[Type, Type] = uninitialized override def tryNormalize(using Context): Type = try @@ -5420,7 +5420,7 @@ object Types { override def stripped(using Context): Type = parent.stripped private var isRefiningKnown = false - private var isRefiningCache: Boolean = _ + private var isRefiningCache: Boolean = uninitialized def isRefining(using Context): Boolean = { if (!isRefiningKnown) { diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 5e816502f359..349dbc445971 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -25,6 +25,8 @@ import io.{AbstractFile, ZipArchive} import scala.util.control.NonFatal import dotty.tools.dotc.classpath.FileUtils.classToTasty +import scala.compiletime.uninitialized + object ClassfileParser { import ClassfileConstants._ @@ -237,11 +239,11 @@ class ClassfileParser( protected val staticModule: Symbol = moduleRoot.sourceModule(using ictx) - protected val instanceScope: MutableScope = newScope(0) // the scope of all instance definitions - protected val staticScope: MutableScope = newScope(0) // the scope of all static definitions - protected var pool: ConstantPool = _ // the classfile's constant pool + protected val instanceScope: MutableScope = newScope(0) // the scope of all instance definitions + protected val staticScope: MutableScope = newScope(0) // the scope of all static definitions + protected var pool: ConstantPool = uninitialized // the classfile's constant pool - protected var currentClassName: SimpleName = _ // JVM name of the current class + protected var currentClassName: SimpleName = uninitialized // JVM name of the current class protected var classTParams: Map[Name, Symbol] = Map() private var Scala2UnpicklingMode = Mode.Scala2Unpickling diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala index a9c91a68bb60..4c4885fd5313 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala @@ -2,6 +2,7 @@ package dotty.tools.dotc package core.classfile import scala.language.unsafeNulls +import scala.compiletime.uninitialized import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Decorators._ @@ -21,7 +22,7 @@ class ClassfileTastyUUIDParser(classfile: AbstractFile)(ictx: Context) { import ClassfileConstants._ - private var pool: ConstantPool = _ // the classfile's constant pool + private var pool: ConstantPool = uninitialized // the classfile's constant pool def checkTastyUUID(tastyUUID: UUID)(using Context): Unit = try ctx.base.reusableDataReader.withInstance { reader => implicit val reader2 = reader.reset(classfile) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala index 9c66e43eae80..bc58138f2db6 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala @@ -3,6 +3,8 @@ package dotc package core package tasty 
+import scala.compiletime.uninitialized + import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader} import TastyFormat.SOURCE import TastyBuffer.{Addr, NameRef} @@ -14,9 +16,9 @@ import Names.TermName class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { import reader._ - private var myLineSizes: Array[Int] = _ - private var mySpans: util.HashMap[Addr, Span] = _ - private var mySourcePaths: util.HashMap[Addr, String] = _ + private var myLineSizes: Array[Int] = uninitialized + private var mySpans: util.HashMap[Addr, Span] = uninitialized + private var mySourcePaths: util.HashMap[Addr, String] = uninitialized private var isDefined = false def ensureDefined(): Unit = { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 1748b1edd08e..840bb3bd94fb 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -46,6 +46,7 @@ import dotty.tools.tasty.TastyFormat._ import scala.annotation.constructorOnly import scala.annotation.internal.sharable +import scala.compiletime.uninitialized /** Unpickler for typed trees * @param reader the reader from which to unpickle @@ -88,7 +89,7 @@ class TreeUnpickler(reader: TastyReader, private var seenRoots: Set[Symbol] = Set() /** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. */ - private var ownerTree: OwnerTree = _ + private var ownerTree: OwnerTree = uninitialized /** Was unpickled class compiled with pureFunctions? */ private var withPureFuns: Boolean = false diff --git a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala index c63409d0d52b..a3769f4f813d 100644 --- a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala +++ b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala @@ -4,6 +4,8 @@ package parsing import util.Chars._ +import scala.compiletime.uninitialized + abstract class CharArrayReader { self => val buf: Array[Char] @@ -16,7 +18,7 @@ abstract class CharArrayReader { self => protected def error(msg: String, offset: Int): Unit /** the last read character */ - var ch: Char = _ + var ch: Char = uninitialized /** The offset one past the last read character */ var charOffset: Int = startFrom diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index b3f41fab9eaa..0f0a2a3ee61f 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -4,6 +4,7 @@ package parsing package xml import scala.language.unsafeNulls +import scala.compiletime.uninitialized import scala.collection.mutable import scala.collection.BufferedIterator @@ -71,7 +72,7 @@ object MarkupParsers { if (ch == SU) throw TruncatedXMLControl else reportSyntaxError(msg) - var input : CharArrayReader = _ + var input : CharArrayReader = uninitialized def lookahead(): BufferedIterator[Char] = (input.buf drop input.charOffset).iterator.buffered diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala index 0e70cc077fa4..6d49360feb85 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala @@ -4,6 +4,7 @@ package parsing package 
xml import scala.language.unsafeNulls +import scala.compiletime.uninitialized import scala.collection.mutable import core._ @@ -32,7 +33,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { import parser.atSpan - private[parsing] var isPattern: Boolean = _ + private[parsing] var isPattern: Boolean = uninitialized private object xmltypes extends ScalaTypeNames { val _Comment: TypeName = "Comment" diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index c44fe4cf59b4..12acc3d7b4c3 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -11,6 +11,8 @@ import dotty.tools.io._ import Phases._ import config.Printers.plugins.{ println => debug } +import scala.compiletime.uninitialized + /** Support for run-time loading of compiler plugins. * * @author Lex Spoon @@ -44,7 +46,7 @@ trait Plugins { goods map (_.get) } - private var _roughPluginsList: List[Plugin] = _ + private var _roughPluginsList: List[Plugin] = uninitialized protected def roughPluginsList(using Context): List[Plugin] = if (_roughPluginsList == null) { _roughPluginsList = loadRoughPluginsList @@ -96,7 +98,7 @@ trait Plugins { plugs } - private var _plugins: List[Plugin] = _ + private var _plugins: List[Plugin] = uninitialized def plugins(using Context): List[Plugin] = if (_plugins == null) { _plugins = loadPlugins diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index d0330a955148..60cfeeced209 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -29,7 +29,7 @@ import xsbti.api.DependencyContext._ import scala.jdk.CollectionConverters.* import scala.collection.{Set, mutable} - +import scala.compiletime.uninitialized /** This phase sends information on classes' dependencies to sbt via callbacks. * @@ -519,9 +519,9 @@ class DependencyRecorder { } } - private var lastOwner: Symbol = _ - private var lastDepSource: Symbol = _ - private var lastFoundCache: FoundDepsInClass | Null = _ + private var lastOwner: Symbol = uninitialized + private var lastDepSource: Symbol = uninitialized + private var lastFoundCache: FoundDepsInClass | Null = uninitialized /** The source of the dependency according to `nonLocalEnclosingClass` * if it exists, otherwise fall back to `responsibleForImports`. @@ -558,7 +558,7 @@ class DependencyRecorder { clazz } - private var _responsibleForImports: Symbol = _ + private var _responsibleForImports: Symbol = uninitialized /** Top level import dependencies are registered as coming from a first top level * class/trait/object declared in the compilation unit. If none exists, issue a warning and return NoSymbol. diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index 8964beb26051..a018bbd1a3ac 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -14,6 +14,8 @@ import core.Constants._ import util.Store import dotty.tools.uncheckedNN +import scala.compiletime.uninitialized + /** This phase translates variables that are captured in closures to * heap-allocated refs. 
*/ @@ -25,7 +27,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: override def description: String = CapturedVars.description - private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = _ + private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = uninitialized private def captured(using Context) = ctx.store(Captured) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala index 808cf928ecc2..4f8f1b195bb3 100644 --- a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala +++ b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala @@ -3,6 +3,8 @@ package transform import core.Contexts._ +import scala.compiletime.uninitialized + /** Utility class for lazy values whose evaluation depends on a context. * This should be used whenever the evaluation of a lazy expression * depends on some context, but the value can be re-used afterwards @@ -12,7 +14,7 @@ import core.Contexts._ * the expression intiializing the lazy val depends only on the root context, but not any changes afterwards. */ class CtxLazy[T](expr: Context ?=> T) { - private var myValue: T = _ + private var myValue: T = uninitialized private var forced = false def apply()(using Context): T = { if (!forced) { diff --git a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala index 08189e107d94..46285c324307 100644 --- a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala +++ b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala @@ -6,6 +6,7 @@ import Symbols.*, Contexts.*, Types.*, Flags.*, Decorators.* import SymUtils.* import collection.mutable.{LinkedHashMap, LinkedHashSet} import annotation.constructorOnly +import scala.compiletime.uninitialized import dotty.tools.backend.sjs.JSDefinitions.jsdefn @@ -51,10 +52,10 @@ abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Co private val logicOwner = new LinkedHashMap[Symbol, Symbol] /** A flag to indicate whether new free variables have been found */ - private var changedFreeVars: Boolean = _ + private var changedFreeVars: Boolean = uninitialized /** A flag to indicate whether lifted owners have changed */ - private var changedLogicOwner: Boolean = _ + private var changedLogicOwner: Boolean = uninitialized private def newSymSet: LinkedHashSet[Symbol] = new LinkedHashSet[Symbol] diff --git a/compiler/src/dotty/tools/dotc/transform/Flatten.scala b/compiler/src/dotty/tools/dotc/transform/Flatten.scala index 25df51d0916d..84f3d35c80b7 100644 --- a/compiler/src/dotty/tools/dotc/transform/Flatten.scala +++ b/compiler/src/dotty/tools/dotc/transform/Flatten.scala @@ -10,6 +10,8 @@ import collection.mutable import MegaPhase.MiniPhase import util.Store +import scala.compiletime.uninitialized + /** Lift nested classes to toplevel */ class Flatten extends MiniPhase with SymTransformer { import ast.tpd._ @@ -24,7 +26,7 @@ class Flatten extends MiniPhase with SymTransformer { override def changesMembers: Boolean = true // the phase removes inner classes - private var LiftedDefs: Store.Location[mutable.ListBuffer[Tree] | Null] = _ + private var LiftedDefs: Store.Location[mutable.ListBuffer[Tree] | Null] = uninitialized private def liftedDefs(using Context) = ctx.store(LiftedDefs) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala 
b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala index bf8a6fa6c7bf..afe78ce1296d 100644 --- a/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala +++ b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala @@ -10,6 +10,8 @@ import collection.immutable import ast.tpd import MegaPhase.MiniPhase +import scala.compiletime.uninitialized + object ForwardDepChecks: import tpd.* @@ -37,8 +39,8 @@ object ForwardDepChecks: (m1, idx + 1) }._1 var maxIndex: Int = Int.MinValue - var refSpan: Span = _ - var refSym: Symbol = _ + var refSpan: Span = uninitialized + var refSym: Symbol = uninitialized override def enterReference(sym: Symbol, span: Span): Unit = if (sym.exists && sym.owner.isTerm) @@ -63,7 +65,7 @@ class ForwardDepChecks extends MiniPhase: override def runsAfter: Set[String] = Set(ElimByName.name) - private var LevelInfo: Store.Location[OptLevelInfo] = _ + private var LevelInfo: Store.Location[OptLevelInfo] = uninitialized private def currentLevel(using Context): OptLevelInfo = ctx.store(LevelInfo) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala index 046147f20d82..9802de34a6e0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala +++ b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala @@ -12,6 +12,8 @@ import MegaPhase._ import Names._ import Constants.Constant +import scala.compiletime.uninitialized + /** The phase is enabled if the -Yinstrument option is set. * If enabled, it counts the number of closures or allocations for each source position. @@ -40,11 +42,11 @@ class Instrumentation extends MiniPhase { thisPhase => "::", "+=", "toString", "newArray", "box", "toCharArray", "termName", "typeName", "slice", "staticRef", "requiredClass") - private var namesToRecord: Set[Name] = _ - private var collectionNamesToRecord: Set[Name] = _ - private var Stats_doRecord: Symbol = _ - private var Stats_doRecordSize: Symbol = _ - private var CollectionIterableClass: ClassSymbol = _ + private var namesToRecord: Set[Name] = uninitialized + private var collectionNamesToRecord: Set[Name] = uninitialized + private var Stats_doRecord: Symbol = uninitialized + private var Stats_doRecordSize: Symbol = uninitialized + private var CollectionIterableClass: ClassSymbol = uninitialized override def prepareForUnit(tree: Tree)(using Context): Context = namesToRecord = namesOfInterest.map(_.toTermName).toSet diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala index 6ec0f330efff..5be43fd56952 100644 --- a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala +++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala @@ -18,6 +18,8 @@ import ExplicitOuter.outer import util.Store import collection.mutable.{HashMap, LinkedHashMap, ListBuffer} +import scala.compiletime.uninitialized + object LambdaLift: import ast.tpd._ @@ -266,7 +268,7 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase => // lambda lift for super calls right. Witness the implementation restrictions to // this effect in scalac. 
- private var Lifter: Store.Location[Lifter] = _ + private var Lifter: Store.Location[Lifter] = uninitialized private def lifter(using Context) = ctx.store(Lifter) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index b433e37e39c0..4fabaf99f843 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -19,6 +19,7 @@ import transform.MegaPhase.MiniPhase import transform.SymUtils.* import scala.collection.mutable +import scala.compiletime.uninitialized class LazyVals extends MiniPhase with IdentityDenotTransformer { import LazyVals._ @@ -47,7 +48,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val containerFlagsMask: FlagSet = Method | Lazy | Accessor | Module /** A map of lazy values to the fields they should null after initialization. */ - private var lazyValNullables: IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] | Null = _ + private var lazyValNullables: IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] | Null = uninitialized private def nullableFor(sym: Symbol)(using Context) = { // optimisation: value only used once, we can remove the value from the map val nullables = lazyValNullables.nn.remove(sym) diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala index fe70a1659036..eff634b485b8 100644 --- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala @@ -2,6 +2,8 @@ package dotty.tools package dotc package transform +import scala.compiletime.uninitialized + import core._ import Contexts._, Phases._, Symbols._, Decorators._ import Flags.PackageVal @@ -31,8 +33,8 @@ object MegaPhase { */ abstract class MiniPhase extends Phase { - private[MegaPhase] var superPhase: MegaPhase = _ - private[MegaPhase] var idxInGroup: Int = _ + private[MegaPhase] var superPhase: MegaPhase = uninitialized + private[MegaPhase] var idxInGroup: Int = uninitialized /** List of names of phases that should have finished their processing of all compilation units * before this phase starts @@ -151,7 +153,7 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { else s"MegaPhase{${miniPhases.head.phaseName},...,${miniPhases.last.phaseName}}" - private var relaxedTypingCache: Boolean = _ + private var relaxedTypingCache: Boolean = uninitialized private var relaxedTypingKnown = false override final def relaxedTyping: Boolean = { diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala index af6533cfc17f..91f394866035 100644 --- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala +++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala @@ -19,6 +19,7 @@ import StdNames.nme import sjs.JSSymUtils._ import util.Store +import scala.compiletime.uninitialized object Memoize { val name: String = "memoize" @@ -52,7 +53,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => override def description: String = Memoize.description - private var MyState: Store.Location[MyState] = _ + private var MyState: Store.Location[MyState] = uninitialized private def myState(using Context): MyState = ctx.store(MyState) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala 
b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index 0e38e9c074cd..92039a8b9af8 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -10,6 +10,8 @@ import collection.immutable.BitSet import scala.annotation.tailrec import cc.isCaptureChecking +import scala.compiletime.uninitialized + /** A module that can produce a kind of iterator (`Cursor`), * which yields all pairs of overriding/overridden symbols * that are visible in some baseclass, unless there's a parent class @@ -118,10 +120,10 @@ object OverridingPairs: private var nextEntry = curEntry /** The current candidate symbol for overriding */ - var overriding: Symbol = _ + var overriding: Symbol = uninitialized /** If not null: The symbol overridden by overriding */ - var overridden: Symbol = _ + var overridden: Symbol = uninitialized //@M: note that next is called once during object initialization final def hasNext: Boolean = nextEntry != null diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 741b9d1627fe..45330b6e9f5d 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -13,6 +13,8 @@ import transform.MegaPhase.MiniPhase import util.LinearSet import dotty.tools.uncheckedNN +import scala.compiletime.uninitialized + /** A Tail Rec Transformer. * * What it does: @@ -232,7 +234,7 @@ class TailRec extends MiniPhase { var failureReported: Boolean = false /** The `tailLabelN` label symbol, used to encode a `continue` from the infinite `while` loop. */ - private var myContinueLabel: Symbol | Null = _ + private var myContinueLabel: Symbol | Null = uninitialized def continueLabel(using Context): Symbol = { if (myContinueLabel == null) myContinueLabel = newSymbol(method, TailLabelName.fresh(), Label, defn.UnitType) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala index 705b3cc404a8..71783c509596 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala @@ -3,6 +3,7 @@ package dotc package transform package sjs +import scala.compiletime.uninitialized import MegaPhase._ import core.Annotations._ @@ -236,7 +237,7 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => override def description: String = ExplicitJSClasses.description - private var MyState: Store.Location[MyState] = _ + private var MyState: Store.Location[MyState] = uninitialized private def myState(using Context) = ctx.store(MyState) override def initContext(ctx: FreshContext): Unit = diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5cba406a302e..58323f708144 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -37,6 +37,7 @@ import annotation.tailrec import scala.annotation.internal.sharable import scala.annotation.threadUnsafe +import scala.compiletime.uninitialized /** Implicit resolution */ object Implicits: @@ -620,7 +621,7 @@ trait ImplicitRunInfo: object collectParts extends TypeTraverser: - private var parts: mutable.LinkedHashSet[Type] = _ + private var parts: mutable.LinkedHashSet[Type] = uninitialized private val partSeen = util.HashSet[Type]() 
def traverse(t: Type) = try diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala index ba05cba229ae..4850e7bdffa9 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala @@ -12,6 +12,8 @@ import StdNames.nme import printing.Texts.Text import NameKinds.QualifiedName +import scala.compiletime.uninitialized + object ImportInfo { case class RootRef(refFn: () => TermRef, isPredef: Boolean = false) @@ -66,7 +68,7 @@ class ImportInfo(symf: Context ?=> Symbol, } mySym.uncheckedNN } - private var mySym: Symbol | Null = _ + private var mySym: Symbol | Null = uninitialized /** The (TermRef) type of the qualifier of the import clause */ def site(using Context): Type = importSym.info match { @@ -180,7 +182,7 @@ class ImportInfo(symf: Context ?=> Symbol, private val isLanguageImport: Boolean = untpd.languageImport(qualifier).isDefined - private var myUnimported: Symbol | Null = _ + private var myUnimported: Symbol | Null = uninitialized private var featureCache: SimpleIdentityMap[TermName, java.lang.Boolean] = SimpleIdentityMap.empty diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 44d8077e44db..6d6580e740db 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -30,6 +30,7 @@ import reporting._ import config.Feature.sourceVersion import config.SourceVersion._ +import scala.compiletime.uninitialized /** This class creates symbols from definitions and imports and gives them * lazy types. @@ -1078,7 +1079,7 @@ class Namer { typer: Typer => protected implicit val completerCtx: Context = localContext(cls) - private var localCtx: Context = _ + private var localCtx: Context = uninitialized /** info to be used temporarily while completing the class, to avoid cyclic references. */ private var tempInfo: TempClassInfo | Null = null diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index 21fa9eed0df4..7d129e128518 100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -13,6 +13,8 @@ import config.Feature.migrateTo3 import reporting.trace import printing.Formatting.hl +import scala.compiletime.uninitialized + /** Provides `check` method to check that all top-level definitions * in tree are variance correct. Does not recurse inside methods. * The method should be invoked once for each Template. @@ -67,7 +69,7 @@ class VarianceChecker(using Context) { import tpd._ private object Validator extends TypeAccumulator[Option[VarianceError]] { - private var base: Symbol = _ + private var base: Symbol = uninitialized /** The variance of a symbol occurrence of `tvar` seen at the level of the definition of `base`. * The search proceeds from `base` to the owner of `tvar`. diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala index 6d013717ec52..8c0506573109 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala @@ -1,6 +1,8 @@ package dotty.tools package dotc.util +import scala.compiletime.uninitialized + object GenericHashMap: /** The number of elements up to which dense packing is used. 
@@ -27,9 +29,9 @@ abstract class GenericHashMap[Key, Value] (initialCapacity: Int, capacityMultiple: Int) extends MutableMap[Key, Value]: import GenericHashMap.DenseLimit - protected var used: Int = _ - protected var limit: Int = _ - protected var table: Array[AnyRef | Null] = _ + protected var used: Int = uninitialized + protected var limit: Int = uninitialized + protected var table: Array[AnyRef | Null] = uninitialized clear() private def allocate(capacity: Int) = diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala index 7abe40a8e13d..3c30e8e73300 100644 --- a/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala @@ -2,6 +2,8 @@ package dotty.tools.dotc.util import dotty.tools.uncheckedNN +import scala.compiletime.uninitialized + object GenericHashSet: /** The number of elements up to which dense packing is used. @@ -24,9 +26,9 @@ object GenericHashSet: abstract class GenericHashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] { import GenericHashSet.DenseLimit - protected var used: Int = _ - protected var limit: Int = _ - protected var table: Array[AnyRef | Null] = _ + protected var used: Int = uninitialized + protected var limit: Int = uninitialized + protected var table: Array[AnyRef | Null] = uninitialized clear() diff --git a/compiler/src/dotty/tools/dotc/util/IntMap.scala b/compiler/src/dotty/tools/dotc/util/IntMap.scala index 008ea866f70e..1d04567e99c7 100644 --- a/compiler/src/dotty/tools/dotc/util/IntMap.scala +++ b/compiler/src/dotty/tools/dotc/util/IntMap.scala @@ -1,5 +1,7 @@ package dotty.tools.dotc.util +import scala.compiletime.uninitialized + /** A dense map from some `Key` type to `Int. Dense means: All keys and values * are stored in arrays from 0 up to the size of the map. Keys and values * can be obtained by index using `key(index)` and `value(index)`. Values @@ -19,7 +21,7 @@ package dotty.tools.dotc.util */ final class IntMap[Key](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends PerfectHashing[Key](initialCapacity, capacityMultiple): - private var values: Array[Int] = _ + private var values: Array[Int] = uninitialized def default: Int = -1 diff --git a/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala b/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala index fca790837959..1351afb68f3d 100644 --- a/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala +++ b/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala @@ -1,5 +1,7 @@ package dotty.tools.dotc.util +import scala.compiletime.uninitialized + object PerfectHashing: /** The number of elements up to which dense packing is used. 
@@ -22,9 +24,9 @@ object PerfectHashing: class PerfectHashing[Key](initialCapacity: Int = 8, capacityMultiple: Int = 2): import PerfectHashing.DenseLimit - private var used: Int = _ - private var table: Array[Int] = _ - private var keys: Array[AnyRef] = _ + private var used: Int = uninitialized + private var table: Array[Int] = uninitialized + private var keys: Array[AnyRef] = uninitialized clear() diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala index 3462036d7ba6..d35509f591a3 100644 --- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala +++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala @@ -13,6 +13,7 @@ import Chars._ import scala.annotation.internal.sharable import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import scala.compiletime.uninitialized import scala.util.chaining.given import java.io.File.separator @@ -137,7 +138,7 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends buf.toArray } - private var lineIndicesCache: Array[Int] = _ + private var lineIndicesCache: Array[Int] = uninitialized private def lineIndices: Array[Int] = if lineIndicesCache eq null then lineIndicesCache = calculateLineIndicesFromContents() diff --git a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala index d539c1986f8d..8a12ae22be37 100644 --- a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala +++ b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala @@ -4,6 +4,8 @@ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Phases.Phase +import scala.compiletime.uninitialized + /** A phase that collects user defined top level imports. 
* * These imports must be collected as typed trees and therefore @@ -14,7 +16,7 @@ class CollectTopLevelImports extends Phase { def phaseName: String = "collectTopLevelImports" - private var myImports: List[Import] = _ + private var myImports: List[Import] = uninitialized def imports: List[Import] = myImports def run(using Context): Unit = { diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index c647ef302bb9..972baa39877b 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -10,6 +10,7 @@ import reporting.Diagnostic import transform.ValueClasses import util.StackTraceOps.* +import scala.compiletime.uninitialized import scala.util.control.NonFatal /** This rendering object uses `ClassLoader`s to accomplish crossing the 4th @@ -24,10 +25,10 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): import Rendering._ - var myClassLoader: AbstractFileClassLoader = _ + var myClassLoader: AbstractFileClassLoader = uninitialized /** (value, maxElements, maxCharacters) => String */ - var myReplStringOf: (Object, Int, Int) => String = _ + var myReplStringOf: (Object, Int, Int) => String = uninitialized /** Class loader used to load compiled code */ private[repl] def classLoader()(using Context) = diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 2471f6bece42..6bb3d21dd45f 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -36,6 +36,7 @@ import org.jline.reader._ import scala.annotation.tailrec import scala.collection.mutable +import scala.compiletime.uninitialized import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import scala.util.Using @@ -116,10 +117,10 @@ class ReplDriver(settings: Array[String], rendering = new Rendering(classLoader) } - private var rootCtx: Context = _ - private var shouldStart: Boolean = _ - private var compiler: ReplCompiler = _ - protected var rendering: Rendering = _ + private var rootCtx: Context = uninitialized + private var shouldStart: Boolean = uninitialized + private var compiler: ReplCompiler = uninitialized + protected var rendering: Rendering = uninitialized // initialize the REPL session as part of the constructor so that once `run` // is called, we're in business diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala index 03b61c393d35..a96a2765d56a 100644 --- a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala +++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala @@ -17,6 +17,7 @@ import dotty.Properties import interfaces.Diagnostic.{ERROR, WARNING} import scala.io.Codec +import scala.compiletime.uninitialized class TestReporter protected (outWriter: PrintWriter, logLevel: Int) extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with MessageRendering { @@ -81,9 +82,9 @@ object TestReporter { private val failedTestsFileName: String = "last-failed.log" private val failedTestsFile: JFile = new JFile(s"$testLogsDirName/$failedTestsFileName") - private var outFile: JFile = _ - private var logWriter: PrintWriter = _ - private var failedTestsWriter: PrintWriter = _ + private var outFile: JFile = uninitialized + private var logWriter: PrintWriter = uninitialized + private var failedTestsWriter: PrintWriter = uninitialized private def initLog() = if 
(logWriter eq null) { val date = new Date diff --git a/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala b/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala index 8e9a27e766b4..9047bb6737dc 100644 --- a/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala +++ b/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala @@ -14,6 +14,7 @@ import scala.concurrent.duration.Duration import scala.concurrent.{ Await, Future } import scala.concurrent.ExecutionContext.Implicits.global import scala.collection.mutable +import scala.compiletime.uninitialized /** Vulpix spawns JVM subprocesses (`numberOfSlaves`) in order to run tests * without compromising the main JVM @@ -70,8 +71,8 @@ trait RunnerOrchestration { withRunner(_.runMain(classPath)) private class Runner(private var process: Process) { - private var childStdout: BufferedReader = _ - private var childStdin: PrintStream = _ + private var childStdout: BufferedReader = uninitialized + private var childStdin: PrintStream = uninitialized /** Checks if `process` is still alive * From 276a20b99bd91bf02c330db442262fc90f694cce Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Thu, 2 Nov 2023 15:02:43 +0100 Subject: [PATCH 021/216] Remove unnecessary guard from migration warnings --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 5073cbad9b6a..c845ea8f74c7 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3518,7 +3518,7 @@ object Parsers { /** ‘*' | ‘_' */ def wildcardSelector() = - if in.token == USCORE && sourceVersion.isAtLeast(future) then + if in.token == USCORE then report.errorOrMigrationWarning( em"`_` is no longer supported for a wildcard $exprName; use `*` instead${rewriteNotice(`future-migration`)}", in.sourcePos(), @@ -3538,7 +3538,7 @@ object Parsers { /** id [‘as’ (id | ‘_’) */ def namedSelector(from: Ident) = if in.token == ARROW || isIdent(nme.as) then - if in.token == ARROW && sourceVersion.isAtLeast(future) then + if in.token == ARROW then report.errorOrMigrationWarning( em"The $exprName renaming `a => b` is no longer supported ; use `a as b` instead${rewriteNotice(`future-migration`)}", in.sourcePos(), From dc1e35a829b24240044d096b18e1c614a60ed6ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Thu, 2 Nov 2023 17:42:03 +0100 Subject: [PATCH 022/216] Completions should prepend, not replace as it is for Scala 2 (#18803) By default, completions in Scala 2 prepend and only replace when there is an exact match. This PR unifies the experience between the Scala versions, which seems more intuitive. The change is as follows (the order is: new logic, old logic, and Scala 2 logic): https://github.com/lampepfl/dotty/assets/48657087/c037c322-5613-4b95-a6e5-090b4e8827b6 In the future, this should be improved by changing the implementation to use `InsertReplaceEdit` instead of `TextEdit`. That would allow users to decide which behaviour they prefer, but it first has to be implemented in Metals to properly handle the new logic; for now the key is to unify the behaviours.
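As a minimal, self-contained sketch of the new range selection (`EditRange`, `chooseEditRange` and `demo` are hypothetical names for illustration, not the actual presentation-compiler API): if the completion text extends what was already typed, the whole identifier is replaced; otherwise only the text up to the cursor is replaced, so the completion is prepended before the existing suffix.

    // Hypothetical, simplified model of the edit-range choice: `start`/`end`
    // delimit the identifier around the cursor, `cursor` is the caret offset,
    // `oldText` is the existing identifier and `newText` is the completion.
    case class EditRange(start: Int, end: Int)

    def chooseEditRange(oldText: String, newText: String,
                        start: Int, cursor: Int, end: Int): EditRange =
      if newText.startsWith(oldText) then
        EditRange(start, end)    // completion extends the existing text: replace all of it
      else
        EditRange(start, cursor) // otherwise replace up to the cursor only, keeping the suffix

    @main def demo(): Unit =
      // "printl<cursor>println" completed with "println": prepend, keep the suffix
      assert(chooseEditRange("printlprintln", "println", 2, 8, 15) == EditRange(2, 8))
      // "print<cursor>ln" completed with "println": replace the whole identifier
      assert(chooseEditRange("println", "println", 2, 7, 9) == EditRange(2, 9))

For reference, the LSP `InsertReplaceEdit` structure mentioned above is specified at: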
https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#insertReplaceEdit --- .../tools/pc/completions/CompletionPos.scala | 5 +-- .../pc/completions/CompletionProvider.scala | 27 +++++------- .../pc/tests/completion/CompletionSuite.scala | 44 +++++++++++++++++++ 3 files changed, 57 insertions(+), 19 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala index 9ce7939c10fa..29699bd05203 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala @@ -29,10 +29,9 @@ case class CompletionPos( ): def sourcePos: SourcePosition = cursorPos.withSpan(Spans.Span(start, end)) + def stripSuffixEditRange: l.Range = new l.Range(cursorPos.offsetToPos(start), cursorPos.offsetToPos(end)) + def toEditRange: l.Range = cursorPos.withStart(start).withEnd(cursorPos.point).toLsp - def toEditRange: l.Range = - new l.Range(cursorPos.offsetToPos(start), cursorPos.offsetToPos(end)) - end toEditRange end CompletionPos object CompletionPos: diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 78f4affe8c49..323f63050377 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -151,10 +151,7 @@ class CompletionProvider( indexedContext: IndexedContext )(using ctx: Context): CompletionItem = val printer = - ShortenedTypePrinter(search, IncludeDefaultParam.ResolveLater)(using - indexedContext - ) - val editRange = completionPos.toEditRange + ShortenedTypePrinter(search, IncludeDefaultParam.ResolveLater)(using indexedContext) // For overloaded signatures we get multiple symbols, so we need // to recalculate the description @@ -165,24 +162,22 @@ class CompletionProvider( val ident = completion.insertText.getOrElse(completion.label) def mkItem( - insertText: String, + newText: String, additionalEdits: List[TextEdit] = Nil, range: Option[LspRange] = None ): CompletionItem = - val nameEdit = new TextEdit( - range.getOrElse(editRange), - insertText - ) + val oldText = params.text.substring(completionPos.start, completionPos.end) + val editRange = if newText.startsWith(oldText) then completionPos.stripSuffixEditRange + else completionPos.toEditRange + + val textEdit = new TextEdit(range.getOrElse(editRange), newText) + val item = new CompletionItem(label) item.setSortText(f"${idx}%05d") item.setDetail(description) - item.setFilterText( - completion.filterText.getOrElse(completion.label) - ) - item.setTextEdit(nameEdit) - item.setAdditionalTextEdits( - (completion.additionalEdits ++ additionalEdits).asJava - ) + item.setFilterText(completion.filterText.getOrElse(completion.label)) + item.setTextEdit(textEdit) + item.setAdditionalTextEdits((completion.additionalEdits ++ additionalEdits).asJava) completion.insertMode.foreach(item.setInsertTextMode) completion diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index 213dd7157293..a64a6dfac6a2 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ 
b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -1500,3 +1500,47 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin, ) + @Test def `prepend-instead-of-replace` = + checkEdit( + """|object O: + | printl@@println() + |""".stripMargin, + """|object O: + | printlnprintln() + |""".stripMargin, + assertSingleItem = false + ) + + @Test def `prepend-instead-of-replace-duplicate-word` = + checkEdit( + """|object O: + | println@@println() + |""".stripMargin, + """|object O: + | printlnprintln() + |""".stripMargin, + assertSingleItem = false + ) + + @Test def `replace-when-inside` = + checkEdit( + """|object O: + | print@@ln() + |""".stripMargin, + """|object O: + | println() + |""".stripMargin, + assertSingleItem = false + ) + + @Test def `replace-exact-same` = + checkEdit( + """|object O: + | println@@() + |""".stripMargin, + """|object O: + | println() + |""".stripMargin, + assertSingleItem = false + ) + From 04a65dd2029627042a679845b10e68b7104bfc33 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 1 Nov 2023 23:01:07 +0100 Subject: [PATCH 023/216] prevent crash when extension not found --- compiler/src/dotty/tools/dotc/typer/Implicits.scala | 1 + compiler/src/dotty/tools/dotc/typer/Typer.scala | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5cba406a302e..a7e524c72bd7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -431,6 +431,7 @@ object Implicits: /** A failed search */ case class SearchFailure(tree: Tree) extends SearchResult { + require(tree.tpe.isInstanceOf[SearchFailureType], s"unexpected type for ${tree}") final def isAmbiguous: Boolean = tree.tpe.isInstanceOf[AmbiguousImplicits | TooUnspecific] final def reason: SearchFailureType = tree.tpe.asInstanceOf[SearchFailureType] } diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ed688d182913..66c79658b6ab 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3625,8 +3625,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val remembered = // report AmbiguousReferences as priority, otherwise last error (errs.filter(_.msg.isInstanceOf[AmbiguousReference]) ++ errs).take(1) for err <- remembered do + val tree = if app.isEmpty then qual else app rememberSearchFailure(qual, - SearchFailure(app.withType(FailedExtension(app, selectionProto, err.msg)))) + SearchFailure(tree.withType(FailedExtension(tree, selectionProto, err.msg)))) catch case ex: TypeError => nestedFailure(ex) // try an implicit conversion or given extension From d19d0546711641448e6ddddc826e95ab64174d76 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 2 Nov 2023 14:15:13 +0100 Subject: [PATCH 024/216] restore intercepting UnpickleExceptions --- .../dotty/tools/dotc/core/SymbolLoaders.scala | 29 +++++++++++++------ 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 3969a09a69ee..ce72981752e0 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -24,7 +24,7 @@ import ast.desugar import parsing.JavaParsers.OutlineJavaParser import parsing.Parsers.OutlineParser 
-import dotty.tools.tasty.TastyHeaderUnpickler +import dotty.tools.tasty.{TastyHeaderUnpickler, UnpickleException} object SymbolLoaders { @@ -421,14 +421,25 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { def description(using Context): String = "TASTy file " + tastyFile.toString override def doComplete(root: SymDenotation)(using Context): Unit = - val (classRoot, moduleRoot) = rootDenots(root.asClass) - val tastyBytes = tastyFile.toByteArray - val unpickler = new tasty.DottyUnpickler(tastyBytes) - unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource)) - if mayLoadTreesFromTasty then - classRoot.classSymbol.rootTreeOrProvider = unpickler - moduleRoot.classSymbol.rootTreeOrProvider = unpickler - checkTastyUUID(tastyFile, tastyBytes) + try + val (classRoot, moduleRoot) = rootDenots(root.asClass) + val tastyBytes = tastyFile.toByteArray + val unpickler = new tasty.DottyUnpickler(tastyBytes) + unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource)) + if mayLoadTreesFromTasty then + classRoot.classSymbol.rootTreeOrProvider = unpickler + moduleRoot.classSymbol.rootTreeOrProvider = unpickler + checkTastyUUID(tastyFile, tastyBytes) + catch case e: RuntimeException => + val message = e match + case e: UnpickleException => + i"""TASTy file ${tastyFile.canonicalPath} could not be read, failing with: + | ${Option(e.getMessage).getOrElse("")}""" + case _ => + i"""TASTy file ${tastyFile.canonicalPath} is broken, reading aborted with ${e.getClass} + | ${Option(e.getMessage).getOrElse("")}""" + if (ctx.debug) e.printStackTrace() + throw IOException(message) private def checkTastyUUID(tastyFile: AbstractFile, tastyBytes: Array[Byte])(using Context): Unit = From a6c5b17f29bda7e6c0cadd01f12e8b344c9ef0d1 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 2 Nov 2023 14:15:31 +0100 Subject: [PATCH 025/216] improve error messages for mismatched tasty version --- .../src/dotty/tools/backend/jvm/CodeGen.scala | 4 +- .../dotty/tools/dotc/core/SymbolLoaders.scala | 4 +- .../dotc/core/tasty/TastyUnpickler.scala | 4 +- .../tools/tasty/TastyHeaderUnpickler.scala | 222 +++++++++++++--- .../tasty/TastyHeaderUnpicklerTest.scala | 240 ++++++++++++++++-- .../backend/jvm/GenBCode.scala | 4 +- 6 files changed, 405 insertions(+), 73 deletions(-) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index 9572777095e0..c70bb3fac60c 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -24,7 +24,7 @@ import StdNames.nme import java.io.DataOutputStream import java.nio.channels.ClosedByInterruptException -import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } +import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler, UnpicklerConfig } import scala.tools.asm import scala.tools.asm.tree._ @@ -94,7 +94,7 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( for (binary <- unit.pickled.get(claszSymbol.asClass)) { generatedTasty += GeneratedTasty(store, binary) val tasty = - val uuid = new TastyHeaderUnpickler(binary()).readHeader() + val uuid = new TastyHeaderUnpickler(UnpicklerConfig.scala3Compiler, binary()).readHeader() val lo = uuid.getMostSignificantBits val hi = uuid.getLeastSignificantBits diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala 
index ce72981752e0..920b15973ffa 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -24,7 +24,7 @@ import ast.desugar import parsing.JavaParsers.OutlineJavaParser import parsing.Parsers.OutlineParser -import dotty.tools.tasty.{TastyHeaderUnpickler, UnpickleException} +import dotty.tools.tasty.{TastyHeaderUnpickler, UnpickleException, UnpicklerConfig} object SymbolLoaders { @@ -447,7 +447,7 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { val className = tastyFile.name.stripSuffix(".tasty") tastyFile.resolveSibling(className + ".class") if classfile != null then - val tastyUUID = new TastyHeaderUnpickler(tastyBytes).readHeader() + val tastyUUID = new TastyHeaderUnpickler(UnpicklerConfig.scala3Compiler, tastyBytes).readHeader() new ClassfileTastyUUIDParser(classfile)(ctx).checkTastyUUID(tastyUUID) else // This will be the case in any of our tests that compile with `-Youtput-only-tasty` diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala index 70bdec7780e2..44802b4dbd46 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala @@ -4,7 +4,7 @@ package tasty import scala.language.unsafeNulls -import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader, TastyHeaderUnpickler} +import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader, TastyHeaderUnpickler, UnpicklerConfig} import TastyFormat.NameTags._, TastyFormat.nameTagToString import TastyBuffer.NameRef @@ -88,7 +88,7 @@ class TastyUnpickler(reader: TastyReader) { result } - new TastyHeaderUnpickler(reader).readHeader() + new TastyHeaderUnpickler(UnpicklerConfig.scala3Compiler, reader).readHeader() locally { until(readEnd()) { nameAtRef.add(readNameContents()) } diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala index c0ed5dbd58fa..0a7f67d4da96 100644 --- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala +++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala @@ -3,6 +3,7 @@ package dotty.tools.tasty import java.util.UUID import TastyFormat.{MajorVersion, MinorVersion, ExperimentalVersion, header} +import TastyHeaderUnpickler.TastyVersion /** * The Tasty Header consists of four fields: @@ -27,12 +28,99 @@ sealed abstract case class TastyHeader( toolingVersion: String ) -class TastyHeaderUnpickler(reader: TastyReader) { +trait UnpicklerConfig { + /** The TASTy major version that this reader supports */ + def majorVersion: Int + /** The TASTy minor version that this reader supports */ + def minorVersion: Int + /** The TASTy experimental version that this reader supports */ + def experimentalVersion: Int + /** The description of the upgraded tool that can read the given TASTy version */ + def upgradedReaderTool(version: TastyVersion): String + /** The description of the upgraded tool that can produce the given TASTy version */ + def upgradedProducerTool(version: TastyVersion): String + /** Additional information to help a user fix the outdated TASTy problem */ + def recompileAdditionalInfo: String + /** Additional information to help a user fix the more recent TASTy problem */ + def upgradeAdditionalInfo(fileVersion: TastyVersion): String +} + +object UnpicklerConfig { + + /** A config where its major, minor and experimental versions are fixed to those in TastyFormat */ + trait 
DefaultTastyVersion extends UnpicklerConfig { + override final def majorVersion: Int = MajorVersion + override final def minorVersion: Int = MinorVersion + override final def experimentalVersion: Int = ExperimentalVersion + } + + trait Scala3Compiler extends UnpicklerConfig { + private def asScala3Compiler(version: TastyVersion): String = + if (version.major == 28) { + // scala 3.x.y series + if (version.experimental > 0) + // scenario here is someone using 3.4.0 to read 3.4.1-RC1-NIGHTLY, in this case, we should show 3.4 nightly. + s"the same nightly or snapshot Scala 3.${version.minor - 1} compiler" + else s"a Scala 3.${version.minor}.0 compiler or newer" + } + else if (version.experimental > 0) "the same Scala compiler" // unknown major version, just say same + else "a more recent Scala compiler" // unknown major version, just say later + + /** The description of the upgraded scala compiler that can read the given TASTy version */ + final def upgradedReaderTool(version: TastyVersion): String = asScala3Compiler(version) + + /** The description of the upgraded scala compiler that can produce the given TASTy version */ + final def upgradedProducerTool(version: TastyVersion): String = asScala3Compiler(version) + + final def recompileAdditionalInfo: String = """ + | Usually this means that the library dependency containing this file should be updated.""".stripMargin + + final def upgradeAdditionalInfo(fileVersion: TastyVersion): String = + if (fileVersion.isExperimental && experimentalVersion == 0) { + """ + | Note that you are using a stable compiler, which can not read experimental TASTy.""".stripMargin + } + else "" + } + + trait Generic extends UnpicklerConfig { + final def upgradedProducerTool(version: TastyVersion): String = + "a later version" + + final def upgradedReaderTool(version: TastyVersion): String = + if (version.isExperimental) s"the version of this tool compatible with TASTy ${version.show}" + else s"a newer version of this tool compatible with TASTy ${version.show}" + + final def recompileAdditionalInfo: String = """ + | Usually this means that the classpath entry of this file should be updated.""".stripMargin + + final def upgradeAdditionalInfo(fileVersion: TastyVersion): String = + if (fileVersion.isExperimental && experimentalVersion == 0) { + """ + | Note that this tool does not support reading experimental TASTy.""".stripMargin + } + else "" + } + + /** A config for the TASTy reader of a scala 3 compiler */ + val scala3Compiler = new UnpicklerConfig with Scala3Compiler with DefaultTastyVersion {} + + /** A config for the TASTy reader of a generic tool */ + val generic = new UnpicklerConfig with Generic with DefaultTastyVersion {} +} + +class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { import TastyHeaderUnpickler._ import reader._ + def this(config: UnpicklerConfig, bytes: Array[Byte]) = this(config, new TastyReader(bytes)) + def this(reader: TastyReader) = this(UnpicklerConfig.generic, reader) def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) + private val toolMajor: Int = config.majorVersion + private val toolMinor: Int = config.minorVersion + private val toolExperimental: Int = config.experimentalVersion + /** reads and verifies the TASTy version, extracting the UUID */ def readHeader(): UUID = readFullHeader().uuid @@ -45,8 +133,11 @@ class TastyHeaderUnpickler(reader: TastyReader) { val fileMajor = readNat() if (fileMajor <= 27) { // old behavior before `tasty-core` 3.0.0-M4 val fileMinor = readNat() - val signature = 
signatureString(fileMajor, fileMinor, 0) - throw new UnpickleException(signature + backIncompatAddendum + toolingAddendum) + val fileVersion = TastyVersion(fileMajor, fileMinor, 0) + val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) + val signature = signatureString(fileVersion, toolVersion, what = "backward", tool = None) + val fix = recompileFix(toolVersion.minStable) + throw new UnpickleException(signature + fix) } else { val fileMinor = readNat() @@ -63,20 +154,38 @@ class TastyHeaderUnpickler(reader: TastyReader) { fileMajor = fileMajor, fileMinor = fileMinor, fileExperimental = fileExperimental, - compilerMajor = MajorVersion, - compilerMinor = MinorVersion, - compilerExperimental = ExperimentalVersion + compilerMajor = toolMajor, + compilerMinor = toolMinor, + compilerExperimental = toolExperimental ) check(validVersion, { - val signature = signatureString(fileMajor, fileMinor, fileExperimental) - val producedByAddendum = s"\nThe TASTy file was produced by $toolingVersion.$toolingAddendum" - val msg = ( - if (fileExperimental != 0) unstableAddendum - else if (fileMajor < MajorVersion) backIncompatAddendum - else forwardIncompatAddendum + // failure means that the TASTy file can not be read, therefore it is either: + // - backwards incompatible major, in which case the library should be recompiled by the minimum stable minor + // version supported by this compiler + // - any experimental in an older minor, in which case the library should be recompiled by the stable + // compiler in the same minor. + // - older experimental in the same minor, in which case the compiler is also experimental, and the library + // should be recompiled by the current compiler + // - forward incompatible, in which case the compiler must be upgraded to the same version as the file. + val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental) + val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) + + val compat = Compatibility.failReason(file = fileVersion, read = toolVersion) + + val what = if (compat < 0) "backward" else "forward" + val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion)) + val fix = ( + if (compat < 0) { + val newCompiler = + if (compat == Compatibility.BackwardIncompatibleMajor) toolVersion.minStable + else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable + else toolVersion // recompile the experimental library with the current experimental compiler + recompileFix(newCompiler) + } + else upgradeFix(fileVersion) ) - signature + msg + producedByAddendum + signature + fix }) val uuid = new UUID(readUncompressedLong(), readUncompressedLong()) @@ -89,40 +198,71 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { private def check(cond: Boolean, msg: => String): Unit = { if (!cond) throw new UnpickleException(msg) } + + private def signatureString( + fileVersion: TastyVersion, toolVersion: TastyVersion, what: String, tool: Option[String]) = { + val optProducedBy = tool.fold("")(t => s" produced by $t") + s"""TASTy file$optProducedBy has a $what incompatible TASTy version ${fileVersion.show}, + | expected ${toolVersion.validRange}.
+ |""".stripMargin + } + + private def recompileFix(producerVersion: TastyVersion) = { + val addendum = config.recompileAdditionalInfo + val newTool = config.upgradedProducerTool(producerVersion) + s""" The source of this file should be recompiled by $newTool.$addendum""".stripMargin + } + + private def upgradeFix(fileVersion: TastyVersion) = { + val addendum = config.upgradeAdditionalInfo(fileVersion) + val newTool = config.upgradedReaderTool(fileVersion) + s""" To read this ${fileVersion.kind} file, use $newTool.$addendum""".stripMargin + } } object TastyHeaderUnpickler { - private def toolingAddendum = ( - if (ExperimentalVersion > 0) - "\nNote that your tooling is currently using an unstable TASTy version." - else - "" - ) - - private def signatureString(fileMajor: Int, fileMinor: Int, fileExperimental: Int) = { - def showMinorVersion(min: Int, exp: Int) = { - val expStr = if (exp == 0) "" else s" [unstable release: $exp]" - s"$min$expStr" - } - val minorVersion = showMinorVersion(MinorVersion, ExperimentalVersion) - val fileMinorVersion = showMinorVersion(fileMinor, fileExperimental) - s"""TASTy signature has wrong version. - | expected: {majorVersion: $MajorVersion, minorVersion: $minorVersion} - | found : {majorVersion: $fileMajor, minorVersion: $fileMinorVersion} - | - |""".stripMargin + private object Compatibility { + final val BackwardIncompatibleMajor = -3 + final val BackwardIncompatibleExperimental = -2 + final val ExperimentalRecompile = -1 + final val ExperimentalUpgrade = 1 + final val ForwardIncompatible = 2 + + /** Given that file can't be read, extract the reason */ + def failReason(file: TastyVersion, read: TastyVersion): Int = + if (file.major == read.major && file.minor == read.minor && file.isExperimental && read.isExperimental) { + if (file.experimental < read.experimental) ExperimentalRecompile // recompile library as compiler is too new + else ExperimentalUpgrade // they should upgrade compiler as library is too new + } + else if (file.major < read.major) + BackwardIncompatibleMajor // pre 3.0.0 + else if (file.isExperimental && file.major == read.major && file.minor <= read.minor) + // e.g. 3.4.0 reading 3.4.0-RC1-NIGHTLY, or 3.3.0 reading 3.0.2-RC1-NIGHTLY + BackwardIncompatibleExperimental + else ForwardIncompatible } - private def unstableAddendum = - """This TASTy file was produced by an unstable release. - |To read this TASTy file, your tooling must be at the same version.""".stripMargin + case class TastyVersion(major: Int, minor: Int, experimental: Int) { + def isExperimental: Boolean = experimental > 0 + + def nextStable: TastyVersion = copy(experimental = 0) - private def backIncompatAddendum = - """This TASTy file was produced by an earlier release that is not supported anymore. - |Please recompile this TASTy with a later version.""".stripMargin + def minStable: TastyVersion = copy(minor = 0, experimental = 0) - private def forwardIncompatAddendum = - """This TASTy file was produced by a more recent, forwards incompatible release. 
- |To read this TASTy file, please upgrade your tooling.""".stripMargin + def show: String = { + val suffix = if (isExperimental) s"-experimental-$experimental" else "" + s"$major.$minor$suffix" + } + + def kind: String = + if (isExperimental) "experimental TASTy" else "TASTy" + + def validRange: String = { + val min = TastyVersion(major, 0, 0) + val max = if (experimental == 0) this else TastyVersion(major, minor - 1, 0) + val extra = Option.when(experimental > 0)(this) + s"stable TASTy from ${min.show} to ${max.show}${extra.fold("")(e => s", or exactly ${e.show}")}" + } + } } diff --git a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala b/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala index 9f54c4b3061b..785ae9de297d 100644 --- a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala +++ b/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala @@ -5,49 +5,230 @@ import org.junit.{Test, Ignore} import TastyFormat._ import TastyBuffer._ +import TastyHeaderUnpickler.TastyVersion -@Ignore // comment if you want to experiment with error messages class TastyHeaderUnpicklerTest { import TastyHeaderUnpicklerTest._ - @Test def vanilla: Unit = { - runTest(MajorVersion, MinorVersion, ExperimentalVersion, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345") + @Test + def okThisCompilerReadsItself: Unit = { + val file = TastyVersion(MajorVersion, MinorVersion, ExperimentalVersion) + val read = TastyVersion(MajorVersion, MinorVersion, ExperimentalVersion) + runTest(file, read, "Scala (current)") } - @Test def failBumpExperimental: Unit = { - (runTest(MajorVersion, MinorVersion, ExperimentalVersion + 1, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345")) + @Test + def okExperimentalCompilerReadsItself: Unit = { + val file = TastyVersion(MajorVersion, MinorVersion, 1) + val read = TastyVersion(MajorVersion, MinorVersion, 1) + runTest(file, read, "Scala (current)") } - @Test def failBumpMinor: Unit = { - (runTest(MajorVersion, MinorVersion + 1, ExperimentalVersion, "Scala 3.1.0-RC1")) + @Test + def okStableCompilerReadsItself: Unit = { + val file = TastyVersion(MajorVersion, MinorVersion, 0) + val read = TastyVersion(MajorVersion, MinorVersion, 0) + runTest(file, read, "Scala (current)") } - @Test def failBumpMajor: Unit = { - (runTest(MajorVersion + 1, MinorVersion, ExperimentalVersion, "Scala 4.0.0-M1")) + @Test + def okReadOldStableMinorFromStable: Unit = { + val file = TastyVersion(28, 2, 0) + val read = TastyVersion(28, 3, 0) + runTest(file, read, "Scala 3.2.2") } - @Test def failBumpMajorFinal: Unit = { - (runTest(MajorVersion + 1, MinorVersion, 0, "Scala 4.0.0")) + @Test + def okReadOldStableMinorFromExperimental: Unit = { + val file = TastyVersion(28, 2, 0) + val read = TastyVersion(28, 3, 1) + runTest(file, read, "Scala 3.2.2") } - @Test def okSubtractExperimental: Unit = { - (runTest(MajorVersion, MinorVersion, ExperimentalVersion - 1, "Scala 3.0.0")) + @Test + def failReadExperimentalFromStableSameMinor: Unit = { + val file = TastyVersion(28, 4, 1) + val read = TastyVersion(28, 4, 0) + expectUnpickleError(runTest(file, read, "Scala 3.4.0-RC1-bin-SNAPSHOT")) { + """TASTy file produced by Scala 3.4.0-RC1-bin-SNAPSHOT has a backward incompatible TASTy version 28.4-experimental-1, + | expected stable TASTy from 28.0 to 28.4. + | The source of this file should be recompiled by a Scala 3.4.0 compiler or newer. 
+ | Usually this means that the library dependency containing this file should be updated.""".stripMargin + } + } + + @Test + def failReadExperimentalFromOldMinor: Unit = { + val file = TastyVersion(28, 3, 1) + val read = TastyVersion(28, 4, 0) + expectUnpickleError(runTest(file, read, "Scala 3.2.1-RC1-bin-SNAPSHOT")) { + """TASTy file produced by Scala 3.2.1-RC1-bin-SNAPSHOT has a backward incompatible TASTy version 28.3-experimental-1, + | expected stable TASTy from 28.0 to 28.4. + | The source of this file should be recompiled by a Scala 3.3.0 compiler or newer. + | Usually this means that the library dependency containing this file should be updated.""".stripMargin + } + } + + @Test + def failReadOldMajor: Unit = { + val file = TastyVersion(27, 3, 0) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 3.0.0-M1")) { + """TASTy file has a backward incompatible TASTy version 27.3, + | expected stable TASTy from 28.0 to 28.3. + | The source of this file should be recompiled by a Scala 3.0.0 compiler or newer. + | Usually this means that the library dependency containing this file should be updated.""".stripMargin + } + } + + @Test + def failReadOldMajor_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(27, 3, 0) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 3.0.0-M1", generic = true)) { + """TASTy file has a backward incompatible TASTy version 27.3, + | expected stable TASTy from 28.0 to 28.3. + | The source of this file should be recompiled by a later version. + | Usually this means that the classpath entry of this file should be updated.""".stripMargin + } + } + + @Test + def failReadOldExperimentalFromSameMinorWhileExperimental: Unit = { + val file = TastyVersion(28, 4, 1) + val read = TastyVersion(28, 4, 2) + expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC1-NIGHTLY")) { + """TASTy file produced by Scala 3.3.3-RC1-NIGHTLY has a backward incompatible TASTy version 28.4-experimental-1, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2. + | The source of this file should be recompiled by the same nightly or snapshot Scala 3.3 compiler. + | Usually this means that the library dependency containing this file should be updated.""".stripMargin + } + } + + @Test + def failReadOldExperimentalFromSameMinorWhileExperimental_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(28, 4, 1) + val read = TastyVersion(28, 4, 2) + expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC1-NIGHTLY", generic = true)) { + """TASTy file produced by Scala 3.3.3-RC1-NIGHTLY has a backward incompatible TASTy version 28.4-experimental-1, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2. + | The source of this file should be recompiled by a later version. + | Usually this means that the classpath entry of this file should be updated.""".stripMargin + } + } + + @Test + def failReadNewerStableMinorFromStable: Unit = { + val file = TastyVersion(28, 3, 0) + val read = TastyVersion(28, 2, 0) + expectUnpickleError(runTest(file, read, "Scala 3.3.1")) { + """TASTy file produced by Scala 3.3.1 has a forward incompatible TASTy version 28.3, + | expected stable TASTy from 28.0 to 28.2. 
+ | To read this TASTy file, use a Scala 3.3.0 compiler or newer.""".stripMargin + } + } + + @Test + def failReadNewerStableMinorFromStable_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(28, 3, 0) + val read = TastyVersion(28, 2, 0) + expectUnpickleError(runTest(file, read, "Scala 3.3.1", generic = true)) { + """TASTy file produced by Scala 3.3.1 has a forward incompatible TASTy version 28.3, + | expected stable TASTy from 28.0 to 28.2. + | To read this TASTy file, use a newer version of this tool compatible with TASTy 28.3.""".stripMargin + } } - @Test def okSubtractMinor: Unit = { - (runTest(MajorVersion, MinorVersion - 1, ExperimentalVersion, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345")) + @Test + def failReadNewerExperimentalMinorFromStable: Unit = { + val file = TastyVersion(28, 3, 1) + val read = TastyVersion(28, 2, 0) + expectUnpickleError(runTest(file, read, "Scala 3.2.2-RC1-NIGHTLY")) { + """TASTy file produced by Scala 3.2.2-RC1-NIGHTLY has a forward incompatible TASTy version 28.3-experimental-1, + | expected stable TASTy from 28.0 to 28.2. + | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.2 compiler. + | Note that you are using a stable compiler, which can not read experimental TASTy.""".stripMargin + } + } + + @Test + def failReadNewerStableMajor: Unit = { + val file = TastyVersion(29, 0, 0) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 4.0.0")) { + """TASTy file produced by Scala 4.0.0 has a forward incompatible TASTy version 29.0, + | expected stable TASTy from 28.0 to 28.3. + | To read this TASTy file, use a more recent Scala compiler.""".stripMargin + } + } + + @Test + def failReadNewerExperimentalMajor: Unit = { + val file = TastyVersion(29, 0, 1) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 4.0.0-M1")) { + """TASTy file produced by Scala 4.0.0-M1 has a forward incompatible TASTy version 29.0-experimental-1, + | expected stable TASTy from 28.0 to 28.3. + | To read this experimental TASTy file, use the same Scala compiler. + | Note that you are using a stable compiler, which can not read experimental TASTy.""".stripMargin + } + } + + @Test + def failReadNewerExperimentalMajor_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(29, 0, 1) + val read = TastyVersion(28, 3, 0) + expectUnpickleError(runTest(file, read, "Scala 4.0.0-M1", generic = true)) { + """TASTy file produced by Scala 4.0.0-M1 has a forward incompatible TASTy version 29.0-experimental-1, + | expected stable TASTy from 28.0 to 28.3. + | To read this experimental TASTy file, use the version of this tool compatible with TASTy 29.0-experimental-1. + | Note that this tool does not support reading experimental TASTy.""".stripMargin + } + } + + @Test + def failReadStableFromExperimentalSameMinor: Unit = { + val file = TastyVersion(28, 4, 0) + val read = TastyVersion(28, 4, 1) // 3.4.0-RC1-NIGHTLY + expectUnpickleError(runTest(file, read, "Scala 3.4.2")) { + """TASTy file produced by Scala 3.4.2 has a forward incompatible TASTy version 28.4, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. 
+ | To read this TASTy file, use a Scala 3.4.0 compiler or newer.""".stripMargin + } + } + + @Test + def failReadNewerExperimentalFromExperimentalSameMinor: Unit = { + val file = TastyVersion(28, 4, 2) + val read = TastyVersion(28, 4, 1) + expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY")) { + """TASTy file produced by Scala 3.3.3-RC2-NIGHTLY has a forward incompatible TASTy version 28.4-experimental-2, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. + | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.3 compiler.""".stripMargin + } } - @Test def failSubtractMajor: Unit = { - (runTest(MajorVersion - 1, MinorVersion, ExperimentalVersion, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345")) + @Test + def failReadNewerExperimentalFromExperimentalSameMinor_generic: Unit = { + // We check the generic version here because it will produce a different message. + val file = TastyVersion(28, 4, 2) + val read = TastyVersion(28, 4, 1) + expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY", generic = true)) { + """TASTy file produced by Scala 3.3.3-RC2-NIGHTLY has a forward incompatible TASTy version 28.4-experimental-2, + | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. + | To read this experimental TASTy file, use the version of this tool compatible with TASTy 28.4-experimental-2.""".stripMargin + } } } object TastyHeaderUnpicklerTest { - def fillHeader(maj: Int, min: Int, exp: Int, compiler: String): TastyBuffer = { val compilerBytes = compiler.getBytes(java.nio.charset.StandardCharsets.UTF_8) val buf = new TastyBuffer(header.length + 32 + compilerBytes.length) @@ -62,22 +243,33 @@ object TastyHeaderUnpicklerTest { buf } - def runTest(maj: Int, min: Int, exp: Int, compiler: String): Unit = { - val headerBuffer = fillHeader(maj, min, exp, compiler) - val bs = headerBuffer.bytes.clone + case class CustomScalaConfig(compilerVersion: TastyVersion) extends UnpicklerConfig.Scala3Compiler { + override def majorVersion: Int = compilerVersion.major + override def minorVersion: Int = compilerVersion.minor + override def experimentalVersion: Int = compilerVersion.experimental + } - val hr = new TastyHeaderUnpickler(bs) + case class CustomGenericConfig(compilerVersion: TastyVersion) extends UnpicklerConfig.Generic { + override def majorVersion: Int = compilerVersion.major + override def minorVersion: Int = compilerVersion.minor + override def experimentalVersion: Int = compilerVersion.experimental + } + def runTest(file: TastyVersion, read: TastyVersion, compiler: String, generic: Boolean = false): Unit = { + val headerBuffer = fillHeader(file.major, file.minor, file.experimental, compiler) + val bs = headerBuffer.bytes.clone + val config = if (generic) CustomGenericConfig(read) else CustomScalaConfig(read) + val hr = new TastyHeaderUnpickler(config, new TastyReader(bs)) hr.readFullHeader() } - def expectUnpickleError(op: => Unit) = { + def expectUnpickleError(op: => Unit)(message: String) = { try { op fail() } catch { - case err: UnpickleException => () + case err: UnpickleException => assert(err.getMessage.contains(message)) } } diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala index 71d007370fe7..8ca2eab9ea8a 100644 --- a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala +++ b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala @@ -26,7 +26,7 @@ import Decorators.em import java.io.DataOutputStream import 
java.nio.channels.ClosedByInterruptException -import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } +import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler, UnpicklerConfig } import scala.tools.asm import scala.tools.asm.Handle @@ -285,7 +285,7 @@ class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrim throw ex finally outstream.close() - val uuid = new TastyHeaderUnpickler(binary()).readHeader() + val uuid = new TastyHeaderUnpickler(UnpicklerConfig.scala3Compiler, binary()).readHeader() val lo = uuid.getMostSignificantBits val hi = uuid.getLeastSignificantBits From 67351cb79e257439d21218d9cdb15fc4a13059f2 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 2 Nov 2023 16:41:35 +0100 Subject: [PATCH 026/216] add url to scala docs for tasty versioning --- .../tools/tasty/TastyHeaderUnpickler.scala | 8 ++- .../tasty/TastyHeaderUnpicklerTest.scala | 60 ++++++++++++++----- 2 files changed, 51 insertions(+), 17 deletions(-) diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala index 0a7f67d4da96..ecfbda54d847 100644 --- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala +++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala @@ -137,7 +137,7 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) val signature = signatureString(fileVersion, toolVersion, what = "backward", tool = None) val fix = recompileFix(toolVersion.minStable) - throw new UnpickleException(signature + fix) + throw new UnpickleException(signature + fix + tastyAddendum) } else { val fileMinor = readNat() @@ -185,7 +185,7 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { } else upgradeFix(fileVersion) ) - signature + fix + signature + fix + tastyAddendum }) val uuid = new UUID(readUncompressedLong(), readUncompressedLong()) @@ -218,6 +218,10 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { val newTool = config.upgradedReaderTool(fileVersion) s""" To read this ${fileVersion.kind} file, use $newTool.$addendum""".stripMargin } + + private def tastyAddendum: String = """ + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } object TastyHeaderUnpickler { diff --git a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala b/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala index 785ae9de297d..58805ce27aee 100644 --- a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala +++ b/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala @@ -54,7 +54,9 @@ class TastyHeaderUnpicklerTest { """TASTy file produced by Scala 3.4.0-RC1-bin-SNAPSHOT has a backward incompatible TASTy version 28.4-experimental-1, | expected stable TASTy from 28.0 to 28.4. | The source of this file should be recompiled by a Scala 3.4.0 compiler or newer. - | Usually this means that the library dependency containing this file should be updated.""".stripMargin + | Usually this means that the library dependency containing this file should be updated. 
+ | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -66,7 +68,9 @@ class TastyHeaderUnpicklerTest { """TASTy file produced by Scala 3.2.1-RC1-bin-SNAPSHOT has a backward incompatible TASTy version 28.3-experimental-1, | expected stable TASTy from 28.0 to 28.4. | The source of this file should be recompiled by a Scala 3.3.0 compiler or newer. - | Usually this means that the library dependency containing this file should be updated.""".stripMargin + | Usually this means that the library dependency containing this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -78,7 +82,9 @@ class TastyHeaderUnpicklerTest { """TASTy file has a backward incompatible TASTy version 27.3, | expected stable TASTy from 28.0 to 28.3. | The source of this file should be recompiled by a Scala 3.0.0 compiler or newer. - | Usually this means that the library dependency containing this file should be updated.""".stripMargin + | Usually this means that the library dependency containing this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -91,7 +97,9 @@ class TastyHeaderUnpicklerTest { """TASTy file has a backward incompatible TASTy version 27.3, | expected stable TASTy from 28.0 to 28.3. | The source of this file should be recompiled by a later version. - | Usually this means that the classpath entry of this file should be updated.""".stripMargin + | Usually this means that the classpath entry of this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -103,7 +111,9 @@ class TastyHeaderUnpicklerTest { """TASTy file produced by Scala 3.3.3-RC1-NIGHTLY has a backward incompatible TASTy version 28.4-experimental-1, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2. | The source of this file should be recompiled by the same nightly or snapshot Scala 3.3 compiler. - | Usually this means that the library dependency containing this file should be updated.""".stripMargin + | Usually this means that the library dependency containing this file should be updated. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -116,7 +126,9 @@ class TastyHeaderUnpicklerTest { """TASTy file produced by Scala 3.3.3-RC1-NIGHTLY has a backward incompatible TASTy version 28.4-experimental-1, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2. | The source of this file should be recompiled by a later version. - | Usually this means that the classpath entry of this file should be updated.""".stripMargin + | Usually this means that the classpath entry of this file should be updated. 
+ | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -127,7 +139,9 @@ class TastyHeaderUnpicklerTest { expectUnpickleError(runTest(file, read, "Scala 3.3.1")) { """TASTy file produced by Scala 3.3.1 has a forward incompatible TASTy version 28.3, | expected stable TASTy from 28.0 to 28.2. - | To read this TASTy file, use a Scala 3.3.0 compiler or newer.""".stripMargin + | To read this TASTy file, use a Scala 3.3.0 compiler or newer. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -139,7 +153,9 @@ class TastyHeaderUnpicklerTest { expectUnpickleError(runTest(file, read, "Scala 3.3.1", generic = true)) { """TASTy file produced by Scala 3.3.1 has a forward incompatible TASTy version 28.3, | expected stable TASTy from 28.0 to 28.2. - | To read this TASTy file, use a newer version of this tool compatible with TASTy 28.3.""".stripMargin + | To read this TASTy file, use a newer version of this tool compatible with TASTy 28.3. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -151,7 +167,9 @@ class TastyHeaderUnpicklerTest { """TASTy file produced by Scala 3.2.2-RC1-NIGHTLY has a forward incompatible TASTy version 28.3-experimental-1, | expected stable TASTy from 28.0 to 28.2. | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.2 compiler. - | Note that you are using a stable compiler, which can not read experimental TASTy.""".stripMargin + | Note that you are using a stable compiler, which can not read experimental TASTy. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -162,7 +180,9 @@ class TastyHeaderUnpicklerTest { expectUnpickleError(runTest(file, read, "Scala 4.0.0")) { """TASTy file produced by Scala 4.0.0 has a forward incompatible TASTy version 29.0, | expected stable TASTy from 28.0 to 28.3. - | To read this TASTy file, use a more recent Scala compiler.""".stripMargin + | To read this TASTy file, use a more recent Scala compiler. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -174,7 +194,9 @@ class TastyHeaderUnpicklerTest { """TASTy file produced by Scala 4.0.0-M1 has a forward incompatible TASTy version 29.0-experimental-1, | expected stable TASTy from 28.0 to 28.3. | To read this experimental TASTy file, use the same Scala compiler. - | Note that you are using a stable compiler, which can not read experimental TASTy.""".stripMargin + | Note that you are using a stable compiler, which can not read experimental TASTy. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -187,7 +209,9 @@ class TastyHeaderUnpicklerTest { """TASTy file produced by Scala 4.0.0-M1 has a forward incompatible TASTy version 29.0-experimental-1, | expected stable TASTy from 28.0 to 28.3. 
| To read this experimental TASTy file, use the version of this tool compatible with TASTy 29.0-experimental-1. - | Note that this tool does not support reading experimental TASTy.""".stripMargin + | Note that this tool does not support reading experimental TASTy. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -198,7 +222,9 @@ class TastyHeaderUnpicklerTest { expectUnpickleError(runTest(file, read, "Scala 3.4.2")) { """TASTy file produced by Scala 3.4.2 has a forward incompatible TASTy version 28.4, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. - | To read this TASTy file, use a Scala 3.4.0 compiler or newer.""".stripMargin + | To read this TASTy file, use a Scala 3.4.0 compiler or newer. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -209,7 +235,9 @@ class TastyHeaderUnpicklerTest { expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY")) { """TASTy file produced by Scala 3.3.3-RC2-NIGHTLY has a forward incompatible TASTy version 28.4-experimental-2, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. - | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.3 compiler.""".stripMargin + | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.3 compiler. + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } @@ -221,7 +249,9 @@ class TastyHeaderUnpicklerTest { expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY", generic = true)) { """TASTy file produced by Scala 3.3.3-RC2-NIGHTLY has a forward incompatible TASTy version 28.4-experimental-2, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. - | To read this experimental TASTy file, use the version of this tool compatible with TASTy 28.4-experimental-2.""".stripMargin + | To read this experimental TASTy file, use the version of this tool compatible with TASTy 28.4-experimental-2. 
+ | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin } } From 7993b2993da0a8b23b2a5cb8e8f5a7e98db14e8e Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 2 Nov 2023 17:50:54 +0100 Subject: [PATCH 027/216] move scala3Compiler config to compiler module --- .../src/dotty/tools/backend/jvm/CodeGen.scala | 5 +-- .../dotty/tools/dotc/core/SymbolLoaders.scala | 3 +- .../dotc/core/tasty/TastyUnpickler.scala | 36 ++++++++++++++++++- .../tasty/TastyHeaderUnpicklerTest.scala | 19 ++++++---- .../tools/tasty/TastyHeaderUnpickler.scala | 32 ----------------- .../backend/jvm/GenBCode.scala | 3 +- 6 files changed, 54 insertions(+), 44 deletions(-) rename {tasty/test/dotty/tools => compiler/test/dotty/tools/dotc/core}/tasty/TastyHeaderUnpicklerTest.scala (96%) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index c70bb3fac60c..4bf305f3387c 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -24,7 +24,8 @@ import StdNames.nme import java.io.DataOutputStream import java.nio.channels.ClosedByInterruptException -import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler, UnpicklerConfig } +import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } +import dotty.tools.dotc.core.tasty.TastyUnpickler import scala.tools.asm import scala.tools.asm.tree._ @@ -94,7 +95,7 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( for (binary <- unit.pickled.get(claszSymbol.asClass)) { generatedTasty += GeneratedTasty(store, binary) val tasty = - val uuid = new TastyHeaderUnpickler(UnpicklerConfig.scala3Compiler, binary()).readHeader() + val uuid = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, binary()).readHeader() val lo = uuid.getMostSignificantBits val hi = uuid.getLeastSignificantBits diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 920b15973ffa..12eea3a26df4 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -25,6 +25,7 @@ import ast.desugar import parsing.JavaParsers.OutlineJavaParser import parsing.Parsers.OutlineParser import dotty.tools.tasty.{TastyHeaderUnpickler, UnpickleException, UnpicklerConfig} +import dotty.tools.dotc.core.tasty.TastyUnpickler object SymbolLoaders { @@ -447,7 +448,7 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { val className = tastyFile.name.stripSuffix(".tasty") tastyFile.resolveSibling(className + ".class") if classfile != null then - val tastyUUID = new TastyHeaderUnpickler(UnpicklerConfig.scala3Compiler, tastyBytes).readHeader() + val tastyUUID = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, tastyBytes).readHeader() new ClassfileTastyUUIDParser(classfile)(ctx).checkTastyUUID(tastyUUID) else // This will be the case in any of our tests that compile with `-Youtput-only-tasty` diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala index 44802b4dbd46..59f5600ff44b 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala @@ -5,6 +5,7 @@ package tasty import scala.language.unsafeNulls import 
dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader, TastyHeaderUnpickler, UnpicklerConfig} +import TastyHeaderUnpickler.TastyVersion import TastyFormat.NameTags._, TastyFormat.nameTagToString import TastyBuffer.NameRef @@ -24,6 +25,39 @@ object TastyUnpickler { def apply(ref: NameRef): TermName = names(ref.index) def contents: Iterable[TermName] = names } + + trait Scala3CompilerConfig extends UnpicklerConfig: + private def asScala3Compiler(version: TastyVersion): String = + if (version.major == 28) { + // scala 3.x.y series + if (version.experimental > 0) + // scenario here is someone using 3.4.0 to read 3.4.1-RC1-NIGHTLY, in this case, we should show 3.4 nightly. + s"the same nightly or snapshot Scala 3.${version.minor - 1} compiler" + else s"a Scala 3.${version.minor}.0 compiler or newer" + } + else if (version.experimental > 0) "the same Scala compiler" // unknown major version, just say same + else "a more recent Scala compiler" // unknown major version, just say later + + /** The description of the upgraded scala compiler that can read the given TASTy version */ + final def upgradedReaderTool(version: TastyVersion): String = asScala3Compiler(version) + + /** The description of the upgraded scala compiler that can produce the given TASTy version */ + final def upgradedProducerTool(version: TastyVersion): String = asScala3Compiler(version) + + final def recompileAdditionalInfo: String = """ + | Usually this means that the library dependency containing this file should be updated.""".stripMargin + + final def upgradeAdditionalInfo(fileVersion: TastyVersion): String = + if (fileVersion.isExperimental && experimentalVersion == 0) { + """ + | Note that you are using a stable compiler, which can not read experimental TASTy.""".stripMargin + } + else "" + end Scala3CompilerConfig + + /** A config for the TASTy reader of a scala 3 compiler */ + val scala3CompilerConfig = new Scala3CompilerConfig with UnpicklerConfig.DefaultTastyVersion {} + } import TastyUnpickler._ @@ -88,7 +122,7 @@ class TastyUnpickler(reader: TastyReader) { result } - new TastyHeaderUnpickler(UnpicklerConfig.scala3Compiler, reader).readHeader() + new TastyHeaderUnpickler(scala3CompilerConfig, reader).readHeader() locally { until(readEnd()) { nameAtRef.add(readNameContents()) } diff --git a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala similarity index 96% rename from tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala rename to compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala index 58805ce27aee..c722af979d76 100644 --- a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala @@ -1,11 +1,16 @@ -package dotty.tools.tasty +package dotty.tools.dotc.core.tasty import org.junit.Assert._ import org.junit.{Test, Ignore} -import TastyFormat._ -import TastyBuffer._ -import TastyHeaderUnpickler.TastyVersion +import dotty.tools.tasty.TastyFormat._ +import dotty.tools.tasty.TastyBuffer._ +import dotty.tools.tasty.TastyBuffer +import dotty.tools.tasty.TastyReader +import dotty.tools.tasty.UnpickleException +import dotty.tools.tasty.TastyHeaderUnpickler +import dotty.tools.tasty.TastyHeaderUnpickler.TastyVersion +import dotty.tools.tasty.UnpicklerConfig class TastyHeaderUnpicklerTest { @@ -260,7 +265,7 @@ class TastyHeaderUnpicklerTest { object TastyHeaderUnpicklerTest { def fillHeader(maj: Int, min: Int, exp: Int, compiler: 
String): TastyBuffer = { - val compilerBytes = compiler.getBytes(java.nio.charset.StandardCharsets.UTF_8) + val compilerBytes = compiler.getBytes(java.nio.charset.StandardCharsets.UTF_8).nn val buf = new TastyBuffer(header.length + 32 + compilerBytes.length) for (ch <- header) buf.writeByte(ch.toByte) buf.writeNat(maj) @@ -273,7 +278,7 @@ object TastyHeaderUnpicklerTest { buf } - case class CustomScalaConfig(compilerVersion: TastyVersion) extends UnpicklerConfig.Scala3Compiler { + case class CustomScalaConfig(compilerVersion: TastyVersion) extends TastyUnpickler.Scala3CompilerConfig { override def majorVersion: Int = compilerVersion.major override def minorVersion: Int = compilerVersion.minor override def experimentalVersion: Int = compilerVersion.experimental @@ -299,7 +304,7 @@ object TastyHeaderUnpicklerTest { fail() } catch { - case err: UnpickleException => assert(err.getMessage.contains(message)) + case err: UnpickleException => assert(err.getMessage.nn.contains(message)) } } diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala index ecfbda54d847..237544d99760 100644 --- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala +++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala @@ -54,35 +54,6 @@ object UnpicklerConfig { override final def experimentalVersion: Int = ExperimentalVersion } - trait Scala3Compiler extends UnpicklerConfig { - private def asScala3Compiler(version: TastyVersion): String = - if (version.major == 28) { - // scala 3.x.y series - if (version.experimental > 0) - // scenario here is someone using 3.4.0 to read 3.4.1-RC1-NIGHTLY, in this case, we should show 3.4 nightly. - s"the same nightly or snapshot Scala 3.${version.minor - 1} compiler" - else s"a Scala 3.${version.minor}.0 compiler or newer" - } - else if (version.experimental > 0) "the same Scala compiler" // unknown major version, just say same - else "a more recent Scala compiler" // unknown major version, just say later - - /** The description of the upgraded scala compiler that can read the given TASTy version */ - final def upgradedReaderTool(version: TastyVersion): String = asScala3Compiler(version) - - /** The description of the upgraded scala compiler that can produce the given TASTy version */ - final def upgradedProducerTool(version: TastyVersion): String = asScala3Compiler(version) - - final def recompileAdditionalInfo: String = """ - | Usually this means that the library dependency containing this file should be updated.""".stripMargin - - final def upgradeAdditionalInfo(fileVersion: TastyVersion): String = - if (fileVersion.isExperimental && experimentalVersion == 0) { - """ - | Note that you are using a stable compiler, which can not read experimental TASTy.""".stripMargin - } - else "" - } - trait Generic extends UnpicklerConfig { final def upgradedProducerTool(version: TastyVersion): String = "a later version" @@ -102,9 +73,6 @@ object UnpicklerConfig { else "" } - /** A config for the TASTy reader of a scala 3 compiler */ - val scala3Compiler = new UnpicklerConfig with Scala3Compiler with DefaultTastyVersion {} - /** A config for the TASTy reader of a generic tool */ val generic = new UnpicklerConfig with Generic with DefaultTastyVersion {} } diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala index 8ca2eab9ea8a..1af7e5dd705a 100644 --- a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala +++ 
b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala @@ -27,6 +27,7 @@ import java.io.DataOutputStream import java.nio.channels.ClosedByInterruptException import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler, UnpicklerConfig } +import dotty.tools.tasty.core.TastyUnpickler import scala.tools.asm import scala.tools.asm.Handle @@ -285,7 +286,7 @@ class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrim throw ex finally outstream.close() - val uuid = new TastyHeaderUnpickler(UnpicklerConfig.scala3Compiler, binary()).readHeader() + val uuid = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, binary()).readHeader() val lo = uuid.getMostSignificantBits val hi = uuid.getLeastSignificantBits From 88dd1cab1875bfd17a9c731102fea824fe15a827 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 2 Nov 2023 17:51:43 +0100 Subject: [PATCH 028/216] fix typo --- tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala index 237544d99760..05cecf240770 100644 --- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala +++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala @@ -128,7 +128,7 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { ) check(validVersion, { - // failure means that the TASTy file is can not be read, therefore it is either: + // failure means that the TASTy file cannot be read, therefore it is either: // - backwards incompatible major, in which case the library should be recompiled by the minimum stable minor // version supported by this compiler // - any experimental in an older minor, in which case the library should be recompiled by the stable From 02ec718e4ad4e63b4152d9d71374c3e24e82adaf Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 2 Nov 2023 18:23:32 +0100 Subject: [PATCH 029/216] add explicit result types --- compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala | 2 +- tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala index 59f5600ff44b..679df42daca8 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala @@ -56,7 +56,7 @@ object TastyUnpickler { end Scala3CompilerConfig /** A config for the TASTy reader of a scala 3 compiler */ - val scala3CompilerConfig = new Scala3CompilerConfig with UnpicklerConfig.DefaultTastyVersion {} + val scala3CompilerConfig: UnpicklerConfig = new Scala3CompilerConfig with UnpicklerConfig.DefaultTastyVersion {} } diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala index 05cecf240770..1a67913e68ca 100644 --- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala +++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala @@ -74,7 +74,7 @@ object UnpicklerConfig { } /** A config for the TASTy reader of a generic tool */ - val generic = new UnpicklerConfig with Generic with DefaultTastyVersion {} + val generic: UnpicklerConfig = new UnpicklerConfig with Generic with DefaultTastyVersion {} } class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { From df4da02bcd947fb61991426c2f793a32c9bd1998 Mon Sep 17 00:00:00 
2001 From: Jamie Thompson Date: Fri, 3 Nov 2023 11:44:11 +0100 Subject: [PATCH 030/216] tweak message header --- .../core/tasty/TastyHeaderUnpicklerTest.scala | 30 +++++++++---------- .../tools/tasty/TastyHeaderUnpickler.scala | 8 ++--- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala index c722af979d76..53c1f40638a4 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala @@ -56,7 +56,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 4, 1) val read = TastyVersion(28, 4, 0) expectUnpickleError(runTest(file, read, "Scala 3.4.0-RC1-bin-SNAPSHOT")) { - """TASTy file produced by Scala 3.4.0-RC1-bin-SNAPSHOT has a backward incompatible TASTy version 28.4-experimental-1, + """Backward incompatible TASTy file has version 28.4-experimental-1, produced by Scala 3.4.0-RC1-bin-SNAPSHOT, | expected stable TASTy from 28.0 to 28.4. | The source of this file should be recompiled by a Scala 3.4.0 compiler or newer. | Usually this means that the library dependency containing this file should be updated. @@ -70,7 +70,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 3, 1) val read = TastyVersion(28, 4, 0) expectUnpickleError(runTest(file, read, "Scala 3.2.1-RC1-bin-SNAPSHOT")) { - """TASTy file produced by Scala 3.2.1-RC1-bin-SNAPSHOT has a backward incompatible TASTy version 28.3-experimental-1, + """Backward incompatible TASTy file has version 28.3-experimental-1, produced by Scala 3.2.1-RC1-bin-SNAPSHOT, | expected stable TASTy from 28.0 to 28.4. | The source of this file should be recompiled by a Scala 3.3.0 compiler or newer. | Usually this means that the library dependency containing this file should be updated. @@ -84,7 +84,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(27, 3, 0) val read = TastyVersion(28, 3, 0) expectUnpickleError(runTest(file, read, "Scala 3.0.0-M1")) { - """TASTy file has a backward incompatible TASTy version 27.3, + """Backward incompatible TASTy file has version 27.3, | expected stable TASTy from 28.0 to 28.3. | The source of this file should be recompiled by a Scala 3.0.0 compiler or newer. | Usually this means that the library dependency containing this file should be updated. @@ -99,7 +99,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(27, 3, 0) val read = TastyVersion(28, 3, 0) expectUnpickleError(runTest(file, read, "Scala 3.0.0-M1", generic = true)) { - """TASTy file has a backward incompatible TASTy version 27.3, + """Backward incompatible TASTy file has version 27.3, | expected stable TASTy from 28.0 to 28.3. | The source of this file should be recompiled by a later version. | Usually this means that the classpath entry of this file should be updated. @@ -113,7 +113,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 4, 1) val read = TastyVersion(28, 4, 2) expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC1-NIGHTLY")) { - """TASTy file produced by Scala 3.3.3-RC1-NIGHTLY has a backward incompatible TASTy version 28.4-experimental-1, + """Backward incompatible TASTy file has version 28.4-experimental-1, produced by Scala 3.3.3-RC1-NIGHTLY, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2. | The source of this file should be recompiled by the same nightly or snapshot Scala 3.3 compiler. 
| Usually this means that the library dependency containing this file should be updated. @@ -128,7 +128,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 4, 1) val read = TastyVersion(28, 4, 2) expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC1-NIGHTLY", generic = true)) { - """TASTy file produced by Scala 3.3.3-RC1-NIGHTLY has a backward incompatible TASTy version 28.4-experimental-1, + """Backward incompatible TASTy file has version 28.4-experimental-1, produced by Scala 3.3.3-RC1-NIGHTLY, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2. | The source of this file should be recompiled by a later version. | Usually this means that the classpath entry of this file should be updated. @@ -142,7 +142,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 3, 0) val read = TastyVersion(28, 2, 0) expectUnpickleError(runTest(file, read, "Scala 3.3.1")) { - """TASTy file produced by Scala 3.3.1 has a forward incompatible TASTy version 28.3, + """Forward incompatible TASTy file has version 28.3, produced by Scala 3.3.1, | expected stable TASTy from 28.0 to 28.2. | To read this TASTy file, use a Scala 3.3.0 compiler or newer. | Please refer to the documentation for information on TASTy versioning: @@ -156,7 +156,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 3, 0) val read = TastyVersion(28, 2, 0) expectUnpickleError(runTest(file, read, "Scala 3.3.1", generic = true)) { - """TASTy file produced by Scala 3.3.1 has a forward incompatible TASTy version 28.3, + """Forward incompatible TASTy file has version 28.3, produced by Scala 3.3.1, | expected stable TASTy from 28.0 to 28.2. | To read this TASTy file, use a newer version of this tool compatible with TASTy 28.3. | Please refer to the documentation for information on TASTy versioning: @@ -169,7 +169,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 3, 1) val read = TastyVersion(28, 2, 0) expectUnpickleError(runTest(file, read, "Scala 3.2.2-RC1-NIGHTLY")) { - """TASTy file produced by Scala 3.2.2-RC1-NIGHTLY has a forward incompatible TASTy version 28.3-experimental-1, + """Forward incompatible TASTy file has version 28.3-experimental-1, produced by Scala 3.2.2-RC1-NIGHTLY, | expected stable TASTy from 28.0 to 28.2. | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.2 compiler. | Note that you are using a stable compiler, which can not read experimental TASTy. @@ -183,7 +183,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(29, 0, 0) val read = TastyVersion(28, 3, 0) expectUnpickleError(runTest(file, read, "Scala 4.0.0")) { - """TASTy file produced by Scala 4.0.0 has a forward incompatible TASTy version 29.0, + """Forward incompatible TASTy file has version 29.0, produced by Scala 4.0.0, | expected stable TASTy from 28.0 to 28.3. | To read this TASTy file, use a more recent Scala compiler. | Please refer to the documentation for information on TASTy versioning: @@ -196,7 +196,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(29, 0, 1) val read = TastyVersion(28, 3, 0) expectUnpickleError(runTest(file, read, "Scala 4.0.0-M1")) { - """TASTy file produced by Scala 4.0.0-M1 has a forward incompatible TASTy version 29.0-experimental-1, + """Forward incompatible TASTy file has version 29.0-experimental-1, produced by Scala 4.0.0-M1, | expected stable TASTy from 28.0 to 28.3. | To read this experimental TASTy file, use the same Scala compiler. 
| Note that you are using a stable compiler, which can not read experimental TASTy. @@ -211,7 +211,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(29, 0, 1) val read = TastyVersion(28, 3, 0) expectUnpickleError(runTest(file, read, "Scala 4.0.0-M1", generic = true)) { - """TASTy file produced by Scala 4.0.0-M1 has a forward incompatible TASTy version 29.0-experimental-1, + """Forward incompatible TASTy file has version 29.0-experimental-1, produced by Scala 4.0.0-M1, | expected stable TASTy from 28.0 to 28.3. | To read this experimental TASTy file, use the version of this tool compatible with TASTy 29.0-experimental-1. | Note that this tool does not support reading experimental TASTy. @@ -225,7 +225,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 4, 0) val read = TastyVersion(28, 4, 1) // 3.4.0-RC1-NIGHTLY expectUnpickleError(runTest(file, read, "Scala 3.4.2")) { - """TASTy file produced by Scala 3.4.2 has a forward incompatible TASTy version 28.4, + """Forward incompatible TASTy file has version 28.4, produced by Scala 3.4.2, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. | To read this TASTy file, use a Scala 3.4.0 compiler or newer. | Please refer to the documentation for information on TASTy versioning: @@ -238,7 +238,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 4, 2) val read = TastyVersion(28, 4, 1) expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY")) { - """TASTy file produced by Scala 3.3.3-RC2-NIGHTLY has a forward incompatible TASTy version 28.4-experimental-2, + """Forward incompatible TASTy file has version 28.4-experimental-2, produced by Scala 3.3.3-RC2-NIGHTLY, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.3 compiler. | Please refer to the documentation for information on TASTy versioning: @@ -252,7 +252,7 @@ class TastyHeaderUnpicklerTest { val file = TastyVersion(28, 4, 2) val read = TastyVersion(28, 4, 1) expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY", generic = true)) { - """TASTy file produced by Scala 3.3.3-RC2-NIGHTLY has a forward incompatible TASTy version 28.4-experimental-2, + """Forward incompatible TASTy file has version 28.4-experimental-2, produced by Scala 3.3.3-RC2-NIGHTLY, | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1. | To read this experimental TASTy file, use the version of this tool compatible with TASTy 28.4-experimental-2. 
| Please refer to the documentation for information on TASTy versioning: diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala index 1a67913e68ca..db07666d3be1 100644 --- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala +++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala @@ -103,7 +103,7 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { val fileMinor = readNat() val fileVersion = TastyVersion(fileMajor, fileMinor, 0) val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) - val signature = signatureString(fileVersion, toolVersion, what = "backward", tool = None) + val signature = signatureString(fileVersion, toolVersion, what = "Backward", tool = None) val fix = recompileFix(toolVersion.minStable) throw new UnpickleException(signature + fix + tastyAddendum) } @@ -141,7 +141,7 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { val compat = Compatibility.failReason(file = fileVersion, read = toolVersion) - val what = if (compat < 0) "backward" else "forward" + val what = if (compat < 0) "Backward" else "Forward" val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion)) val fix = ( if (compat < 0) { @@ -169,8 +169,8 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { private def signatureString( fileVersion: TastyVersion, toolVersion: TastyVersion, what: String, tool: Option[String]) = { - val optProducedBy = tool.fold("")(t => s" produced by $t") - s"""TASTy file$optProducedBy has a $what incompatible TASTy version ${fileVersion.show}, + val optProducedBy = tool.fold("")(t => s", produced by $t") + s"""$what incompatible TASTy file has version ${fileVersion.show}$optProducedBy, | expected ${toolVersion.validRange}. 
|""".stripMargin } From f430e449869d9d6b6cf05373086f3d52b0a11805 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 29 Sep 2023 11:59:01 +0200 Subject: [PATCH 031/216] error when reading class file with unknown newer jdk version --- .../core/classfile/ClassfileConstants.scala | 1 + .../dotc/core/classfile/ClassfileParser.scala | 31 ++++++++++--------- .../classfile/ClassfileTastyUUIDParser.scala | 15 +-------- 3 files changed, 19 insertions(+), 28 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala index 4aa60d973264..699a4cc787eb 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala @@ -11,6 +11,7 @@ object ClassfileConstants { inline val JAVA_MINOR_VERSION = 3 inline val JAVA8_MAJOR_VERSION = 52 + inline val JAVA_LATEST_MAJOR_VERSION = 65 /** (see http://java.sun.com/docs/books/jvms/second_edition/jvms-clarify.html) * diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 349dbc445971..698efb058570 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -57,6 +57,22 @@ object ClassfileParser { } } + private[classfile] def parseHeader(classfile: AbstractFile)(using in: DataReader): Unit = { + val magic = in.nextInt + if (magic != JAVA_MAGIC) + throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}") + val minorVersion = in.nextChar.toInt + val majorVersion = in.nextChar.toInt + if ((majorVersion < JAVA_MAJOR_VERSION) || + ((majorVersion == JAVA_MAJOR_VERSION) && + (minorVersion < JAVA_MINOR_VERSION))) + throw new IOException( + s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") + if majorVersion > JAVA_LATEST_MAJOR_VERSION then + throw new IOException( + s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, and was compiled by a newer JDK than supported by this Scala version, please update to a newer Scala version.") + } + abstract class AbstractConstantPool(using in: DataReader) { protected val len = in.nextChar protected val starts = new Array[Int](len) @@ -259,7 +275,7 @@ class ClassfileParser( def run()(using Context): Option[Embedded] = try ctx.base.reusableDataReader.withInstance { reader => implicit val reader2 = reader.reset(classfile) report.debuglog("[class] >> " + classRoot.fullName) - parseHeader() + parseHeader(classfile) this.pool = new ConstantPool val res = parseClass() this.pool = null @@ -273,19 +289,6 @@ class ClassfileParser( |${Option(e.getMessage).getOrElse("")}""") } - private def parseHeader()(using in: DataReader): Unit = { - val magic = in.nextInt - if (magic != JAVA_MAGIC) - throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}") - val minorVersion = in.nextChar.toInt - val majorVersion = in.nextChar.toInt - if ((majorVersion < JAVA_MAJOR_VERSION) || - ((majorVersion == JAVA_MAJOR_VERSION) && - (minorVersion < JAVA_MINOR_VERSION))) - throw new IOException( - s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least 
$JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") - } - /** Return the class symbol of the given name. */ def classNameToSymbol(name: Name)(using Context): Symbol = val nameStr = name.toString diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala index 4c4885fd5313..1a3887abeae5 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala @@ -26,7 +26,7 @@ class ClassfileTastyUUIDParser(classfile: AbstractFile)(ictx: Context) { def checkTastyUUID(tastyUUID: UUID)(using Context): Unit = try ctx.base.reusableDataReader.withInstance { reader => implicit val reader2 = reader.reset(classfile) - parseHeader() + ClassfileParser.parseHeader(classfile) this.pool = new ConstantPool checkTastyAttr(tastyUUID) this.pool = null @@ -39,19 +39,6 @@ class ClassfileTastyUUIDParser(classfile: AbstractFile)(ictx: Context) { |${Option(e.getMessage).getOrElse("")}""") } - private def parseHeader()(using in: DataReader): Unit = { - val magic = in.nextInt - if (magic != JAVA_MAGIC) - throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}") - val minorVersion = in.nextChar.toInt - val majorVersion = in.nextChar.toInt - if ((majorVersion < JAVA_MAJOR_VERSION) || - ((majorVersion == JAVA_MAJOR_VERSION) && - (minorVersion < JAVA_MINOR_VERSION))) - throw new IOException( - s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") - } - private def checkTastyAttr(tastyUUID: UUID)(using ctx: Context, in: DataReader): Unit = { in.nextChar // jflags in.nextChar // nameIdx From 2183bf97114c9478ebfc586cc6ffe5f1379a229d Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 31 Oct 2023 18:20:29 +0100 Subject: [PATCH 032/216] track JDK version only when runtime exception occurs --- .../core/classfile/ClassfileConstants.scala | 1 - .../dotc/core/classfile/ClassfileParser.scala | 37 +++++++++++++++---- .../classfile/ClassfileTastyUUIDParser.scala | 11 ++++-- 3 files changed, 38 insertions(+), 11 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala index 699a4cc787eb..4aa60d973264 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala @@ -11,7 +11,6 @@ object ClassfileConstants { inline val JAVA_MINOR_VERSION = 3 inline val JAVA8_MAJOR_VERSION = 52 - inline val JAVA_LATEST_MAJOR_VERSION = 65 /** (see http://java.sun.com/docs/books/jvms/second_edition/jvms-clarify.html) * diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 698efb058570..a56ac695b57a 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -29,6 +29,28 @@ import scala.compiletime.uninitialized object ClassfileParser { + object Header: + opaque type Version = Long + + object Version: + val Unknown: Version = -1L + + def brokenVersionAddendum(classfileVersion: Version)(using Context): String = + if classfileVersion.exists then + val (maj, min) = (classfileVersion.majorVersion, 
classfileVersion.minorVersion) + val scalaVersion = config.Properties.versionNumberString + i""" (version $maj.$min), + | please check the JDK compatibility of your Scala version ($scalaVersion)""" + else + "" + + def apply(major: Int, minor: Int): Version = + (major.toLong << 32) | (minor.toLong & 0xFFFFFFFFL) + extension (version: Version) + def exists: Boolean = version != Unknown + def majorVersion: Int = (version >> 32).toInt + def minorVersion: Int = (version & 0xFFFFFFFFL).toInt + import ClassfileConstants._ /** Marker trait for unpicklers that can be embedded in classfiles. */ @@ -57,7 +79,7 @@ object ClassfileParser { } } - private[classfile] def parseHeader(classfile: AbstractFile)(using in: DataReader): Unit = { + private[classfile] def parseHeader(classfile: AbstractFile)(using in: DataReader): Header.Version = { val magic = in.nextInt if (magic != JAVA_MAGIC) throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}") @@ -68,9 +90,7 @@ object ClassfileParser { (minorVersion < JAVA_MINOR_VERSION))) throw new IOException( s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") - if majorVersion > JAVA_LATEST_MAJOR_VERSION then - throw new IOException( - s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, and was compiled by a newer JDK than supported by this Scala version, please update to a newer Scala version.") + Header.Version(majorVersion, minorVersion) } abstract class AbstractConstantPool(using in: DataReader) { @@ -263,6 +283,7 @@ class ClassfileParser( protected var classTParams: Map[Name, Symbol] = Map() private var Scala2UnpicklingMode = Mode.Scala2Unpickling + private var classfileVersion: Header.Version = Header.Version.Unknown classRoot.info = NoLoader().withDecls(instanceScope) moduleRoot.info = NoLoader().withDecls(staticScope).withSourceModule(staticModule) @@ -275,7 +296,7 @@ class ClassfileParser( def run()(using Context): Option[Embedded] = try ctx.base.reusableDataReader.withInstance { reader => implicit val reader2 = reader.reset(classfile) report.debuglog("[class] >> " + classRoot.fullName) - parseHeader(classfile) + classfileVersion = parseHeader(classfile) this.pool = new ConstantPool val res = parseClass() this.pool = null @@ -284,9 +305,11 @@ class ClassfileParser( catch { case e: RuntimeException => if (ctx.debug) e.printStackTrace() + val addendum = Header.Version.brokenVersionAddendum(classfileVersion) throw new IOException( - i"""class file ${classfile.canonicalPath} is broken, reading aborted with ${e.getClass} - |${Option(e.getMessage).getOrElse("")}""") + i""" class file ${classfile.canonicalPath} is broken$addendum, + | reading aborted with ${e.getClass}: + | ${Option(e.getMessage).getOrElse("")}""") } /** Return the class symbol of the given name. 
*/ diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala index 1a3887abeae5..0393744dde5c 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala @@ -14,6 +14,8 @@ import dotty.tools.dotc.util._ import dotty.tools.io.AbstractFile import dotty.tools.tasty.TastyReader +import ClassfileParser.Header + import java.io.IOException import java.lang.Integer.toHexString import java.util.UUID @@ -23,10 +25,11 @@ class ClassfileTastyUUIDParser(classfile: AbstractFile)(ictx: Context) { import ClassfileConstants._ private var pool: ConstantPool = uninitialized // the classfile's constant pool + private var classfileVersion: Header.Version = Header.Version.Unknown def checkTastyUUID(tastyUUID: UUID)(using Context): Unit = try ctx.base.reusableDataReader.withInstance { reader => implicit val reader2 = reader.reset(classfile) - ClassfileParser.parseHeader(classfile) + this.classfileVersion = ClassfileParser.parseHeader(classfile) this.pool = new ConstantPool checkTastyAttr(tastyUUID) this.pool = null @@ -34,9 +37,11 @@ class ClassfileTastyUUIDParser(classfile: AbstractFile)(ictx: Context) { catch { case e: RuntimeException => if (ctx.debug) e.printStackTrace() + val addendum = Header.Version.brokenVersionAddendum(classfileVersion) throw new IOException( - i"""class file ${classfile.canonicalPath} is broken, reading aborted with ${e.getClass} - |${Option(e.getMessage).getOrElse("")}""") + i""" class file ${classfile.canonicalPath} is broken$addendum, + | reading aborted with ${e.getClass}: + | ${Option(e.getMessage).getOrElse("")}""") } private def checkTastyAttr(tastyUUID: UUID)(using ctx: Context, in: DataReader): Unit = { From 81063dddf5ecb2b1f69ff86ee6cd640f35a76e4d Mon Sep 17 00:00:00 2001 From: Katarzyna Marek Date: Fri, 3 Nov 2023 16:28:02 +0100 Subject: [PATCH 033/216] bugfix: rename end marker --- .../src/main/dotty/tools/pc/PcCollector.scala | 40 +++++++++++++++++-- .../pc/PcDocumentHighlightProvider.scala | 2 +- .../tools/pc/PcInlineValueProviderImpl.scala | 15 ++++--- .../dotty/tools/pc/PcRenameProvider.scala | 2 +- .../tools/pc/PcSemanticTokensProvider.scala | 13 ++++-- .../tools/pc/tests/edit/PcRenameSuite.scala | 28 ++++++++++++- .../pc/tests/tokens/SemanticTokensSuite.scala | 8 ++++ .../tools/pc/utils/DefSymbolCollector.scala | 3 +- 8 files changed, 93 insertions(+), 18 deletions(-) diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala index 1f8eea8f4688..8ffd8ed28044 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala @@ -69,7 +69,7 @@ abstract class PcCollector[T]( case _ => rawPath def collect( parent: Option[Tree] - )(tree: Tree, pos: SourcePosition, symbol: Option[Symbol]): T + )(tree: Tree| EndMarker, pos: SourcePosition, symbol: Option[Symbol]): T /** * @return (adjusted position, should strip backticks) @@ -423,7 +423,7 @@ abstract class PcCollector[T]( parent: Option[Tree] ): Set[T] = def collect( - tree: Tree, + tree: Tree | EndMarker, pos: SourcePosition, symbol: Option[Symbol] = None ) = @@ -461,6 +461,9 @@ abstract class PcCollector[T]( case df: NamedDefTree if df.span.isCorrect && df.nameSpan.isCorrect && filter(df) && !isGeneratedGiven(df) => + def collectEndMarker = + 
EndMarker.getPosition(df, pos, sourceText).map: + collect(EndMarker(df.symbol), _) val annots = collectTrees(df.mods.annotations) val traverser = new PcCollector.DeepFolderWithParent[Set[T]]( @@ -470,7 +473,7 @@ abstract class PcCollector[T]( occurrences + collect( df, pos.withSpan(df.nameSpan) - ) + ) ++ collectEndMarker ) { case (set, tree) => traverser(set, tree) } @@ -635,3 +638,34 @@ case class ExtensionParamOccurence( sym: Symbol, methods: List[untpd.Tree] ) + +case class EndMarker(symbol: Symbol) + +object EndMarker: + /** + * Matches end marker line from start to the name's beginning. + * E.g. + * end /* some comment */ + */ + private val endMarkerRegex = """.*end(/\*.*\*/|\s)+""".r + def getPosition(df: NamedDefTree, pos: SourcePosition, sourceText: String)( + implicit ct: Context + ): Option[SourcePosition] = + val name = df.name.toString() + val endMarkerLine = + sourceText.slice(df.span.start, df.span.end).split('\n').last + val index = endMarkerLine.length() - name.length() + if index < 0 then None + else + val (possiblyEndMarker, possiblyEndMarkerName) = + endMarkerLine.splitAt(index) + Option.when( + possiblyEndMarkerName == name && + endMarkerRegex.matches(possiblyEndMarker) + )( + pos + .withStart(df.span.end - name.length()) + .withEnd(df.span.end) + ) + end getPosition +end EndMarker diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala index 71e36297cbba..aeb9480930f9 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala @@ -19,7 +19,7 @@ final class PcDocumentHighlightProvider( def collect( parent: Option[Tree] )( - tree: Tree, + tree: Tree | EndMarker, toAdjust: SourcePosition, sym: Option[Symbol] ): DocumentHighlight = diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala index afb858ab3242..2d4a9d8643c9 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala @@ -24,7 +24,7 @@ import org.eclipse.lsp4j as l final class PcInlineValueProviderImpl( val driver: InteractiveDriver, val params: OffsetParams -) extends PcCollector[Occurence](driver, params) +) extends PcCollector[Option[Occurence]](driver, params) with InlineValueProvider: val text = params.text.toCharArray() @@ -32,16 +32,19 @@ final class PcInlineValueProviderImpl( val position: l.Position = pos.toLsp.getStart() override def collect(parent: Option[Tree])( - tree: Tree, + tree: Tree | EndMarker, pos: SourcePosition, sym: Option[Symbol] - ): Occurence = - val (adjustedPos, _) = adjust(pos) - Occurence(tree, parent, adjustedPos) + ): Option[Occurence] = + tree match + case tree: Tree => + val (adjustedPos, _) = adjust(pos) + Some(Occurence(tree, parent, adjustedPos)) + case _ => None override def defAndRefs(): Either[String, (Definition, List[Reference])] = val newctx = driver.currentCtx.fresh.setCompilationUnit(unit) - val allOccurences = result() + val allOccurences = result().flatten for definition <- allOccurences .collectFirst { case Occurence(defn: ValDef, _, pos) => diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala index 
4477529d7124..56924f3cfded 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala @@ -34,7 +34,7 @@ final class PcRenameProvider( def collect( parent: Option[Tree] - )(tree: Tree, toAdjust: SourcePosition, sym: Option[Symbol]): l.TextEdit = + )(tree: Tree | EndMarker, toAdjust: SourcePosition, sym: Option[Symbol]): l.TextEdit = val (pos, stripBackticks) = adjust(toAdjust, forRename = true) l.TextEdit( pos.toLsp, diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala index 5f47b4d0d8bb..d70fa32c2b10 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala @@ -32,7 +32,7 @@ final class PcSemanticTokensProvider( * 3. type parameters, * In all those cases we don't have a specific value for sure. */ - private def isDeclaration(tree: Tree) = tree match + private def isDeclaration(tree: Tree | EndMarker) = tree match case df: ValOrDefDef => df.rhs.isEmpty case df: TypeDef => df.rhs match @@ -49,7 +49,8 @@ final class PcSemanticTokensProvider( * that the compiler sees them as vals, as it's not clear * if they should be declaration/definition at all. */ - private def isDefinition(tree: Tree) = tree match + private def isDefinition(tree: Tree | EndMarker) = tree match + case _: EndMarker => true case df: Bind => true case df: ValOrDefDef => !df.rhs.isEmpty && !df.symbol.isAllOf(Flags.EnumCase) @@ -62,8 +63,12 @@ final class PcSemanticTokensProvider( object Collector extends PcCollector[Option[Node]](driver, params): override def collect( parent: Option[Tree] - )(tree: Tree, pos: SourcePosition, symbol: Option[Symbol]): Option[Node] = - val sym = symbol.fold(tree.symbol)(identity) + )(tree: Tree | EndMarker, pos: SourcePosition, symbol: Option[Symbol]): Option[Node] = + val sym = + tree match + case tree: Tree => + symbol.fold(tree.symbol)(identity) + case EndMarker(sym) => sym if !pos.exists || sym == null || sym == NoSymbol then None else Some( diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/PcRenameSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/PcRenameSuite.scala index 256b0cb1075a..23c81fcf515a 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/PcRenameSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/PcRenameSuite.scala @@ -478,9 +478,33 @@ class PcRenameSuite extends BasePcRenameSuite: | def <>(f: Int => Int): Bar = Bar(x.map(f)) |} | - |val f = + |val f = | for { | b <- Bar(List(1,2,3)) | } yield b - |""".stripMargin, + |""".stripMargin + ) + + @Test def `end-marker` = + check( + """|def <>(a: Int) = + | ??? + |end <> + |""".stripMargin + ) + + @Test def `end-marker-with-comment` = + check( + """|def <>(a: Int) = + | ??? + |end /* a comment */ <> /* a comment */ + |""".stripMargin + ) + + @Test def `end-marker-wrong` = + check( + """|def <> = + | def bar = + | ??? 
+ | end bar""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/tokens/SemanticTokensSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/tokens/SemanticTokensSuite.scala index c1a36bc59ed4..9ef153e51da1 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/tokens/SemanticTokensSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/tokens/SemanticTokensSuite.scala @@ -343,3 +343,11 @@ class SemanticTokensSuite extends BaseSemanticTokensSuite: |} |""".stripMargin, ) + + @Test def `end-marker` = + check( + """|def <>/*method,definition*/ = + | 1 + |end <>/*method,definition*/ + |""".stripMargin, + ) diff --git a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala index 168ccb033423..0171d2a0d76d 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala @@ -8,6 +8,7 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.PcCollector +import dotty.tools.pc.EndMarker final class DefSymbolCollector( driver: InteractiveDriver, @@ -15,7 +16,7 @@ final class DefSymbolCollector( ) extends PcCollector[Option[Symbol]](driver, params): def collect(parent: Option[Tree])( - tree: Tree, + tree: Tree | EndMarker, toAdjust: SourcePosition, sym: Option[Symbol] ): Option[Symbol] = From 51be8863fc1a5b1396b14e2e1d59ff3ab43263da Mon Sep 17 00:00:00 2001 From: EnzeXing Date: Fri, 3 Nov 2023 11:53:15 -0400 Subject: [PATCH 034/216] Fix i18629 --- compiler/src/dotty/tools/dotc/transform/init/Objects.scala | 5 +++-- tests/init-global/pos/i18629.scala | 5 +++++ 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 tests/init-global/pos/i18629.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 8556ced63008..53c210a0f0e4 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -1418,7 +1418,7 @@ object Objects: val applyDenot = getMemberMethod(scrutineeType, nme.apply, applyType(elemType)) val applyRes = call(scrutinee, applyDenot.symbol, TraceValue(Bottom, summon[Trace]) :: Nil, scrutineeType, superType = NoType, needResolve = true) - if isWildcardStarArg(pats.last) then + if isWildcardStarArgList(pats) then if pats.size == 1 then // call .toSeq val toSeqDenot = scrutineeType.member(nme.toSeq).suchThat(_.info.isParameterless) @@ -1433,7 +1433,8 @@ object Objects: end if else // no patterns like `xs*` - for pat <- pats do evalPattern(applyRes, pat) + for pat <- pats do evalPattern(applyRes, pat) + end if end evalSeqPatterns diff --git a/tests/init-global/pos/i18629.scala b/tests/init-global/pos/i18629.scala new file mode 100644 index 000000000000..029319a5785b --- /dev/null +++ b/tests/init-global/pos/i18629.scala @@ -0,0 +1,5 @@ +object Foo { + val bar = List() match { + case List() => ??? 
+  }
+}

From f0d76ca2b6b02d2f960931a74b87d945b12d8909 Mon Sep 17 00:00:00 2001
From: Jamie Thompson
Date: Fri, 3 Nov 2023 17:08:03 +0100
Subject: [PATCH 035/216] Error for refutable for-generator patterns in 3.4

---
 compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 2 +-
 compiler/src/dotty/tools/dotc/typer/Checking.scala  | 8 +++++++-
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
index c845ea8f74c7..b8d39186bc50 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -2771,7 +2771,7 @@ object Parsers {
       atSpan(startOffset(pat), accept(LARROW)) {
         val checkMode =
           if casePat then GenCheckMode.FilterAlways
-          else if sourceVersion.isAtLeast(`future`) then GenCheckMode.Check
+          else if sourceVersion.isAtLeast(`3.4`) then GenCheckMode.Check
           else if sourceVersion.isAtLeast(`3.2`) then GenCheckMode.CheckAndFilter
           else GenCheckMode.FilterNow // filter on source version < 3.2, for backward compat
         GenFrom(pat, subExpr(), checkMode)
diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala
index c8026ad5784b..dc0f4d729789 100644
--- a/compiler/src/dotty/tools/dotc/typer/Checking.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala
@@ -923,7 +923,13 @@ trait Checking {
               |
               |If $usage is intentional, this can be communicated by $fix,
               |which $addendum.$rewriteMsg"""),
-          pos, warnFrom = `3.2`, errorFrom = `future`)
+          pos,
+          warnFrom = `3.2`,
+          // we tighten for-comprehensions without `case` to error in 3.4,
+          // but we keep pat-defs as warnings for now ("@unchecked"),
+          // until we propose an alternative way to assert exhaustivity to the typechecker.
+          errorFrom = if isPatDef then `future` else `3.4`
+        )
       false
     }

From 9b6184bf0f87847b552ef64a019e1f6d0167a8c8 Mon Sep 17 00:00:00 2001
From: EnzeXing
Date: Fri, 3 Nov 2023 12:18:04 -0400
Subject: [PATCH 036/216] Fix i18628

---
 .../src/dotty/tools/dotc/transform/init/Objects.scala | 2 +-
 tests/init-global/pos/i18628.scala                    | 7 +++++++
 tests/init-global/pos/i18628_2.scala                  | 7 +++++++
 tests/init-global/pos/i18628_3.scala                  | 9 +++++++++
 4 files changed, 24 insertions(+), 1 deletion(-)
 create mode 100644 tests/init-global/pos/i18628.scala
 create mode 100644 tests/init-global/pos/i18628_2.scala
 create mode 100644 tests/init-global/pos/i18628_3.scala

diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala
index 8556ced63008..54712c3b12fb 100644
--- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala
+++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala
@@ -589,7 +589,7 @@ object Objects:
       values.map(ref => ref.widen(height)).join
 
     case Fun(code, thisV, klass, env) =>
-      Fun(code, thisV.widenRefOrCold(height), klass, env.widen(height))
+      Fun(code, thisV.widenRefOrCold(height), klass, env.widen(height - 1))
 
     case ref @ OfClass(klass, outer, _, args, env) =>
       val outer2 = outer.widen(height - 1)
diff --git a/tests/init-global/pos/i18628.scala b/tests/init-global/pos/i18628.scala
new file mode 100644
index 000000000000..8ea91a381276
--- /dev/null
+++ b/tests/init-global/pos/i18628.scala
@@ -0,0 +1,7 @@
+object Test:
+  class Box(val x: Int)
+
+  def recur(a: => Box, b: => Box): Int =
+    a.x + recur(a, b) + b.x
+
+  recur(Box(1), Box(2))
\ No newline at end of file
diff --git a/tests/init-global/pos/i18628_2.scala b/tests/init-global/pos/i18628_2.scala
new file mode 100644
index 000000000000..272207fc1fd1
--- /dev/null
+++ b/tests/init-global/pos/i18628_2.scala
@@ -0,0 +1,7 @@
+object Test:
+  class Box(val x: Int)
+
+  def recur(a: => Box, b: Box): Int =
+    a.x + recur(a, b) + b.x
+
+  recur(Box(1), Box(2))
diff --git a/tests/init-global/pos/i18628_3.scala b/tests/init-global/pos/i18628_3.scala
new file mode 100644
index 000000000000..22ac88cfc39d
--- /dev/null
+++ b/tests/init-global/pos/i18628_3.scala
@@ -0,0 +1,9 @@
+import scala.annotation.init.widen
+
+object Test:
+  class Box(val x: Int)
+
+  def recur(a: => Box, b: => Box): Int =
+    a.x + recur(a: @widen(5), b: @widen(5)) + b.x
+
+  recur(Box(1), Box(2))
\ No newline at end of file

From 20e2ca349f644bff42092fd406bf71bb62a25801 Mon Sep 17 00:00:00 2001
From: EnzeXing
Date: Fri, 3 Nov 2023 12:26:50 -0400
Subject: [PATCH 037/216] Move test

---
 tests/init-global/{pos => neg}/i18628.scala   | 2 +-
 tests/init-global/{pos => neg}/i18628_2.scala | 2 +-
 tests/init-global/{pos => neg}/i18628_3.scala | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
 rename tests/init-global/{pos => neg}/i18628.scala (73%)
 rename tests/init-global/{pos => neg}/i18628_2.scala (73%)
 rename tests/init-global/{pos => neg}/i18628_3.scala (70%)

diff --git a/tests/init-global/pos/i18628.scala b/tests/init-global/neg/i18628.scala
similarity index 73%
rename from tests/init-global/pos/i18628.scala
rename to tests/init-global/neg/i18628.scala
index 8ea91a381276..a89e98e8bd97 100644
--- a/tests/init-global/pos/i18628.scala
+++ b/tests/init-global/neg/i18628.scala
@@ -2,6 +2,6 @@ object Test:
   class Box(val x: Int)
 
   def recur(a: => Box, b: => Box): Int =
-    a.x + recur(a, b) + b.x
+    a.x + recur(a, b) + b.x // error
 
   recur(Box(1), Box(2))
\ No newline at end of file
diff --git a/tests/init-global/pos/i18628_2.scala b/tests/init-global/neg/i18628_2.scala
similarity index 73%
rename from tests/init-global/pos/i18628_2.scala
rename to tests/init-global/neg/i18628_2.scala
index 272207fc1fd1..4ad428035441 100644
--- a/tests/init-global/pos/i18628_2.scala
+++ b/tests/init-global/neg/i18628_2.scala
@@ -2,6 +2,6 @@ object Test:
   class Box(val x: Int)
 
   def recur(a: => Box, b: Box): Int =
-    a.x + recur(a, b) + b.x
+    a.x + recur(a, b) + b.x // error
 
   recur(Box(1), Box(2))
diff --git a/tests/init-global/pos/i18628_3.scala b/tests/init-global/neg/i18628_3.scala
similarity index 70%
rename from tests/init-global/pos/i18628_3.scala
rename to tests/init-global/neg/i18628_3.scala
index 22ac88cfc39d..563d55b26f23 100644
--- a/tests/init-global/pos/i18628_3.scala
+++ b/tests/init-global/neg/i18628_3.scala
@@ -4,6 +4,6 @@ object Test:
   class Box(val x: Int)
 
   def recur(a: => Box, b: => Box): Int =
-    a.x + recur(a: @widen(5), b: @widen(5)) + b.x
+    a.x + recur(a: @widen(5), b: @widen(5)) + b.x // error
 
   recur(Box(1), Box(2))
\ No newline at end of file

From e41cd40820a6ca0c2bfa5c440f6c85ca61a3071e Mon Sep 17 00:00:00 2001
From: odersky
Date: Sat, 4 Nov 2023 10:56:25 +0100
Subject: [PATCH 038/216] Some new tests

- Some variants of typeclasses
- Revised Pouring.scala with some naming improvements
---
 tests/pos/typeclasses.scala | 198 ++++++++++++++++++++++++++++++++++++
 tests/run/Pouring.check     |   3 +-
 tests/run/Pouring.scala     |  48 ++++-----
 3 files changed, 223 insertions(+), 26 deletions(-)
 create mode 100644 tests/pos/typeclasses.scala

diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala
new file mode 100644
index 000000000000..07fe5a31ce5d
--- /dev/null
+++ b/tests/pos/typeclasses.scala
@@ -0,0 +1,198 @@
+class Common:
+
+  // this should go in Predef
+  infix type at [A <: { type This }, B] = A { type This = B }
+
+  trait Ord:
+    type This
+    extension (x: This)
+      def compareTo(y: This): Int
+      def < (y: This): Boolean = compareTo(y) < 0
+      def > (y: This): Boolean = compareTo(y) > 0
+
+  trait SemiGroup:
+    type This
+    extension (x: This) def combine(y: This): This
+
+  trait Monoid extends SemiGroup:
+    def unit: This
+
+  trait Functor:
+    type This[A]
+    extension [A](x: This[A]) def map[B](f: A => B): This[B]
+
+  trait Monad extends Functor:
+    def pure[A](x: A): This[A]
+    extension [A](x: This[A])
+      def flatMap[B](f: A => This[B]): This[B]
+      def map[B](f: A => B) = x.flatMap(f `andThen` pure)
+end Common
+
+
+object Instances extends Common:
+
+/*
+  instance Int: Ord as intOrd with
+    extension (x: Int)
+      def compareTo(y: Int) =
+        if x < y then -1
+        else if x > y then +1
+        else 0
+*/
+  given intOrd: Ord with
+    type This = Int
+    extension (x: Int)
+      def compareTo(y: Int) =
+        if x < y then -1
+        else if x > y then +1
+        else 0
+/*
+  instance List[T: Ord]: Ord as listOrd with
+    extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match
+      case (Nil, Nil) => 0
+      case (Nil, _) => -1
+      case (_, Nil) => +1
+      case (x :: xs1, y :: ys1) =>
+        val fst = x.compareTo(y)
+        if (fst != 0) fst else xs1.compareTo(ys1)
+*/
+
+  // Proposed short syntax:
+  // given listOrd[T: Ord as ord]: Ord at T with
+  given listOrd[T](using ord: Ord { type This = T }): Ord with
+    type This = List[T]
+    extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match
+      case (Nil, Nil) => 0
+      case (Nil, _) => -1
+      case (_, Nil) => +1
+      case (x :: xs1, y :: ys1) =>
+        val fst = x.compareTo(y)
+        if (fst != 0) fst else xs1.compareTo(ys1)
+  end listOrd
+
+/*
+  instance List: Monad as listMonad with
+    extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] =
+      xs.flatMap(f)
+    def pure[A](x: A): List[A] =
+      List(x)
+*/
+
+  given listMonad: Monad with
+    type This[A] = List[A]
+    extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] =
+      xs.flatMap(f)
+    def pure[A](x: A): List[A] =
+      List(x)
+
+/*
+  type Reader[Ctx] = X =>> Ctx => X
+  instance Reader[Ctx: _]: Monad as readerMonad with
+    extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B =
+      ctx => f(r(ctx))(ctx)
+    def pure[A](x: A): Ctx => A =
+      ctx => x
+*/
+
+  given readerMonad[Ctx]: Monad with
+    type This[X] = Ctx => X
+    extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B =
+      ctx => f(r(ctx))(ctx)
+    def pure[A](x: A): Ctx => A =
+      ctx => x
+
+  extension (xs: Seq[String])
+    def longestStrings: Seq[String] =
+      val maxLength = xs.map(_.length).max
+      xs.filter(_.length == maxLength)
+
+  extension [T](xs: List[T])
+    def second = xs.tail.head
+    def third = xs.tail.tail.head
+
+  //Proposed short syntax:
+  //extension [M: Monad as m, A](xss: M[M[A]])
+  //  def flatten: M[A] =
+  //    xss.flatMap(identity)
+
+  extension [M, A](using m: Monad)(xss: m.This[m.This[A]])
+    def flatten: m.This[A] =
+      xss.flatMap(identity)
+
+  // Proposed short syntax:
+  // def maximum[T: Ord](xs: List[T]): T =
+  def maximum[T](xs: List[T])(using Ord at T): T =
+    xs.reduceLeft((x, y) => if (x < y) y else x)
+
+  // Proposed short syntax:
+  // def descending[T: Ord as asc]: Ord at T = new Ord:
+  def descending[T](using asc: Ord at T): Ord at T = new Ord:
+    type This = T
+    extension (x: T) def compareTo(y: T) = asc.compareTo(y)(x)
+
+  // Proposed short syntax:
+  // def minimum[T: Ord](xs: List[T]) =
+  def minimum[T](xs: List[T])(using Ord at T) =
+    maximum(xs)(using descending)
+
+  def test(): Unit =
+    val xs = List(1, 2, 3)
+    println(maximum(xs))
+    println(maximum(xs)(using descending))
+    println(maximum(xs)(using descending(using intOrd)))
+    println(minimum(xs))
+
+// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html
+//
+//              lines  words  chars
+//   wc Scala:    30    115    853
+//   wc Rust :    57    193   1466
+trait Animal:
+  type This
+  // Associated function signature; `This` refers to the implementor type.
+  def apply(name: String): This
+
+  // Method signatures; these will return a string.
+  extension (self: This)
+    def name: String
+    def noise: String
+    def talk(): Unit = println(s"$name, $noise")
+end Animal
+
+class Sheep(val name: String):
+  var isNaked = false
+  def shear() =
+    if isNaked then
+      println(s"$name is already naked...")
+    else
+      println(s"$name gets a haircut!")
+      isNaked = true
+
+/*
+instance Sheep: Animal with
+  def apply(name: String) = Sheep(name)
+  extension (self: This)
+    def name: String = self.name
+    def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!"
+    override def talk(): Unit =
+      println(s"$name pauses briefly... $noise")
+*/
+
+// Implement the `Animal` trait for `Sheep`.
+given Animal with
+  type This = Sheep
+  def apply(name: String) = Sheep(name)
+  extension (self: This)
+    def name: String = self.name
+    def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!"
+    override def talk(): Unit =
+      println(s"$name pauses briefly... $noise")
+
+/*
+
+  - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean
+    T is a fresh type variable (T can start with a capital letter).
+  - instance definitions
+  - `as m` syntax in context bounds and instance definitions
+
+*/
diff --git a/tests/run/Pouring.check b/tests/run/Pouring.check
index f07f29105c0b..c9ab84a226bb 100644
--- a/tests/run/Pouring.check
+++ b/tests/run/Pouring.check
@@ -1,2 +1 @@
-Vector(Empty(0), Empty(1), Fill(0), Fill(1), Pour(0,1), Pour(1,0))
-Fill(1) Pour(1,0) Empty(0) Pour(1,0) Fill(1) Pour(1,0) --> Vector(4, 6)
+Illegal command line: more arguments expected
diff --git a/tests/run/Pouring.scala b/tests/run/Pouring.scala
index 6f4611af8bfc..5bb2a92ff8e3 100644
--- a/tests/run/Pouring.scala
+++ b/tests/run/Pouring.scala
@@ -1,37 +1,35 @@
-class Pouring(capacity: Vector[Int]):
-  type Glass = Int
-  type Content = Vector[Int]
+type Glass = Int
+type Levels = Vector[Int]
 
-  enum Move:
-    def apply(content: Content): Content = this match
-      case Empty(g) => content.updated(g, 0)
-      case Fill(g) => content.updated(g, capacity(g))
-      case Pour(from, to) =>
-        val amount = content(from) min (capacity(to) - content(to))
-        extension (s: Content) def adjust(g: Glass, delta: Int) = s.updated(g, s(g) + delta)
-        content.adjust(from, -amount).adjust(to, amount)
+class Pouring(capacity: Levels):
+  enum Move:
     case Empty(glass: Glass)
     case Fill(glass: Glass)
     case Pour(from: Glass, to: Glass)
+
+    def apply(levels: Levels): Levels = this match
+      case Empty(glass) =>
+        levels.updated(glass, 0)
+      case Fill(glass) =>
+        levels.updated(glass, capacity(glass))
+      case Pour(from, to) =>
+        val amount = levels(from) min (capacity(to) - levels(to))
+        levels.updated(from, levels(from) - amount)
+          .updated(to, levels(to) + amount)
   end Move
 
+  val glasses = 0 until capacity.length
   val moves =
-    val glasses = 0 until capacity.length
-
-    (for g <- glasses yield Move.Empty(g))
+    (for g <- glasses yield Move.Empty(g))
     ++ (for g <- glasses yield Move.Fill(g))
     ++ (for g1 <- glasses; g2 <- glasses if g1 != g2 yield Move.Pour(g1, g2))
 
-  class Path(history: List[Move], val endContent: Content):
+  class Path(history: List[Move], val endContent: Levels):
    def extend(move: Move) = Path(move :: history, move(endContent))
     override def toString = s"${history.reverse.mkString(" ")} --> $endContent"
-  end Path
-
-  val initialContent: Content = capacity.map(x => 0)
-  val initialPath = Path(Nil, initialContent)
 
-  def from(paths: Set[Path], explored: Set[Content]): LazyList[Set[Path]] =
+  def from(paths: Set[Path], explored: Set[Levels]): LazyList[Set[Path]] =
     if paths.isEmpty then LazyList.empty
     else
      val extensions =
@@ -44,6 +42,8 @@ class Pouring(capacity: Vector[Int]):
       paths #:: from(extensions, explored ++ extensions.map(_.endContent))
 
   def solutions(target: Int): LazyList[Path] =
+    val initialContent: Levels = capacity.map(_ => 0)
+    val initialPath = Path(Nil, initialContent)
     for
       paths <- from(Set(initialPath), Set(initialContent))
       path <- paths
     yield path
 end Pouring
 
-@main def Test =
-  val problem = Pouring(Vector(4, 7))
-  println(problem.moves)
-  println(problem.solutions(6).head)
+@main def Test(target: Int, capacities: Int*) =
+  val problem = Pouring(capacities.toVector)
+  println(s"Moves: ${problem.moves}")
+  println(s"Solution: ${problem.solutions(target).headOption}")

From 3640ff4263c7323d77036cf5eeedf60855d177f5 Mon Sep 17 00:00:00 2001
From: odersky
Date: Mon, 30 Oct 2023 14:30:20 +0100
Subject: [PATCH 039/216] Don't recheck inherited trait parameters during
 capture checking

The logic gets confused by the added capture refinements.
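
For illustration, a hypothetical sketch of the shape that trips the old check
once capture checking is enabled (names invented, not taken from the test
suite):

    trait Writer(val out: () => Unit)   // under cc, `() => Unit` reads as `() ->{cap} Unit`
    class ConsoleWriter(f: () => Unit) extends Writer(f)

After Setup, the parameter accessor `out` as seen from `ConsoleWriter` can
carry a capture refinement that the type seen from `Writer` lacks, so the
"same parameter type" comparison fails spuriously even though the code is
fine.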
--- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 2 ++ compiler/src/dotty/tools/dotc/typer/RefChecks.scala | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index fab0689b4df2..ad216c47d2f5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1122,6 +1122,8 @@ class CheckCaptures extends Recheck, SymTransformer: override def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = !setup.isPreCC(overriding) && !setup.isPreCC(overridden) + + override def checkInheritedTraitParameters: Boolean = false end OverridingPairsCheckerCC def traverse(t: Tree)(using Context) = diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index eef88e76971e..af279844f370 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -267,6 +267,9 @@ object RefChecks { if !other.is(Deferred) then checkOverride(subtypeChecker, dcl, other) end checkAll + + // Disabled for capture checking since traits can get different parameter refinements + def checkInheritedTraitParameters: Boolean = true end OverridingPairsChecker /** 1. Check all members of class `clazz` for overriding conditions. @@ -851,7 +854,7 @@ object RefChecks { checkCaseClassInheritanceInvariant() } - if (!clazz.is(Trait)) { + if (!clazz.is(Trait) && checker.checkInheritedTraitParameters) { // check that parameterized base classes and traits are typed in the same way as from the superclass // I.e. say we have // From b65fd3a4435f450f69c4e21b521c185f6b3f3e6c Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Oct 2023 14:31:11 +0100 Subject: [PATCH 040/216] Add SeqView to stdlib --- tests/pos-special/stdlib/collection/Seq.scala | 4 +- .../stdlib/collection/SeqView.scala | 231 ++++++++++++++++++ 2 files changed, 233 insertions(+), 2 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/SeqView.scala diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala index caabf6fa6436..81f39b3538cf 100644 --- a/tests/pos-special/stdlib/collection/Seq.scala +++ b/tests/pos-special/stdlib/collection/Seq.scala @@ -77,7 +77,7 @@ object Seq extends SeqFactory.Delegate[Seq](immutable.Seq) * @define coll sequence * @define Coll `Seq` */ -trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => +trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => override def view: SeqView[A] = new SeqView.Id[A](this) @@ -234,7 +234,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * * @return an iterator yielding the elements of this $coll in reversed order */ - def reverseIterator: Iterator[A] = reversed.iterator + override def reverseIterator: Iterator[A] = reversed.iterator /** Tests whether this $coll contains the given sequence at a given index. * diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala new file mode 100644 index 000000000000..f8f4f1e199de --- /dev/null +++ b/tests/pos-special/stdlib/collection/SeqView.scala @@ -0,0 +1,231 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + +/** !!! Scala 2 difference: Need intermediate trait SeqViewOps to collect the + * necessary functionality over which SeqViews are defined, and at the same + * time allowing impure operations. Scala 2 uses SeqOps here, but SeqOps is + * pure, whereas SeqViews are Iterables which can be impure (for instance, + * mapping a SeqView with an impure function gives an impure view). + */ +trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { + self: SeqViewOps[A, CC, C]^ => + + def length: Int + def apply(x: Int): A + def appended[B >: A](elem: B): CC[B]^{this} + def prepended[B >: A](elem: B): CC[B]^{this} + def reverse: C^{this} + def sorted[B >: A](implicit ord: Ordering[B]): C^{this} + + def reverseIterator: Iterator[A]^{this} = reversed.iterator +} + +trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] { + self: SeqView[A]^ => + + override def view: SeqView[A]^{this} = this + + override def map[B](f: A => B): SeqView[B]^{this, f} = new SeqView.Map(this, f) + override def appended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Prepended(elem, this) + override def reverse: SeqView[A]^{this} = new SeqView.Reverse(this) + override def take(n: Int): SeqView[A]^{this} = new SeqView.Take(this, n) + override def drop(n: Int): SeqView[A]^{this} = new SeqView.Drop(this, n) + override def takeRight(n: Int): SeqView[A]^{this} = new SeqView.TakeRight(this, n) + override def dropRight(n: Int): SeqView[A]^{this} = new SeqView.DropRight(this, n) + override def tapEach[U](f: A => U): SeqView[A]^{this, f} = new SeqView.Map(this, { (a: A) => f(a); a }) + + def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(prefix, this) + + override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A]^{this} = new SeqView.Sorted(this, ord) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SeqView" +} + +object SeqView { + + /** A `SeqOps` whose collection type and collection type constructor are unknown */ + private type SomeSeqOps[+A] = SeqViewOps[A, AnyConstr, _] + + /** A view that doesn’t apply any transformation to an underlying sequence */ + @SerialVersionUID(3L) + class Id[+A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { + def apply(idx: Int): A = underlying.apply(idx) + def length: Int = underlying.length + def iterator: Iterator[A]^{this} = underlying.iterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class Map[+A, +B](underlying: SomeSeqOps[A]^, f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { + def apply(idx: Int): B = f(underlying(idx)) + def length: Int = underlying.length + } + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeSeqOps[A]^, elem: A) extends View.Appended(underlying, elem) with SeqView[A] { + def apply(idx: 
Int): A = if (idx == underlying.length) elem else underlying(idx) + def length: Int = underlying.length + 1 + } + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeSeqOps[A]^) extends View.Prepended(elem, underlying) with SeqView[A] { + def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1) + def length: Int = underlying.length + 1 + } + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeSeqOps[A]^, suffix: SomeSeqOps[A]^) extends View.Concat[A](prefix, suffix) with SeqView[A] { + def apply(idx: Int): A = { + val l = prefix.length + if (idx < l) prefix(idx) else suffix(idx - l) + } + def length: Int = prefix.length + suffix.length + } + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { + def apply(i: Int) = underlying.apply(size - 1 - i) + def length = underlying.size + def iterator: Iterator[A]^{this} = underlying.reverseIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class Take[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.Take(underlying, n) with SeqView[A] { + def apply(idx: Int): A = if (idx < n) { + underlying(idx) + } else { + throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${if (underlying.knownSize >= 0) knownSize - 1 else "unknown"})") + } + def length: Int = underlying.length min normN + } + + @SerialVersionUID(3L) + class TakeRight[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { + private[this] val delta = (underlying.size - (n max 0)) max 0 + def length = underlying.size - delta + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + delta) + } + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeSeqOps[A]^, n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { + def length = (underlying.size - normN) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + normN) + override def drop(n: Int): SeqView[A]^{this} = new Drop(underlying, this.n + n) + } + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeSeqOps[A]^, n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { + private[this] val len = (underlying.size - (n max 0)) max 0 + def length = len + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i) + } + + @SerialVersionUID(3L) + class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A]^, + private[this] val len: Int, + ord: Ordering[B]) + extends SeqView[A] { + outer: Sorted[A, B]^ => + + // force evaluation immediately by calling `length` so infinite collections + // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls + def this(underlying: SomeSeqOps[A]^, ord: Ordering[B]) = this(underlying, underlying.length, ord) + + @SerialVersionUID(3L) + private[this] class ReverseSorted extends SeqView[A] { + private[this] lazy val _reversed = new SeqView.Reverse(_sorted) + + def apply(i: Int): A = _reversed.apply(i) + def length: Int = len + def iterator: Iterator[A]^{this} = Iterator.empty ++ _reversed.iterator // very lazy + override def knownSize: Int = len + override def isEmpty: Boolean = len == 0 + override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory) + override def reverse: SeqView[A]^{outer} = outer + override protected def reversed: Iterable[A] = outer.unsafeAssumePure + + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): 
SeqView[A]^{this} = + if (ord1 == Sorted.this.ord) outer.unsafeAssumePure + else if (ord1.isReverseOf(Sorted.this.ord)) this + else new Sorted(elems, len, ord1) + } + + @volatile private[this] var evaluated = false + + private[this] lazy val _sorted: Seq[A] = { + val res = { + val len = this.len + if (len == 0) Nil + else if (len == 1) List(underlying.head) + else { + val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] + underlying.copyToArray(arr) + java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) + // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it + // is safe because: + // - the ArraySeq is immutable, and items that are not of type A + // cannot be added to it + // - we know it only contains items of type A (and if this collection + // contains items of another type, we'd get a CCE anyway) + // - the cast doesn't actually do anything in the runtime because the + // type of A is not known and Array[_] is Array[AnyRef] + immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]]) + } + } + evaluated = true + underlying = null + res + } + + private[this] def elems: SomeSeqOps[A]^{this} = { + val orig = underlying + if (evaluated) _sorted else orig + } + + def apply(i: Int): A = _sorted.apply(i) + def length: Int = len + def iterator: Iterator[A]^{this} = Iterator.empty ++ _sorted.iterator // very lazy + override def knownSize: Int = len + override def isEmpty: Boolean = len == 0 + override def to[C1](factory: Factory[A, C1]): C1 = _sorted.to(factory) + override def reverse: SeqView[A] = new ReverseSorted + // we know `_sorted` is either tiny or has efficient random access, + // so this is acceptable for `reversed` + override protected def reversed: Iterable[A] = new ReverseSorted + + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} = + if (ord1 == this.ord) this + else if (ord1.isReverseOf(this.ord)) reverse + else new Sorted(elems, len, ord1) + } +} + +/** Explicit instantiation of the `SeqView` trait to reduce class file size in subclasses. 
*/ +@SerialVersionUID(3L) +abstract class AbstractSeqView[+A] extends AbstractView[A] with SeqView[A] From 0ada1d4c3724ef7b0e8e27e36ebb26e1f7f83fe4 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Oct 2023 13:12:59 +0100 Subject: [PATCH 041/216] Add IndexedSeqView to lib --- .../stdlib/collection/IndexedSeq.scala | 6 +- .../stdlib/collection/IndexedSeqView.scala | 187 ++++++++++++++++++ 2 files changed, 191 insertions(+), 2 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/IndexedSeqView.scala diff --git a/tests/pos-special/stdlib/collection/IndexedSeq.scala b/tests/pos-special/stdlib/collection/IndexedSeq.scala index 6e8e2bd0dc66..a2d4cc942231 100644 --- a/tests/pos-special/stdlib/collection/IndexedSeq.scala +++ b/tests/pos-special/stdlib/collection/IndexedSeq.scala @@ -18,6 +18,8 @@ import scala.collection.Searching.{Found, InsertionPoint, SearchResult} import scala.collection.Stepper.EfficientSplit import scala.math.Ordering import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + /** Base trait for indexed sequences that have efficient `apply` and `length` */ trait IndexedSeq[+A] extends Seq[A] @@ -33,7 +35,7 @@ trait IndexedSeq[+A] extends Seq[A] object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](immutable.IndexedSeq) /** Base trait for indexed Seq operations */ -trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => +trait IndexedSeqOps[+A, +CC[_], +C] extends Any with IndexedSeqViewOps[A, CC, C] with SeqOps[A, CC, C] { self => def iterator: Iterator[A] = view.iterator @@ -86,7 +88,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => override def dropRight(n: Int): C = fromSpecific(new IndexedSeqView.DropRight(this, n)) - override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)) + override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)).unsafeAssumePure override def reverse: C = fromSpecific(new IndexedSeqView.Reverse(this)) diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala new file mode 100644 index 000000000000..41edbf4448c8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/IndexedSeqView.scala @@ -0,0 +1,187 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.nowarn +import language.experimental.captureChecking + +trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { + self: IndexedSeqViewOps[A, CC, C]^ => +} + +/** View defined in terms of indexing a range */ +trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] { + self: IndexedSeqView[A]^ => + + override def view: IndexedSeqView[A]^{this} = this + + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + override def view(from: Int, until: Int): IndexedSeqView[A]^{this} = view.slice(from, until) + + override def iterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewIterator(this) + override def reverseIterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewReverseIterator(this) + + override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Prepended(elem, this) + override def take(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Take(this, n) + override def takeRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.TakeRight(this, n) + override def drop(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Drop(this, n) + override def dropRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.DropRight(this, n) + override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new IndexedSeqView.Map(this, f) + override def reverse: IndexedSeqView[A]^{this} = new IndexedSeqView.Reverse(this) + override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Slice(this, from, until) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) + + def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqViewOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqViewOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqViewOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(prefix, this) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "IndexedSeqView" +} + +object IndexedSeqView { + + @SerialVersionUID(3L) + private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { + this: IndexedSeqViewIterator[A]^ => + private[this] var current = 0 + private[this] var remainder = self.length + override def knownSize: Int = remainder + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(current) + current += 1 + remainder -= 1 + r + } else Iterator.empty.next() + + override def drop(n: Int): Iterator[A]^{this} = { + if (n > 0) { + current += n + remainder = Math.max(0, remainder - n) + } + this + } + + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { + + def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value + + val formatFrom = formatRange(from) + val formatUntil = formatRange(until) + remainder = Math.max(0, formatUntil - formatFrom) + current = current + formatFrom + this + } + } + @SerialVersionUID(3L) 
+ private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { + this: IndexedSeqViewReverseIterator[A]^ => + private[this] var remainder = self.length + private[this] var pos = remainder - 1 + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(pos) + pos -= 1 + remainder -= 1 + r + } else Iterator.empty.next() + + // from < 0 means don't move pos, until < 0 means don't limit remainder + // + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { + if (_hasNext) { + if (remainder <= from) remainder = 0 // exhausted by big skip + else if (from <= 0) { // no skip, pos is same + if (until >= 0 && until < remainder) remainder = until // ...limited by until + } + else { + pos -= from // skip ahead + if (until >= 0 && until < remainder) { // ...limited by until + if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip + else remainder = until - from // ...limited by until, less the skip + } + else remainder -= from // ...otherwise just less the skip + } + } + this + } + } + + /** An `IndexedSeqViewOps` whose collection type and collection type constructor are unknown */ + type SomeIndexedSeqViewOps[A] = IndexedSeqViewOps[A, AnyConstr, _] + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqViewOps[A]^) + extends SeqView.Id(underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqViewOps[A]^, elem: A) + extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqViewOps[A]^) + extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqViewOps[A]^, suffix: SomeIndexedSeqViewOps[A]^) + extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqViewOps[A]^, n: Int) + extends SeqView.Take(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqViewOps[A]^, n: Int) + extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqViewOps[A]^, n: Int) + extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqViewOps[A]^, n: Int) + extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqViewOps[A]^, f: A => B) + extends SeqView.Map(underlying, f) with IndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqViewOps[A]^) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqViewOps[A]^, from: Int, until: Int) extends AbstractIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def length: Int = len + } +} + +/** Explicit instantiation of the `IndexedSeqView` trait to reduce class file size in subclasses. 
*/ +@SerialVersionUID(3L) +abstract class AbstractIndexedSeqView[+A] extends AbstractSeqView[A] with IndexedSeqView[A] From dbd0c0e0375b357242414003ec7b5e3fa589ee85 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Oct 2023 14:40:43 +0100 Subject: [PATCH 042/216] Add CheckedIndexedView to stdlib --- .../stdlib/collection/IndexedSeqView.scala | 30 ++--- .../mutable/CheckedIndexedSeqView.scala | 120 ++++++++++++++++++ 2 files changed, 135 insertions(+), 15 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala index 41edbf4448c8..a16e06fa707d 100644 --- a/tests/pos-special/stdlib/collection/IndexedSeqView.scala +++ b/tests/pos-special/stdlib/collection/IndexedSeqView.scala @@ -43,9 +43,9 @@ trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqVie override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Slice(this, from, until) override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) - def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqViewOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) - def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqViewOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) - def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqViewOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(prefix, this) + def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(prefix, this) @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix: String = "IndexedSeqView" @@ -125,46 +125,46 @@ object IndexedSeqView { } /** An `IndexedSeqViewOps` whose collection type and collection type constructor are unknown */ - type SomeIndexedSeqViewOps[A] = IndexedSeqViewOps[A, AnyConstr, _] + type SomeIndexedSeqOps[A] = IndexedSeqViewOps[A, AnyConstr, _] @SerialVersionUID(3L) - class Id[+A](underlying: SomeIndexedSeqViewOps[A]^) + class Id[+A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Id(underlying) with IndexedSeqView[A] @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIndexedSeqViewOps[A]^, elem: A) + class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A) extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIndexedSeqViewOps[A]^) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^) extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] @SerialVersionUID(3L) - class Concat[A](prefix: SomeIndexedSeqViewOps[A]^, suffix: SomeIndexedSeqViewOps[A]^) + class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^) extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] @SerialVersionUID(3L) - class Take[A](underlying: SomeIndexedSeqViewOps[A]^, n: Int) + class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.Take(underlying, n) with IndexedSeqView[A] 
@SerialVersionUID(3L) - class TakeRight[A](underlying: SomeIndexedSeqViewOps[A]^, n: Int) + class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class Drop[A](underlying: SomeIndexedSeqViewOps[A]^, n: Int) + class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIndexedSeqViewOps[A]^, n: Int) + class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class Map[A, B](underlying: SomeIndexedSeqViewOps[A]^, f: A => B) + class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B) extends SeqView.Map(underlying, f) with IndexedSeqView[B] @SerialVersionUID(3L) - class Reverse[A](underlying: SomeIndexedSeqViewOps[A]^) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { + class Reverse[A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { override def reverse: IndexedSeqView[A] = underlying match { case x: IndexedSeqView[A] => x case _ => super.reverse @@ -172,7 +172,7 @@ object IndexedSeqView { } @SerialVersionUID(3L) - class Slice[A](underlying: SomeIndexedSeqViewOps[A]^, from: Int, until: Int) extends AbstractIndexedSeqView[A] { + class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int) extends AbstractIndexedSeqView[A] { protected val lo = from max 0 protected val hi = (until max 0) min underlying.length protected val len = (hi - lo) max 0 diff --git a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala new file mode 100644 index 000000000000..152b6cc9ffc7 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala @@ -0,0 +1,120 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable +import language.experimental.captureChecking + +private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { + this: CheckedIndexedSeqView[A]^ => + + protected val mutationCount: () => Int + + override def iterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} + +private[mutable] object CheckedIndexedSeqView { + import IndexedSeqView.SomeIndexedSeqOps + + @SerialVersionUID(3L) + private[mutable] class CheckedIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) + extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A)(protected val mutationCount: () => Int) + extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: 
SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) + extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) + extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B)(protected val mutationCount: () => Int) + extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) + extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int)(protected val mutationCount: () => Int) + extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def length: Int = len + } +} From 8a60bc22861b77a9bf9226b70387367b4d69a68f Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Oct 2023 14:42:09 +0100 Subject: [PATCH 043/216] Add ArrayBuffer (unchecked) to stdlib --- .../collection/mutable/ArrayBuffer.scala | 403 ++++++++++++++++++ 1 file changed, 403 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala new file mode 100644 index 000000000000..e3ddeb71ef8e --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala @@ -0,0 +1,403 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import java.util.Arrays + +import scala.annotation.nowarn +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable + +/** An implementation of the `Buffer` class using an array to + * represent the assembled sequence internally. 
Append, update and random + * access take constant time (amortized time). Prepends and removes are + * linear in the buffer size. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] + * section on `Array Buffers` for more information. + + * + * @tparam A the type of this arraybuffer's elements. + * + * @define Coll `mutable.ArrayBuffer` + * @define coll array buffer + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(-1582447879429021880L) +class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) + extends AbstractBuffer[A] + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with StrictOptimizedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with IterableFactoryDefaults[A, ArrayBuffer] + with DefaultSerializable { + + def this() = this(new Array[AnyRef](ArrayBuffer.DefaultInitialSize), 0) + + def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0) + + @transient private[this] var mutationCount: Int = 0 + + // needs to be `private[collection]` or `protected[collection]` for parallel-collections + protected[collection] var array: Array[AnyRef] = initialElements + protected var size0 = initialSize + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + shape.parUnbox(new ObjectArrayStepper(array, 0, length).asInstanceOf[AnyStepper[A] with EfficientSplit]) + } + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + /** Ensure that the internal array has at least `n` cells. */ + protected def ensureSize(n: Int): Unit = { + array = ArrayBuffer.ensureSize(array, size0, n) + } + + // TODO 3.T: should be `protected`, perhaps `protected[this]` + /** Ensure that the internal array has at least `n` additional cells more than `size0`. */ + private[mutable] def ensureAdditionalSize(n: Int): Unit = { + // `.toLong` to ensure `Long` arithmetic is used and prevent `Int` overflow + array = ArrayBuffer.ensureSize(array, size0, size0.toLong + n) + } + + def sizeHint(size: Int): Unit = + if(size > length && size >= 1) ensureSize(size) + + /** Reduce length to `n`, nulling out all dropped elements */ + private def reduceToSize(n: Int): Unit = { + mutationCount += 1 + Arrays.fill(array, n, size0, null) + size0 = n + } + + /** Trims the ArrayBuffer to an appropriate size for the current + * number of elements (rounding up to the next natural size), + * which may replace the array by a shorter one. + * This allows releasing some unused memory. + */ + def trimToSize(): Unit = { + resize(length) + } + + /** Trims the `array` buffer size down to either a power of 2 + * or Int.MaxValue while keeping first `requiredLength` elements. 
+ */ + private def resize(requiredLength: Int): Unit = + array = ArrayBuffer.downsize(array, requiredLength) + + @inline private def checkWithinBounds(lo: Int, hi: Int) = { + if (lo < 0) throw new IndexOutOfBoundsException(s"$lo is out of bounds (min 0, max ${size0 - 1})") + if (hi > size0) throw new IndexOutOfBoundsException(s"${hi - 1} is out of bounds (min 0, max ${size0 - 1})") + } + + def apply(n: Int): A = { + checkWithinBounds(n, n + 1) + array(n).asInstanceOf[A] + } + + def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { + checkWithinBounds(index, index + 1) + mutationCount += 1 + array(index) = elem.asInstanceOf[AnyRef] + } + + def length = size0 + + // TODO: return `IndexedSeqView` rather than `ArrayBufferView` + override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount) + + override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer + + /** Note: This does not actually resize the internal representation. + * See clearAndShrink if you want to also resize internally + */ + def clear(): Unit = reduceToSize(0) + + /** + * Clears this buffer and shrinks to @param size (rounding up to the next + * natural size) + * @param size + */ + def clearAndShrink(size: Int = ArrayBuffer.DefaultInitialSize): this.type = { + clear() + resize(size) + this + } + + def addOne(elem: A): this.type = { + mutationCount += 1 + ensureAdditionalSize(1) + val oldSize = size0 + size0 = oldSize + 1 + this(oldSize) = elem + this + } + + // Overridden to use array copying for efficiency where possible. + override def addAll(elems: IterableOnce[A]): this.type = { + elems match { + case elems: ArrayBuffer[_] => + val elemsLength = elems.size0 + if (elemsLength > 0) { + mutationCount += 1 + ensureAdditionalSize(elemsLength) + Array.copy(elems.array, 0, array, length, elemsLength) + size0 = length + elemsLength + } + case _ => super.addAll(elems) + } + this + } + + def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { + checkWithinBounds(index, index) + mutationCount += 1 + ensureAdditionalSize(1) + Array.copy(array, index, array, index + 1, size0 - index) + size0 += 1 + this(index) = elem + } + + def prepend(elem: A): this.type = { + insert(0, elem) + this + } + + def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]): Unit = { + checkWithinBounds(index, index) + elems match { + case elems: collection.Iterable[A] => + val elemsLength = elems.size + if (elemsLength > 0) { + mutationCount += 1 + ensureAdditionalSize(elemsLength) + val len = size0 + Array.copy(array, index, array, index + elemsLength, len - index) + // if `elems eq this`, this copy is safe because + // - `elems.array eq this.array` + // - we didn't overwrite the values being inserted after moving them in + // the previous line + // - `copyElemsToArray` will call `System.arraycopy` + // - `System.arraycopy` will effectively "read" all the values before + // overwriting any of them when two arrays are the the same reference + val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) + if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength") + size0 = len + elemsLength // update size AFTER the copy, in case we're inserting a proxy + } + case _ => insertAll(index, ArrayBuffer.from(elems)) + } + } + + /** Note: This does not actually resize the internal representation. 
+ * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int): A = { + checkWithinBounds(index, index + 1) + val res = this(index) + Array.copy(array, index + 1, array, index, size0 - (index + 1)) + reduceToSize(size0 - 1) + res + } + + /** Note: This does not actually resize the internal representation. + * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int, count: Int): Unit = + if (count > 0) { + checkWithinBounds(index, index + count) + Array.copy(array, index + count, array, index, size0 - (index + count)) + reduceToSize(size0 - count) + } else if (count < 0) { + throw new IllegalArgumentException("removing negative number of elements: " + count) + } + + @deprecated("Use 'this' instance instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def result(): this.type = this + + @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo] = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ArrayBuffer" + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + /** Sorts this $coll in place according to an Ordering. + * + * @see [[scala.collection.mutable.IndexedSeqOps.sortInPlace]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. + */ + override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) { + mutationCount += 1 + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + } + this + } + + @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = + if (start == end) z + else foldl(start + 1, end, op(z, array(start).asInstanceOf[A]), op) + + @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = + if (start == end) z + else foldr(start, end - 1, op(array(end - 1).asInstanceOf[A], z), op) + + override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op) + + override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op) + + override def reduceLeft[B >: A](op: (B, A) => B): B = if (length > 0) foldl(1, length, array(0).asInstanceOf[B], op) else super.reduceLeft(op) + + override def reduceRight[B >: A](op: (A, B) => B): B = if (length > 0) foldr(0, length - 1, array(length - 1).asInstanceOf[B], op) else super.reduceRight(op) +} + +/** + * Factory object for the `ArrayBuffer` class. 
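+ *
+ * A sketch of typical use (hypothetical values):
+ * {{{
+ *   val buf = ArrayBuffer(1, 2, 3)
+ *   buf += 4
+ *   val copy = ArrayBuffer.from(buf)  // `from` pre-sizes the copy via `knownSize`
+ * }}}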
+ * + * $factoryInfo + * + * @define coll array buffer + * @define Coll `mutable.ArrayBuffer` + */ +@SerialVersionUID(3L) +object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { + final val DefaultInitialSize = 16 + private[this] val emptyArray = new Array[AnyRef](0) + + def from[B](coll: collection.IterableOnce[B]): ArrayBuffer[B] = { + val k = coll.knownSize + if (k >= 0) { + // Avoid reallocation of buffer if length is known + val array = ensureSize(emptyArray, 0, k) // don't duplicate sizing logic, and check VM array size limit + val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + new ArrayBuffer[B](array, k) + } + else new ArrayBuffer[B] ++= coll + } + + def newBuilder[A]: Builder[A, ArrayBuffer[A]] = + new GrowableBuilder[A, ArrayBuffer[A]](empty) { + override def sizeHint(size: Int): Unit = elems.ensureSize(size) + } + + def empty[A]: ArrayBuffer[A] = new ArrayBuffer[A]() + + /** + * @param arrayLen the length of the backing array + * @param targetLen the minimum length to resize up to + * @return -1 if no resizing is needed, or the size for the new array otherwise + */ + private def resizeUp(arrayLen: Long, targetLen: Long): Int = { + if (targetLen <= arrayLen) -1 + else { + if (targetLen > Int.MaxValue) throw new Exception(s"Collections cannot have more than ${Int.MaxValue} elements") + IterableOnce.checkArraySizeWithinVMLimit(targetLen.toInt) // safe because `targetSize <= Int.MaxValue` + + val newLen = math.max(targetLen, math.max(arrayLen * 2, DefaultInitialSize)) + math.min(newLen, scala.runtime.PStatics.VM_MaxArraySize).toInt + } + } + // if necessary, copy (curSize elements of) the array to a new array of capacity n. + // Should use Array.copyOf(array, resizeEnsuring(array.length))? + private def ensureSize(array: Array[AnyRef], curSize: Int, targetSize: Long): Array[AnyRef] = { + val newLen = resizeUp(array.length, targetSize) + if (newLen < 0) array + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, curSize) + res + } + } + + /** + * @param arrayLen the length of the backing array + * @param targetLen the length to resize down to, if smaller than `arrayLen` + * @return -1 if no resizing is needed, or the size for the new array otherwise + */ + private def resizeDown(arrayLen: Int, targetLen: Int): Int = + if (targetLen >= arrayLen) -1 else math.max(targetLen, 0) + private def downsize(array: Array[AnyRef], targetSize: Int): Array[AnyRef] = { + val newLen = resizeDown(array.length, targetSize) + if (newLen < 0) array + else if (newLen == 0) emptyArray + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, targetSize) + res + } + } +} + +// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` +final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () => Int) + extends AbstractIndexedSeqView[A] { + @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") + def this(array: Array[AnyRef], length: Int) = { + // this won't actually track mutation, but it would be a pain to have the implementation + // check if we have a method to get the current mutation count or not on every method and + // change what it does based on that. hopefully no one ever calls this. 
+ this({ + val _array = array + val _length = length + new ArrayBuffer[A](0) { + this.array = _array + this.size0 = _length + } + }, () => 0) + } + + @deprecated("never intended to be public", since = "2.13.7") + def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] + + @throws[IndexOutOfBoundsException] + def apply(n: Int): A = underlying(n) + def length: Int = underlying.length + override protected[this] def className = "ArrayBufferView" + + // we could inherit all these from `CheckedIndexedSeqView`, except this class is public + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} From 61bf13943e2d67bea391abbc526e294eb5fc52bc Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Oct 2023 15:01:58 +0100 Subject: [PATCH 044/216] Rename annotation source file --- .../unchecked/uncheckedCapabilityLeaks.scala | 12 ------------ .../annotation/unchecked/uncheckedCaptures.scala | 12 ++++++++++++ 2 files changed, 12 insertions(+), 12 deletions(-) create mode 100644 library/src/scala/annotation/unchecked/uncheckedCaptures.scala diff --git a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala index 477ac6d742f7..e69de29bb2d1 100644 --- a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala +++ b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala @@ -1,12 +0,0 @@ -package scala.annotation -package unchecked - -/** An annotation for mutable variables that are allowed to capture - * the root capability `cap`. 
Allowing this is not capture safe since
- * it can cause leakage of capabilities from local scopes by assigning
- * values retaining such capabilities to the annotated variable in
- * an outer scope.
- */
-class uncheckedCaptures extends StaticAnnotation
-
-
diff --git a/library/src/scala/annotation/unchecked/uncheckedCaptures.scala b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala
new file mode 100644
index 000000000000..477ac6d742f7
--- /dev/null
+++ b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala
@@ -0,0 +1,12 @@
+package scala.annotation
+package unchecked
+
+/** An annotation for mutable variables that are allowed to capture
+ *  the root capability `cap`. Allowing this is not capture safe since
+ *  it can cause leakage of capabilities from local scopes by assigning
+ *  values retaining such capabilities to the annotated variable in
+ *  an outer scope.
+ */
+class uncheckedCaptures extends StaticAnnotation
+
+

From 6b91167a6b4d2de79afed8a9aa9326c8154f1b69 Mon Sep 17 00:00:00 2001
From: odersky
Date: Mon, 30 Oct 2023 18:30:51 +0100
Subject: [PATCH 045/216] Also count @Sealed annotated abstract types as sealed

Also count abstract types that have a @Sealed annotation on their bound
as sealed. That way, we get free propagation into synthesized type
parameters.

We should probably unify this scheme and `sealed` modifiers.
---
 compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
index ad216c47d2f5..e4c6b60bb894 100644
--- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
+++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
@@ -163,7 +163,8 @@ object CheckCaptures:
           capt.println(i"disallow $t, $tp, $what, ${t.symbol.is(Sealed)}")
           t.info match
             case TypeBounds(_, hi)
-            if !t.symbol.is(Sealed) && !t.symbol.isParametricIn(carrier) =>
+            if !t.symbol.is(Sealed) && !hi.hasAnnotation(defn.Caps_SealedAnnot)
+                && !t.symbol.isParametricIn(carrier) =>
               if hi.isAny then
                 report.error(
                   em"""$what cannot $have $tp since

From d3876ad804f5a0306fae2cc57ad8a56a3744e5c1 Mon Sep 17 00:00:00 2001
From: odersky
Date: Mon, 30 Oct 2023 18:38:29 +0100
Subject: [PATCH 046/216] Add ArrayBuffer and GrowableBuilder to stdlib

---
 .../captures/sealed-lowerbound.scala          | 12 ++++++
 .../stdlib/collection/IterableOnce.scala      |  6 +--
 .../stdlib/collection/Iterator.scala          |  5 ++-
 tests/pos-special/stdlib/collection/Seq.scala |  4 +-
 .../pos-special/stdlib/collection/View.scala  |  2 +-
 .../collection/mutable/ArrayBuffer.scala      | 31 +++++++++-------
 .../stdlib/collection/mutable/Buffer.scala    |  2 +-
 .../collection/mutable/GrowableBuilder.scala  | 37 +++++++++++++++++++
 8 files changed, 78 insertions(+), 21 deletions(-)
 create mode 100644 tests/pos-custom-args/captures/sealed-lowerbound.scala
 create mode 100644 tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala

diff --git a/tests/pos-custom-args/captures/sealed-lowerbound.scala b/tests/pos-custom-args/captures/sealed-lowerbound.scala
new file mode 100644
index 000000000000..e848f784cddc
--- /dev/null
+++ b/tests/pos-custom-args/captures/sealed-lowerbound.scala
@@ -0,0 +1,12 @@
+def foo[sealed B](x: B): B = x
+
+def bar[B, sealed A >: B](x: A): A = foo[A](x)
+
+class C[sealed A]
+
+class CV[sealed A](x: Int):
+  def this() = this:
+    val x = new C[A]:
+      println("foo")
+    0
+
diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala 
b/tests/pos-special/stdlib/collection/IterableOnce.scala
index 6836a3bac39a..3886d7dccb95 100644
--- a/tests/pos-special/stdlib/collection/IterableOnce.scala
+++ b/tests/pos-special/stdlib/collection/IterableOnce.scala
@@ -162,7 +162,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext
   def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(it)
 
   @deprecated("Use .iterator.to(ArrayBuffer) instead", "2.13.0")
-  def toBuffer[B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it)
+  def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it)
 
   @deprecated("Use .iterator.toArray", "2.13.0")
   def toArray[B >: A: ClassTag]: Array[B] = it match {
@@ -272,7 +272,7 @@ object IterableOnce {
     math.max(math.min(math.min(len, srcLen), destLen - start), 0)
 
   /** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. */
-  @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A],
+  @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A]^,
                                                               xs: Array[B],
                                                               start: Int = 0,
                                                               len: Int = Int.MaxValue): Int =
@@ -1312,7 +1312,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ =>
   @deprecated("Use .to(LazyList) instead of .toStream", "2.13.0")
   @`inline` final def toStream: immutable.Stream[A] = to(immutable.Stream)
 
-  @`inline` final def toBuffer[B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this)
+  @`inline` final def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this)
 
   /** Convert collection to array.
    *
diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala
index ecd8d985bbf0..0c96aa35501b 100644
--- a/tests/pos-special/stdlib/collection/Iterator.scala
+++ b/tests/pos-special/stdlib/collection/Iterator.scala
@@ -18,6 +18,7 @@ import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures}
 import scala.runtime.Statics
 import language.experimental.captureChecking
 import caps.unsafe.unsafeAssumePure
+import annotation.unchecked.uncheckedCaptures
 
 
 /** Iterators are data structures that allow to iterate over a sequence
@@ -416,7 +417,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
   }
 
   @deprecated("Call scanRight on an Iterable instead.", "2.13.0")
-  def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = ArrayBuffer.from(this).scanRight(z)(op).iterator
+  def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} =
+    ArrayBuffer.from[A @uncheckedCaptures](this).scanRight(z)(op).iterator
+    // @uncheckedCaptures is safe since the ArrayBuffer is local temporary storage
 
   def indexWhere(p: A => Boolean, from: Int = 0): Int = {
     var i = math.max(from, 0)
diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala
index 81f39b3538cf..05b315c49a8c 100644
--- a/tests/pos-special/stdlib/collection/Seq.scala
+++ b/tests/pos-special/stdlib/collection/Seq.scala
@@ -18,6 +18,7 @@ import Searching.{Found, InsertionPoint, SearchResult}
 import scala.annotation.nowarn
 import language.experimental.captureChecking
 import caps.unsafe.unsafeAssumePure
+import scala.annotation.unchecked.uncheckedCaptures
 
 /** Base trait for sequence collections
  *
@@ -598,7 +599,8 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self =>
 
       if (!hasNext)
        Iterator.empty.next()
 
-      val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
+      val forcedElms = new mutable.ArrayBuffer[A 
@uncheckedCaptures](elms.size) ++= elms + // uncheckedCaptures OK since used only locally val result = (newSpecificBuilder ++= forcedElms).result() var i = idxs.length - 2 while(i >= 0 && idxs(i) >= idxs(i+1)) diff --git a/tests/pos-special/stdlib/collection/View.scala b/tests/pos-special/stdlib/collection/View.scala index 85910311a4c3..d91fc0c49939 100644 --- a/tests/pos-special/stdlib/collection/View.scala +++ b/tests/pos-special/stdlib/collection/View.scala @@ -78,7 +78,7 @@ object View extends IterableFactory[View] { def empty[A]: View[A] = Empty - def newBuilder[A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) + def newBuilder[sealed A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) override def apply[A](xs: A*): View[A] = new Elems(xs: _*) diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala index e3ddeb71ef8e..e0b3edc54690 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala @@ -20,6 +20,8 @@ import scala.annotation.nowarn import scala.annotation.tailrec import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** An implementation of the `Buffer` class using an array to * represent the assembled sequence internally. Append, update and random @@ -40,7 +42,7 @@ import scala.collection.generic.DefaultSerializable * @define willNotTerminateInf */ @SerialVersionUID(-1582447879429021880L) -class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) +class ArrayBuffer[sealed A] private (initialElements: Array[AnyRef], initialSize: Int) extends AbstractBuffer[A] with IndexedBuffer[A] with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] @@ -151,7 +153,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) } // Overridden to use array copying for efficiency where possible. 
- override def addAll(elems: IterableOnce[A]): this.type = { + override def addAll(elems: IterableOnce[A]^): this.type = { elems match { case elems: ArrayBuffer[_] => val elemsLength = elems.size0 @@ -180,7 +182,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) this } - def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]): Unit = { + def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]^): Unit = { checkWithinBounds(index, index) elems match { case elems: collection.Iterable[A] => @@ -234,7 +236,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") - @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo] = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) + @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo]^{f} = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix = "ArrayBuffer" @@ -291,7 +293,7 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { final val DefaultInitialSize = 16 private[this] val emptyArray = new Array[AnyRef](0) - def from[B](coll: collection.IterableOnce[B]): ArrayBuffer[B] = { + def from[sealed B](coll: collection.IterableOnce[B]^): ArrayBuffer[B] = { val k = coll.knownSize if (k >= 0) { // Avoid reallocation of buffer if length is known @@ -303,12 +305,12 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { else new ArrayBuffer[B] ++= coll } - def newBuilder[A]: Builder[A, ArrayBuffer[A]] = + def newBuilder[sealed A]: Builder[A, ArrayBuffer[A]] = new GrowableBuilder[A, ArrayBuffer[A]](empty) { override def sizeHint(size: Int): Unit = elems.ensureSize(size) } - def empty[A]: ArrayBuffer[A] = new ArrayBuffer[A]() + def empty[sealed A]: ArrayBuffer[A] = new ArrayBuffer[A]() /** * @param arrayLen the length of the backing array @@ -357,22 +359,23 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { } // TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` -final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () => Int) - extends AbstractIndexedSeqView[A] { +final class ArrayBufferView[sealed A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () -> Int) + extends AbstractIndexedSeqView[A], Pure { + /* Removed since it poses problems for capture checking @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") def this(array: Array[AnyRef], length: Int) = { // this won't actually track mutation, but it would be a pain to have the implementation // check if we have a method to get the current mutation count or not on every method and // change what it does based on that. hopefully no one ever calls this. 
this({ - val _array = array + val _array: Array[Object] = array val _length = length new ArrayBuffer[A](0) { this.array = _array this.size0 = _length - } + }: ArrayBuffer[A] }, () => 0) - } + }*/ @deprecated("never intended to be public", since = "2.13.7") def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] @@ -392,10 +395,10 @@ final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], muta override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) - override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B]^{f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) - override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala index 0a70c75bac0c..f5e6ce66c99a 100644 --- a/tests/pos-special/stdlib/collection/mutable/Buffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala @@ -18,7 +18,7 @@ import language.experimental.captureChecking /** A `Buffer` is a growable and shrinkable `Seq`. */ -trait Buffer[A] +trait Buffer[sealed A] extends Seq[A] with SeqOps[A, Buffer, Buffer[A]] with Growable[A] diff --git a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala new file mode 100644 index 000000000000..4d6f989e6f3d --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala @@ -0,0 +1,37 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable +import language.experimental.captureChecking + +/** The canonical builder for collections that are growable, i.e. that support an + * efficient `+=` method which adds an element to the collection. + * + * GrowableBuilders can produce only a single instance of the collection they are growing. 
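+ *
+ * A sketch of typical use (hypothetical values):
+ * {{{
+ *   val b = new GrowableBuilder(ArrayBuffer.empty[Int])
+ *   b.addOne(1).addAll(List(2, 3))
+ *   val buf = b.result()  // the same ArrayBuffer instance, now containing 1, 2, 3
+ * }}}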
+ *
+ * @define Coll `GrowableBuilder`
+ * @define coll growing builder
+ */
+class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To)
+  extends Builder[Elem, To] {
+
+  def clear(): Unit = elems.clear()
+
+  def result(): To = elems
+
+  def addOne(elem: Elem): this.type = { elems += elem; this }
+
+  override def addAll(xs: IterableOnce[Elem]^): this.type = { elems.addAll(xs); this }
+
+  override def knownSize: Int = elems.knownSize
+}

From 75c20583229af6ae5772eecee8a03b193085c59b Mon Sep 17 00:00:00 2001
From: odersky
Date: Mon, 30 Oct 2023 21:26:57 +0100
Subject: [PATCH 047/216] Coarse restriction to disallow local roots in external types

This needs to be refined further for class members, similar to how we
check that private types cannot escape from a class API.
---
 .../dotty/tools/dotc/cc/CheckCaptures.scala   | 16 ++++++++++++++
 tests/neg-custom-args/captures/filevar.scala  |  2 +-
 .../neg-custom-args/captures/localcaps.check  | 12 ++++++++++
 .../neg-custom-args/captures/localcaps.scala  |  2 +-
 tests/neg-custom-args/captures/pairs.check    |  8 +++++++
 tests/neg-custom-args/captures/pairs.scala    |  4 ++--
 .../recursive-leaking-local-cap.scala         | 22 +++++++++++++++++++
 .../captures/sealed-classes.scala             | 21 ++++++++++++++++++
 8 files changed, 83 insertions(+), 4 deletions(-)
 create mode 100644 tests/neg-custom-args/captures/localcaps.check
 create mode 100644 tests/neg-custom-args/captures/recursive-leaking-local-cap.scala
 create mode 100644 tests/neg-custom-args/captures/sealed-classes.scala

diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
index e4c6b60bb894..c7b282b49dba 100644
--- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
+++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
@@ -1301,6 +1301,20 @@ class CheckCaptures extends Recheck, SymTransformer:
       checker.traverse(tree.knownType)
   end healTypeParam
 
+  def checkNoLocalRootIn(sym: Symbol, info: Type, pos: SrcPos)(using Context): Unit =
+    val check = new TypeTraverser:
+      def traverse(tp: Type) = tp match
+        case tp: TermRef if tp.isLocalRootCapability =>
+          if tp.localRootOwner == sym then
+            report.error(i"local root $tp cannot appear in type of $sym", pos)
+        case tp: ClassInfo =>
+          traverseChildren(tp)
+          for mbr <- tp.decls do
+            if !mbr.is(Private) then checkNoLocalRootIn(sym, mbr.info, mbr.srcPos)
+        case _ =>
+          traverseChildren(tp)
+    check.traverse(info)
+
   /** Perform the following kinds of checks
    *  - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`.
    *  - Check that arguments of TypeApplys and AppliedTypes conform to their bounds.
@@ -1324,6 +1338,8 @@ class CheckCaptures extends Recheck, SymTransformer:
             checkBounds(normArgs, tl)
             args.lazyZip(tl.paramNames).foreach(healTypeParam(_, _, fun.symbol))
         case _ =>
+      case _: ValOrDefDef | _: TypeDef =>
+        checkNoLocalRootIn(tree.symbol, tree.symbol.info, tree.symbol.srcPos)
       case _ =>
     end check
   end checker

diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala
index c8280e2ff3b7..34588617c0b8 100644
--- a/tests/neg-custom-args/captures/filevar.scala
+++ b/tests/neg-custom-args/captures/filevar.scala
@@ -5,7 +5,7 @@ class File:
   def write(x: String): Unit = ??? 
class Service: - var file: File^{cap[Service]} = uninitialized + var file: File^{cap[Service]} = uninitialized // error def log = file.write("log") def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T = diff --git a/tests/neg-custom-args/captures/localcaps.check b/tests/neg-custom-args/captures/localcaps.check new file mode 100644 index 000000000000..b09702749d10 --- /dev/null +++ b/tests/neg-custom-args/captures/localcaps.check @@ -0,0 +1,12 @@ +-- Error: tests/neg-custom-args/captures/localcaps.scala:4:12 ---------------------------------------------------------- +4 | def x: C^{cap[d]} = ??? // error + | ^^^^^^ + | `d` does not name an outer definition that represents a capture level +-- Error: tests/neg-custom-args/captures/localcaps.scala:9:47 ---------------------------------------------------------- +9 | private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error + | ^^^^^^^ + | `z2` does not name an outer definition that represents a capture level +-- Error: tests/neg-custom-args/captures/localcaps.scala:6:6 ----------------------------------------------------------- +6 | def y: C^{cap[C]} = ??? // error + | ^ + | local root (cap[C] : caps.Cap) cannot appear in type of class C diff --git a/tests/neg-custom-args/captures/localcaps.scala b/tests/neg-custom-args/captures/localcaps.scala index f5227bfef96b..049a1ee0d775 100644 --- a/tests/neg-custom-args/captures/localcaps.scala +++ b/tests/neg-custom-args/captures/localcaps.scala @@ -3,7 +3,7 @@ class C: def x: C^{cap[d]} = ??? // error - def y: C^{cap[C]} = ??? // ok + def y: C^{cap[C]} = ??? // error private val z = (c0: caps.Cap) => (x: Int) => (c: C^{cap[C]}) => x // ok private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error diff --git a/tests/neg-custom-args/captures/pairs.check b/tests/neg-custom-args/captures/pairs.check index 38712469879f..9d1b3a76e164 100644 --- a/tests/neg-custom-args/captures/pairs.check +++ b/tests/neg-custom-args/captures/pairs.check @@ -12,3 +12,11 @@ | Required: Cap^ ->{d} Unit | | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/pairs.scala:6:8 --------------------------------------------------------------- +6 | def fst: Cap^{cap[Pair]} ->{x} Unit = x // error + | ^ + | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair +-- Error: tests/neg-custom-args/captures/pairs.scala:7:8 --------------------------------------------------------------- +7 | def snd: Cap^{cap[Pair]} ->{y} Unit = y // error + | ^ + | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair diff --git a/tests/neg-custom-args/captures/pairs.scala b/tests/neg-custom-args/captures/pairs.scala index 4fc495d60f95..99b27639f729 100644 --- a/tests/neg-custom-args/captures/pairs.scala +++ b/tests/neg-custom-args/captures/pairs.scala @@ -3,8 +3,8 @@ object Monomorphic2: class Pair(x: Cap => Unit, y: Cap => Unit): - def fst: Cap^{cap[Pair]} ->{x} Unit = x - def snd: Cap^{cap[Pair]} ->{y} Unit = y + def fst: Cap^{cap[Pair]} ->{x} Unit = x // error + def snd: Cap^{cap[Pair]} ->{y} Unit = y // error def test(c: Cap, d: Cap) = def f(x: Cap): Unit = if c == x then () diff --git a/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala new file mode 100644 index 000000000000..0daecafbf9d0 --- /dev/null +++ b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala @@ -0,0 +1,22 @@ +import language.experimental.captureChecking +trait Cap: + def use: Int = 
42 + +def usingCap[sealed T](op: Cap^ => T): T = ??? + +def badTest(): Unit = + def bad(b: Boolean)(c: Cap^): Cap^{cap[bad]} = // error + if b then c + else + val leaked = usingCap[Cap^{cap[bad]}](bad(true)) + leaked.use // boom + c + + usingCap[Unit]: c0 => + bad(false)(c0) + +class Bad: + def foo: Cap^{cap[Bad]} = ??? // error + private def bar: Cap^{cap[Bad]} = ??? // ok + + diff --git a/tests/neg-custom-args/captures/sealed-classes.scala b/tests/neg-custom-args/captures/sealed-classes.scala new file mode 100644 index 000000000000..b8cb0acbf5c5 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-classes.scala @@ -0,0 +1,21 @@ +abstract class C1[A1]: + def set(x: A1): Unit + def get: A1 + +trait Co[+A]: + def get: A + +class C2[sealed A2] extends C1[A2], Co[A2]: // ok + private var x: A2 = ??? + def set(x: A2): Unit = + this.x = x + def get: A2 = x + +class C3[A3] extends C2[A3] // error + +abstract class C4[sealed A4] extends Co[A4] // ok + +abstract class C5[sealed +A5] extends Co[A5] // ok + +abstract class C6[A6] extends C5[A6] // error + From bb0b77434884758a069d4df19552aed4df9c165f Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 15:20:42 +0100 Subject: [PATCH 048/216] Require array element types to be sealed --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 6 ++++ .../src/dotty/tools/dotc/cc/CaptureSet.scala | 1 + .../dotty/tools/dotc/cc/CheckCaptures.scala | 31 ++++++++++++++----- .../dotty/tools/dotc/transform/Recheck.scala | 6 ++-- tests/neg-custom-args/captures/buffers.check | 26 ++++++++++++++++ tests/neg-custom-args/captures/buffers.scala | 30 ++++++++++++++++++ .../stdlib/collection/IterableOnce.scala | 19 ++++++------ .../stdlib/collection/Iterator.scala | 2 +- .../stdlib/collection/SeqView.scala | 3 +- .../stdlib/collection/StringOps.scala | 2 +- .../collection/mutable/ArrayBuffer.scala | 4 +-- .../stdlib/collection/mutable/Buffer.scala | 3 +- .../collection/mutable/StringBuilder.scala | 2 +- 13 files changed, 109 insertions(+), 26 deletions(-) create mode 100644 tests/neg-custom-args/captures/buffers.check create mode 100644 tests/neg-custom-args/captures/buffers.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index dccf07ba199e..0fe79da30ca5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -206,6 +206,12 @@ extension (tp: Type) case _: TypeRef | _: AppliedType => tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot) case _ => false + def isSealed(using Context): Boolean = tp match + case tp: TypeParamRef => tp.underlying.isSealed + case tp: TypeBounds => tp.hi.hasAnnotation(defn.Caps_SealedAnnot) + case tp: TypeRef => tp.symbol.is(Sealed) || tp.info.isSealed // TODO: drop symbol flag? 
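+    // (So a type counts as sealed if its upper bound carries the @Sealed
+    // annotation or its symbol has the Sealed flag; the annotation-on-bound
+    // case is what propagates sealedness into synthesized type parameters.)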
+ case _ => false + /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 2586d449dfd4..7261c760aa01 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -872,6 +872,7 @@ object CaptureSet: upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) if variance > 0 || isExact then upper else if variance < 0 then CaptureSet.empty + else if ctx.mode.is(Mode.Printing) then upper else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") /** Apply `f` to each element in `xs`, and join result sets with `++` */ diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index c7b282b49dba..3e246c754feb 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -148,7 +148,7 @@ object CheckCaptures: val check = new TypeTraverser: extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = - val encl = carrier.owner.enclosingMethodOrClass + val encl = carrier.maybeOwner.enclosingMethodOrClass if encl.isClass then tparam.isParametricIn(encl) else def recur(encl: Symbol): Boolean = @@ -160,11 +160,9 @@ object CheckCaptures: def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => - capt.println(i"disallow $t, $tp, $what, ${t.symbol.is(Sealed)}") + capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") t.info match - case TypeBounds(_, hi) - if !t.symbol.is(Sealed) && !hi.hasAnnotation(defn.Caps_SealedAnnot) - && !t.symbol.isParametricIn(carrier) => + case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => if hi.isAny then report.error( em"""$what cannot $have $tp since @@ -543,8 +541,8 @@ class CheckCaptures extends Recheck, SymTransformer: val TypeApply(fn, args) = tree val polyType = atPhase(thisPhase.prev): fn.tpe.widen.asInstanceOf[TypeLambda] - for case (arg: TypeTree, pinfo, pname) <- args.lazyZip(polyType.paramInfos).lazyZip((polyType.paramNames)) do - if pinfo.bounds.hi.hasAnnotation(defn.Caps_SealedAnnot) then + for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do + if formal.isSealed then def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" disallowRootCapabilitiesIn(arg.knownType, fn.symbol, i"Sealed type variable $pname", "be instantiated to", @@ -1315,6 +1313,23 @@ class CheckCaptures extends Recheck, SymTransformer: traverseChildren(tp) check.traverse(info) + def checkArraysAreSealedIn(tp: Type, pos: SrcPos)(using Context): Unit = + val check = new TypeTraverser: + def traverse(t: Type): Unit = + t match + case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass => + if !(pos.span.isSynthetic && ctx.reporter.errorsReported) then + CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol, + "Array", "have element type", + "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.", + pos) + traverseChildren(t) + case defn.RefinedFunctionOf(rinfo: MethodType) => + traverse(rinfo) + case _ => + traverseChildren(t) + check.traverse(tp) + /** Perform the following kinds of checks * - Check all 
explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. @@ -1340,6 +1355,8 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => case _: ValOrDefDef | _: TypeDef => checkNoLocalRootIn(tree.symbol, tree.symbol.info, tree.symbol.srcPos) + case tree: TypeTree => + checkArraysAreSealedIn(tree.tpe, tree.srcPos) case _ => end check end checker diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 9833b3cf177f..b15a58b98b6f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -596,9 +596,9 @@ abstract class Recheck extends Phase, SymTransformer: /** Show tree with rechecked types instead of the types stored in the `.tpe` field */ override def show(tree: untpd.Tree)(using Context): String = - atPhase(thisPhase) { - super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) - } + atPhase(thisPhase): + withMode(Mode.Printing): + super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) end Recheck /** A class that can be used to test basic rechecking without any customaization */ diff --git a/tests/neg-custom-args/captures/buffers.check b/tests/neg-custom-args/captures/buffers.check new file mode 100644 index 000000000000..cdb7baa852fb --- /dev/null +++ b/tests/neg-custom-args/captures/buffers.check @@ -0,0 +1,26 @@ +-- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------ +11 | var elems: Array[A] = new Array[A](10) // error // error + | ^ + | mutable variable elems cannot have type Array[A] since + | that type refers to the type variable A, which is not sealed. +-- Error: tests/neg-custom-args/captures/buffers.scala:16:38 ----------------------------------------------------------- +16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error + | ^^^^^^^^^^^ + | Sealed type variable A cannot be instantiated to box A^? since + | that type refers to the type variable A, which is not sealed. + | This is often caused by a local capability in an argument of constructor ArrayBuffer + | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/buffers.scala:11:13 ----------------------------------------------------------- +11 | var elems: Array[A] = new Array[A](10) // error // error + | ^^^^^^^^ + | Array cannot have element type A since + | that type refers to the type variable A, which is not sealed. + | Since arrays are mutable, they have to be treated like variables, + | so their element type must be sealed. +-- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------ +22 | val x: Array[A] = new Array[A](10) // error + | ^^^^^^^^ + | Array cannot have element type A since + | that type refers to the type variable A, which is not sealed. + | Since arrays are mutable, they have to be treated like variables, + | so their element type must be sealed. diff --git a/tests/neg-custom-args/captures/buffers.scala b/tests/neg-custom-args/captures/buffers.scala new file mode 100644 index 000000000000..760ddab96ae5 --- /dev/null +++ b/tests/neg-custom-args/captures/buffers.scala @@ -0,0 +1,30 @@ +import reflect.ClassTag + +class Buffer[A] + +class ArrayBuffer[sealed A: ClassTag] extends Buffer[A]: + var elems: Array[A] = new Array[A](10) + def add(x: A): this.type = ??? 
+ def at(i: Int): A = ??? + +class ArrayBufferBAD[A: ClassTag] extends Buffer[A]: + var elems: Array[A] = new Array[A](10) // error // error + def add(x: A): this.type = ??? + def at(i: Int): A = ??? + +object ArrayBuffer: + def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error + elems = xs.toArray + def apply[sealed A: ClassTag](xs: A*) = new ArrayBuffer: + elems = xs.toArray // ok + +class EncapsArray[A: ClassTag]: + val x: Array[A] = new Array[A](10) // error + + + + + + + + diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala b/tests/pos-special/stdlib/collection/IterableOnce.scala index 3886d7dccb95..a88be4943c58 100644 --- a/tests/pos-special/stdlib/collection/IterableOnce.scala +++ b/tests/pos-special/stdlib/collection/IterableOnce.scala @@ -165,7 +165,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) @deprecated("Use .iterator.toArray", "2.13.0") - def toArray[B >: A: ClassTag]: Array[B] = it match { + def toArray[sealed B >: A: ClassTag]: Array[B] = it match { case it: Iterable[B] => it.toArray[B] case _ => it.iterator.toArray[B] } @@ -272,10 +272,11 @@ object IterableOnce { math.max(math.min(math.min(len, srcLen), destLen - start), 0) /** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. */ - @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A]^, - xs: Array[B], - start: Int = 0, - len: Int = Int.MaxValue): Int = + @inline private[collection] def copyElemsToArray[A, sealed B >: A]( + elems: IterableOnce[A]^, + xs: Array[B], + start: Int = 0, + len: Int = Int.MaxValue): Int = elems match { case src: Iterable[A] => src.copyToArray[B](xs, start, len) case src => src.iterator.copyToArray[B](xs, start, len) @@ -889,7 +890,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) + def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. * @@ -906,7 +907,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. * @@ -923,7 +924,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * * @note Reuse: $consumesIterator */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val it = iterator var i = start val end = start + math.min(len, xs.length - start) @@ -1318,7 +1319,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * * Implementation note: DO NOT call [[Array.from]] from this method. 
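+   * (presumably because `Array.from` is itself implemented via `toArray`
+   * for `Iterable` arguments, so calling it from here could recurse)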
*/ - def toArray[B >: A: ClassTag]: Array[B] = + def toArray[sealed B >: A: ClassTag]: Array[B] = if (knownSize >= 0) { val destination = new Array[B](knownSize) copyToArray(destination, 0) diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index 0c96aa35501b..68ea4f76c249 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -259,7 +259,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } // segment must have data, and must be complete unless they allow partial val ok = index > 0 && (partial || index == size) - if (ok) buffer = builder.result().asInstanceOf[Array[B]] + if (ok) buffer = builder.result().asInstanceOf[Array[B @uncheckedCaptures]] else prev = null ok } diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala index f8f4f1e199de..a4ca1143f8b4 100644 --- a/tests/pos-special/stdlib/collection/SeqView.scala +++ b/tests/pos-special/stdlib/collection/SeqView.scala @@ -16,6 +16,7 @@ package collection import scala.annotation.nowarn import language.experimental.captureChecking import caps.unsafe.unsafeAssumePure +import scala.annotation.unchecked.uncheckedCaptures /** !!! Scala 2 difference: Need intermediate trait SeqViewOps to collect the * necessary functionality over which SeqViews are defined, and at the same @@ -195,7 +196,7 @@ object SeqView { // contains items of another type, we'd get a CCE anyway) // - the cast doesn't actually do anything in the runtime because the // type of A is not known and Array[_] is Array[AnyRef] - immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]]) + immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A @uncheckedCaptures]]) } } evaluated = true diff --git a/tests/pos-special/stdlib/collection/StringOps.scala b/tests/pos-special/stdlib/collection/StringOps.scala index f570531def98..3e3e2f8d872e 100644 --- a/tests/pos-special/stdlib/collection/StringOps.scala +++ b/tests/pos-special/stdlib/collection/StringOps.scala @@ -964,7 +964,7 @@ final class StringOps(private val s: String) extends AnyVal { else if (s.equalsIgnoreCase("false")) false else throw new IllegalArgumentException("For input string: \""+s+"\"") - def toArray[B >: Char](implicit tag: ClassTag[B]): Array[B] = + def toArray[sealed B >: Char](implicit tag: ClassTag[B]): Array[B] = if (tag == ClassTag.Char) s.toCharArray.asInstanceOf[Array[B]] else new WrappedString(s).toArray[B] diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala index e0b3edc54690..8fa1e6edd566 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala @@ -241,7 +241,7 @@ class ArrayBuffer[sealed A] private (initialElements: Array[AnyRef], initialSize @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix = "ArrayBuffer" - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if(copied > 0) { Array.copy(array, 0, xs, start, copied) @@ -258,7 +258,7 @@ class ArrayBuffer[sealed A] private (initialElements: Array[AnyRef], initialSize override def sortInPlace[B >: 
A]()(implicit ord: Ordering[B]): this.type = { if (length > 1) { mutationCount += 1 - scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]], 0, length) } this } diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala index f5e6ce66c99a..0f472dc9ac82 100644 --- a/tests/pos-special/stdlib/collection/mutable/Buffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala @@ -15,6 +15,7 @@ package mutable import scala.annotation.nowarn import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** A `Buffer` is a growable and shrinkable `Seq`. */ @@ -185,7 +186,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] // There's scope for a better implementation which copies elements in place. var i = 0 val s = size - val newElems = new Array[IterableOnce[A]^](s) + val newElems = new Array[(IterableOnce[A]^) @uncheckedCaptures](s) while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 diff --git a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala index c7859214821d..5320fa1dabb0 100644 --- a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala @@ -110,7 +110,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr override def toString: String = result() - override def toArray[B >: Char](implicit ct: scala.reflect.ClassTag[B]) = + override def toArray[sealed B >: Char](implicit ct: scala.reflect.ClassTag[B]) = ct.runtimeClass match { case java.lang.Character.TYPE => toCharArray.asInstanceOf[Array[B]] case _ => super.toArray From 6b5e4948b6977591e168692e15a5193e3c86cafb Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 15:26:43 +0100 Subject: [PATCH 049/216] Add ArrayBuilder.scala in unchanged form to stdlib --- .../collection/mutable/ArrayBuilder.scala | 522 ++++++++++++++++++ 1 file changed, 522 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala new file mode 100644 index 000000000000..454527bcdebd --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala @@ -0,0 +1,522 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.reflect.ClassTag + +/** A builder class for arrays. + * + * @tparam T the type of the elements for the builder. 
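+ *
+ *  A sketch of typical use (hypothetical values):
+ *  {{{
+ *  val b = ArrayBuilder.make[Int]
+ *  b += 1
+ *  b.addAll(Array(2, 3))
+ *  val arr: Array[Int] = b.result()  // Array(1, 2, 3)
+ *  }}}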
+ */ +@SerialVersionUID(3L) +sealed abstract class ArrayBuilder[T] + extends ReusableBuilder[T, Array[T]] + with Serializable { + protected[this] var capacity: Int = 0 + protected[this] def elems: Array[T] + protected var size: Int = 0 + + def length: Int = size + + override def knownSize: Int = size + + protected[this] final def ensureSize(size: Int): Unit = { + if (capacity < size || capacity == 0) { + var newsize = if (capacity == 0) 16 else capacity * 2 + while (newsize < size) newsize *= 2 + resize(newsize) + } + } + + override final def sizeHint(size: Int): Unit = + if (capacity < size) resize(size) + + def clear(): Unit = size = 0 + + protected[this] def resize(size: Int): Unit + + /** Add all elements of an array */ + def addAll(xs: Array[_ <: T]): this.type = addAll(xs, 0, xs.length) + + /** Add a slice of an array */ + def addAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = { + ensureSize(this.size + length) + Array.copy(xs, offset, elems, this.size, length) + size += length + this + } + + override def addAll(xs: IterableOnce[T]): this.type = { + val k = xs.knownSize + if (k > 0) { + ensureSize(this.size + k) + val actual = IterableOnce.copyElemsToArray(xs, elems, this.size) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + size += k + } else if (k < 0) super.addAll(xs) + this + } +} + +/** A companion object for array builders. + */ +object ArrayBuilder { + + /** Creates a new arraybuilder of type `T`. + * + * @tparam T type of the elements for the array builder, with a `ClassTag` context bound. + * @return a new empty array builder. + */ + @inline def make[T: ClassTag]: ArrayBuilder[T] = { + val tag = implicitly[ClassTag[T]] + tag.runtimeClass match { + case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]] + case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]] + case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]] + case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]] + case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]] + case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]] + case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]] + case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]] + case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]] + case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] + } + } + + /** A class for array builders for arrays of reference types. + * + * This builder can be reused. + * + * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound. 
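+   *
+   *  (When the builder is exactly full, `result()` hands back the internal
+   *  array directly and drops the builder's own reference to it; otherwise
+   *  it returns a copy of the current contents.)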
+ */ + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](implicit ct: ClassTag[T]) extends ArrayBuilder[T] { + + protected var elems: Array[T] = _ + + private def mkArray(size: Int): Array[T] = { + if (capacity == size && capacity > 0) elems + else if (elems eq null) new Array[T](size) + else java.util.Arrays.copyOf[T](elems, size) + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: T): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[T] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def clear(): Unit = { + super.clear() + if(elems ne null) java.util.Arrays.fill(elems.asInstanceOf[Array[AnyRef]], null) + } + + override def equals(other: Any): Boolean = other match { + case x: ofRef[_] => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofRef" + } + + /** A class for array builders for arrays of `byte`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofByte extends ArrayBuilder[Byte] { + + protected var elems: Array[Byte] = _ + + private def mkArray(size: Int): Array[Byte] = { + val newelems = new Array[Byte](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Byte): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Byte] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofByte => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofByte" + } + + /** A class for array builders for arrays of `short`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofShort extends ArrayBuilder[Short] { + + protected var elems: Array[Short] = _ + + private def mkArray(size: Int): Array[Short] = { + val newelems = new Array[Short](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Short): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Short] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofShort => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofShort" + } + + /** A class for array builders for arrays of `char`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofChar extends ArrayBuilder[Char] { + + protected var elems: Array[Char] = _ + + private def mkArray(size: Int): Array[Char] = { + val newelems = new Array[Char](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Char): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Char] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofChar => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofChar" + } + + /** A class for array builders for arrays of `int`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofInt extends ArrayBuilder[Int] { + + protected var elems: Array[Int] = _ + + private def mkArray(size: Int): Array[Int] = { + val newelems = new Array[Int](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Int): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Int] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofInt => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofInt" + } + + /** A class for array builders for arrays of `long`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofLong extends ArrayBuilder[Long] { + + protected var elems: Array[Long] = _ + + private def mkArray(size: Int): Array[Long] = { + val newelems = new Array[Long](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Long): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Long] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofLong => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofLong" + } + + /** A class for array builders for arrays of `float`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofFloat extends ArrayBuilder[Float] { + + protected var elems: Array[Float] = _ + + private def mkArray(size: Int): Array[Float] = { + val newelems = new Array[Float](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Float): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Float] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofFloat => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofFloat" + } + + /** A class for array builders for arrays of `double`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofDouble extends ArrayBuilder[Double] { + + protected var elems: Array[Double] = _ + + private def mkArray(size: Int): Array[Double] = { + val newelems = new Array[Double](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Double): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Double] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofDouble => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofDouble" + } + + /** A class for array builders for arrays of `boolean`s. It can be reused. */ + @SerialVersionUID(3L) + class ofBoolean extends ArrayBuilder[Boolean] { + + protected var elems: Array[Boolean] = _ + + private def mkArray(size: Int): Array[Boolean] = { + val newelems = new Array[Boolean](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Boolean): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Boolean] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofBoolean => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofBoolean" + } + + /** A class for array builders for arrays of `Unit` type. It can be reused. 
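+   *
+   *  Only a count is kept: no element storage exists until `result()` builds a
+   *  fresh array, e.g. (illustrative):
+   *  {{{
+   *    val b = new ArrayBuilder.ofUnit
+   *    b.addAll(Iterator.fill(3)(()))
+   *    b.result().length // 3
+   *  }}}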
*/ + @SerialVersionUID(3L) + final class ofUnit extends ArrayBuilder[Unit] { + + protected def elems: Array[Unit] = throw new UnsupportedOperationException() + + def addOne(elem: Unit): this.type = { + size += 1 + this + } + + override def addAll(xs: IterableOnce[Unit]): this.type = { + size += xs.iterator.size + this + } + + override def addAll(xs: Array[_ <: Unit], offset: Int, length: Int): this.type = { + size += length + this + } + + def result() = { + val ans = new Array[Unit](size) + var i = 0 + while (i < size) { ans(i) = (); i += 1 } + ans + } + + override def equals(other: Any): Boolean = other match { + case x: ofUnit => (size == x.size) + case _ => false + } + + protected[this] def resize(size: Int): Unit = () + + override def toString = "ArrayBuilder.ofUnit" + } +} From 609549c43875e3c40c6a1a99f826f76e67c348c1 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 15:28:59 +0100 Subject: [PATCH 050/216] Make ArrayBuilder capture checked --- .../stdlib/collection/mutable/ArrayBuilder.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala index 454527bcdebd..0620d3d23061 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala @@ -13,6 +13,7 @@ package scala.collection package mutable +import language.experimental.captureChecking import scala.reflect.ClassTag /** A builder class for arrays. @@ -20,7 +21,7 @@ import scala.reflect.ClassTag * @tparam T the type of the elements for the builder. */ @SerialVersionUID(3L) -sealed abstract class ArrayBuilder[T] +sealed abstract class ArrayBuilder[sealed T] extends ReusableBuilder[T, Array[T]] with Serializable { protected[this] var capacity: Int = 0 @@ -57,7 +58,7 @@ sealed abstract class ArrayBuilder[T] this } - override def addAll(xs: IterableOnce[T]): this.type = { + override def addAll(xs: IterableOnce[T]^): this.type = { val k = xs.knownSize if (k > 0) { ensureSize(this.size + k) @@ -493,7 +494,7 @@ object ArrayBuilder { this } - override def addAll(xs: IterableOnce[Unit]): this.type = { + override def addAll(xs: IterableOnce[Unit]^): this.type = { size += xs.iterator.size this } From d76f092e26ba3233a00df1b4e93d99a46dae4327 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 15:30:11 +0100 Subject: [PATCH 051/216] Add ArrayDeque.scala in unchanged for to stdlib --- .../collection/mutable/ArrayDeque.scala | 645 ++++++++++++++++++ 1 file changed, 645 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala new file mode 100644 index 000000000000..205e1607f824 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala @@ -0,0 +1,645 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.reflect.ClassTag + +/** An implementation of a double-ended queue that internally uses a resizable circular buffer. + * + * Append, prepend, removeHead, removeLast and random-access (indexed-lookup and indexed-replacement) + * take amortized constant time. In general, removals and insertions at i-th index are O(min(i, n-i)) + * and thus insertions and removals from end/beginning are fast. + * + * @note Subclasses ''must'' override the `ofArray` protected method to return a more specific type. + * + * @tparam A the type of this ArrayDeque's elements. + * + * @define Coll `mutable.ArrayDeque` + * @define coll array deque + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class ArrayDeque[A] protected ( + protected var array: Array[AnyRef], + private[ArrayDeque] var start: Int, + private[ArrayDeque] var end: Int +) extends AbstractBuffer[A] + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with StrictOptimizedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with IterableFactoryDefaults[A, ArrayDeque] + with ArrayDequeOps[A, ArrayDeque, ArrayDeque[A]] + with Cloneable[ArrayDeque[A]] + with DefaultSerializable { + + reset(array, start, end) + + private[this] def reset(array: Array[AnyRef], start: Int, end: Int) = { + assert((array.length & (array.length - 1)) == 0, s"Array.length must be power of 2") + requireBounds(idx = start, until = array.length) + requireBounds(idx = end, until = array.length) + this.array = array + this.start = start + this.end = end + } + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + // No-Op override to allow for more efficient stepper in a minor release. 
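+  // (Kept as a plain forwarder on purpose, presumably so that only this method
+  //  body has to change if a more efficient stepper lands later, leaving the
+  //  public API untouched.)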
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = super.stepper(shape) + + def apply(idx: Int): A = { + requireBounds(idx) + _get(idx) + } + + def update(idx: Int, elem: A): Unit = { + requireBounds(idx) + _set(idx, elem) + } + + def addOne(elem: A): this.type = { + ensureSize(length + 1) + appendAssumingCapacity(elem) + } + + def prepend(elem: A): this.type = { + ensureSize(length + 1) + prependAssumingCapacity(elem) + } + + @inline private[ArrayDeque] def appendAssumingCapacity(elem: A): this.type = { + array(end) = elem.asInstanceOf[AnyRef] + end = end_+(1) + this + } + + @inline private[ArrayDeque] def prependAssumingCapacity(elem: A): this.type = { + start = start_-(1) + array(start) = elem.asInstanceOf[AnyRef] + this + } + + override def prependAll(elems: IterableOnce[A]): this.type = { + val it = elems.iterator + if (it.nonEmpty) { + val n = length + // The following code resizes the current collection at most once and traverses elems at most twice + elems.knownSize match { + // Size is too expensive to compute AND we can traverse it only once - can't do much but retry with an IndexedSeq + case srcLength if srcLength < 0 => prependAll(it.to(IndexedSeq: Factory[A, IndexedSeq[A]] /* type ascription needed by Dotty */)) + + // We know for sure we need to resize to hold everything, might as well resize and memcopy upfront + case srcLength if mustGrow(srcLength + n) => + val finalLength = srcLength + n + val array2 = ArrayDeque.alloc(finalLength) + it.copyToArray(array2.asInstanceOf[Array[A]]) + copySliceToArray(srcStart = 0, dest = array2, destStart = srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + + // Just fill up from (start - srcLength) to (start - 1) and move back start + case srcLength => + // Optimized version of `elems.zipWithIndex.foreach((elem, i) => _set(i - srcLength, elem))` + var i = 0 + while(i < srcLength) { + _set(i - srcLength, it.next()) + i += 1 + } + start = start_-(srcLength) + } + } + this + } + + override def addAll(elems: IterableOnce[A]): this.type = { + elems.knownSize match { + case srcLength if srcLength > 0 => + ensureSize(srcLength + length) + elems.iterator.foreach(appendAssumingCapacity) + case _ => elems.iterator.foreach(+=) + } + this + } + + def insert(idx: Int, elem: A): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prepend(elem) + } else if (idx == n) { + addOne(elem) + } else { + val finalLength = n + 1 + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + array2(idx) = elem.asInstanceOf[AnyRef] + copySliceToArray(srcStart = idx, dest = array2, destStart = idx + 1, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (n <= idx * 2) { + var i = n - 1 + while(i >= idx) { + _set(i + 1, _get(i)) + i -= 1 + } + end = end_+(1) + i += 1 + _set(i, elem) + } else { + var i = 0 + while(i < idx) { + _set(i - 1, _get(i)) + i += 1 + } + start = start_-(1) + _set(i, elem) + } + } + } + + def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prependAll(elems) + } else if (idx == n) { + addAll(elems) + } else { + // Get both an iterator and the length of the source (by copying the source to an IndexedSeq if needed) + val (it, srcLength) = { + val _srcLength = elems.knownSize + if (_srcLength >= 0) (elems.iterator, _srcLength) + else { + val indexed = 
IndexedSeq.from(elems) + (indexed.iterator, indexed.size) + } + } + if (it.nonEmpty) { + val finalLength = srcLength + n + // Either we resize right away or move prefix left or suffix right + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + it.copyToArray(array2.asInstanceOf[Array[A]], idx) + copySliceToArray(srcStart = idx, dest = array2, destStart = idx + srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx >= n) { // Cheaper to shift the suffix right + var i = n - 1 + while(i >= idx) { + _set(i + srcLength, _get(i)) + i -= 1 + } + end = end_+(srcLength) + while(it.hasNext) { + i += 1 + _set(i, it.next()) + } + } else { // Cheaper to shift prefix left + var i = 0 + while(i < idx) { + _set(i - srcLength, _get(i)) + i += 1 + } + start = start_-(srcLength) + while(it.hasNext) { + _set(i, it.next()) + i += 1 + } + } + } + } + } + + def remove(idx: Int, count: Int): Unit = { + if (count > 0) { + requireBounds(idx) + val n = length + val removals = Math.min(n - idx, count) + val finalLength = n - removals + val suffixStart = idx + removals + // If we know we can resize after removing, do it right away using arrayCopy + // Else, choose the shorter: either move the prefix (0 until idx) right OR the suffix (idx+removals until n) left + if (shouldShrink(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + copySliceToArray(srcStart = suffixStart, dest = array2, destStart = idx, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx <= finalLength) { // Cheaper to move the prefix right + var i = suffixStart - 1 + while(i >= removals) { + _set(i, _get(i - removals)) + i -= 1 + } + while(i >= 0) { + _set(i, null.asInstanceOf[A]) + i -= 1 + } + start = start_+(removals) + } else { // Cheaper to move the suffix left + var i = idx + while(i < finalLength) { + _set(i, _get(i + removals)) + i += 1 + } + while(i < n) { + _set(i, null.asInstanceOf[A]) + i += 1 + } + end = end_-(removals) + } + } else { + require(count == 0, s"removing negative number of elements: $count") + } + } + + def remove(idx: Int): A = { + val elem = this(idx) + remove(idx, 1) + elem + } + + override def subtractOne(elem: A): this.type = { + val idx = indexOf(elem) + if (idx >= 0) remove(idx, 1) //TODO: SeqOps should be fluent API + this + } + + /** + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @return + */ + def removeHeadOption(resizeInternalRepr: Boolean = false): Option[A] = + if (isEmpty) None else Some(removeHeadAssumingNonEmpty(resizeInternalRepr)) + + /** + * Unsafely remove the first element (throws exception when empty) + * See also removeHeadOption() + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @throws NoSuchElementException when empty + * @return + */ + def removeHead(resizeInternalRepr: Boolean = false): A = + if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeHeadAssumingNonEmpty(resizeInternalRepr) + + @inline private[this] def removeHeadAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { + val elem = array(start) + array(start) = null + start = start_+(1) + if (resizeInternalRepr) resize(length) + elem.asInstanceOf[A] + } + + /** + * + * @param resizeInternalRepr If 
this is set, resize the internal representation to reclaim space once in a while + * @return + */ + def removeLastOption(resizeInternalRepr: Boolean = false): Option[A] = + if (isEmpty) None else Some(removeLastAssumingNonEmpty(resizeInternalRepr)) + + /** + * Unsafely remove the last element (throws exception when empty) + * See also removeLastOption() + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @throws NoSuchElementException when empty + * @return + */ + def removeLast(resizeInternalRepr: Boolean = false): A = + if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeLastAssumingNonEmpty(resizeInternalRepr) + + @`inline` private[this] def removeLastAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { + end = end_-(1) + val elem = array(end) + array(end) = null + if (resizeInternalRepr) resize(length) + elem.asInstanceOf[A] + } + + /** + * Remove all elements from this collection and return the elements while emptying this data structure + * @return + */ + def removeAll(): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + elems.sizeHint(length) + while(nonEmpty) { + elems += removeHeadAssumingNonEmpty() + } + elems.result() + } + + /** + * Remove all elements from this collection and return the elements in reverse while emptying this data structure + * @return + */ + def removeAllReverse(): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + elems.sizeHint(length) + while(nonEmpty) { + elems += removeLastAssumingNonEmpty() + } + elems.result() + } + + /** + * Returns and removes all elements from the left of this queue which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return + */ + def removeHeadWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + while(headOption.exists(f)) { + elems += removeHeadAssumingNonEmpty() + } + elems.result() + } + + /** + * Returns and removes all elements from the right of this queue which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return + */ + def removeLastWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + while(lastOption.exists(f)) { + elems += removeLastAssumingNonEmpty() + } + elems.result() + } + + /** Returns the first element which satisfies the given predicate after or at some start index + * and removes this element from the collections + * + * @param p the predicate used for choosing the first element + * @param from the start index + * @return the first element of the queue for which p yields true + */ + def removeFirst(p: A => Boolean, from: Int = 0): Option[A] = { + val i = indexWhere(p, from) + if (i < 0) None else Some(remove(i)) + } + + /** Returns all elements in this collection which satisfy the given predicate + * and removes those elements from this collections. + * + * @param p the predicate used for choosing elements + * @return a sequence of all elements in the queue for which + * p yields true. 
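+   *
+   * Example (illustrative):
+   * {{{
+   *   val dq = ArrayDeque(1, 2, 3, 4)
+   *   dq.removeAll(_ % 2 == 0) // returns Seq(2, 4); dq now contains 1, 3
+   * }}}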
+ */ + def removeAll(p: A => Boolean): scala.collection.immutable.Seq[A] = { + val res = scala.collection.immutable.Seq.newBuilder[A] + var i, j = 0 + while (i < size) { + if (p(this(i))) { + res += this(i) + } else { + if (i != j) { + this(j) = this(i) + } + j += 1 + } + i += 1 + } + if (i != j) takeInPlace(j) + res.result() + } + + @inline def ensureSize(hint: Int) = if (hint > length && mustGrow(hint)) resize(hint) + + def length = end_-(start) + + override def isEmpty = start == end + + override protected def klone(): ArrayDeque[A] = new ArrayDeque(array.clone(), start = start, end = end) + + override def iterableFactory: SeqFactory[ArrayDeque] = ArrayDeque + + /** + * Note: This does not actually resize the internal representation. + * See clearAndShrink if you want to also resize internally + */ + def clear(): Unit = { + while(nonEmpty) { + removeHeadAssumingNonEmpty() + } + } + + /** + * Clears this buffer and shrinks to @param size + * + * @param size + * @return + */ + def clearAndShrink(size: Int = ArrayDeque.DefaultInitialSize): this.type = { + reset(array = ArrayDeque.alloc(size), start = 0, end = 0) + this + } + + protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] = + new ArrayDeque[A](array, start = 0, end) + + override def copyToArray[B >: A](dest: Array[B], destStart: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len) + if (copied > 0) { + copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len) + } + copied + } + + override def toArray[B >: A: ClassTag]: Array[B] = + copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length) + + /** + * Trims the capacity of this ArrayDeque's instance to be the current size + */ + def trimToSize(): Unit = resize(length) + + // Utils for common modular arithmetic: + @inline protected def start_+(idx: Int) = (start + idx) & (array.length - 1) + @inline private[this] def start_-(idx: Int) = (start - idx) & (array.length - 1) + @inline private[this] def end_+(idx: Int) = (end + idx) & (array.length - 1) + @inline private[this] def end_-(idx: Int) = (end - idx) & (array.length - 1) + + // Note: here be overflow dragons! This is used for int overflow + // assumptions in resize(). Use caution changing. + @inline private[this] def mustGrow(len: Int) = { + len >= array.length + } + + // Assumes that 0 <= len < array.length! + @inline private[this] def shouldShrink(len: Int) = { + // To avoid allocation churn, only shrink when array is large + // and less than 2/5 filled. + array.length > ArrayDeque.StableSize && array.length - len - (len >> 1) > len + } + + // Assumes that 0 <= len < array.length! + @inline private[this] def canShrink(len: Int) = { + array.length > ArrayDeque.DefaultInitialSize && array.length - len > len + } + + @inline private[this] def _get(idx: Int): A = array(start_+(idx)).asInstanceOf[A] + + @inline private[this] def _set(idx: Int, elem: A) = array(start_+(idx)) = elem.asInstanceOf[AnyRef] + + // Assumes that 0 <= len. 
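+  // Illustrative numbers for the hysteresis above: with array.length == 256,
+  // shouldShrink(len) holds iff 256 - len - len/2 > len, i.e. len < ~102
+  // (2/5 of 256); resize would then reallocate via alloc(len), the next
+  // power of 2 above len (e.g. alloc(100) yields a 128-slot array).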
+ private[this] def resize(len: Int) = if (mustGrow(len) || canShrink(len)) { + val n = length + val array2 = copySliceToArray(srcStart = 0, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = n) + reset(array = array2, start = 0, end = n) + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ArrayDeque" +} + +/** + * $factoryInfo + * @define coll array deque + * @define Coll `ArrayDeque` + */ +@SerialVersionUID(3L) +object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { + + def from[B](coll: collection.IterableOnce[B]): ArrayDeque[B] = { + val s = coll.knownSize + if (s >= 0) { + val array = alloc(s) + val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) + if (actual != s) throw new IllegalStateException(s"Copied $actual of $s") + new ArrayDeque[B](array, start = 0, end = s) + } else new ArrayDeque[B]() ++= coll + } + + def newBuilder[A]: Builder[A, ArrayDeque[A]] = + new GrowableBuilder[A, ArrayDeque[A]](empty) { + override def sizeHint(size: Int): Unit = { + elems.ensureSize(size) + } + } + + def empty[A]: ArrayDeque[A] = new ArrayDeque[A]() + + final val DefaultInitialSize = 16 + + /** + * We try to not repeatedly resize arrays smaller than this + */ + private[ArrayDeque] final val StableSize = 128 + + /** + * Allocates an array whose size is next power of 2 > `len` + * Largest possible len is 1<<30 - 1 + * + * @param len + * @return + */ + private[mutable] def alloc(len: Int) = { + require(len >= 0, s"Non-negative array size required") + val size = (1 << 31) >>> java.lang.Integer.numberOfLeadingZeros(len) << 1 + require(size >= 0, s"ArrayDeque too big - cannot allocate ArrayDeque of length $len") + new Array[AnyRef](Math.max(size, DefaultInitialSize)) + } +} + +trait ArrayDequeOps[A, +CC[_], +C <: AnyRef] extends StrictOptimizedSeqOps[A, CC, C] { + protected def array: Array[AnyRef] + + final override def clone(): C = klone() + + protected def klone(): C + + protected def ofArray(array: Array[AnyRef], end: Int): C + + protected def start_+(idx: Int): Int + + @inline protected final def requireBounds(idx: Int, until: Int = length): Unit = + if (idx < 0 || idx >= until) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${until-1})") + + /** + * This is a more general version of copyToArray - this also accepts a srcStart unlike copyToArray + * This copies maxItems elements from this collections srcStart to dest's destStart + * If we reach the end of either collections before we could copy maxItems, we simply stop copying + * + * @param dest + * @param srcStart + * @param destStart + * @param maxItems + */ + def copySliceToArray(srcStart: Int, dest: Array[_], destStart: Int, maxItems: Int): dest.type = { + requireBounds(destStart, dest.length+1) + val toCopy = Math.min(maxItems, Math.min(length - srcStart, dest.length - destStart)) + if (toCopy > 0) { + requireBounds(srcStart) + val startIdx = start_+(srcStart) + val block1 = Math.min(toCopy, array.length - startIdx) + Array.copy(src = array, srcPos = startIdx, dest = dest, destPos = destStart, length = block1) + val block2 = toCopy - block1 + if (block2 > 0) Array.copy(src = array, srcPos = 0, dest = dest, destPos = destStart + block1, length = block2) + } + dest + } + + override def reverse: C = { + val n = length + val arr = ArrayDeque.alloc(n) + var i = 0 + while(i < n) { + arr(i) = this(n - i - 1).asInstanceOf[AnyRef] + i += 1 + } + ofArray(arr, n) + } + + override def slice(from: Int, 
until: Int): C = { + val n = length + val left = Math.max(0, Math.min(n, from)) + val right = Math.max(0, Math.min(n, until)) + val len = right - left + if (len <= 0) { + empty + } else if (len >= n) { + klone() + } else { + val array2 = copySliceToArray(srcStart = left, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = len) + ofArray(array2, len) + } + } + + override def sliding(window: Int, step: Int): Iterator[C] = { + require(window > 0 && step > 0, s"window=$window and step=$step, but both must be positive") + length match { + case 0 => Iterator.empty + case n if n <= window => Iterator.single(slice(0, length)) + case n => + val lag = if (window > step) window - step else 0 + Iterator.range(start = 0, end = n - lag, step = step).map(i => slice(i, i + window)) + } + } + + override def grouped(n: Int): Iterator[C] = sliding(n, n) +} From e33fc7d24c5afdc20108fad41318efe19ce401d7 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 15:33:48 +0100 Subject: [PATCH 052/216] Make ArrayDeque capture checked --- .../collection/mutable/ArrayDeque.scala | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala index 205e1607f824..8c6b059cd8a6 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala @@ -18,6 +18,7 @@ import scala.annotation.nowarn import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable import scala.reflect.ClassTag +import language.experimental.captureChecking /** An implementation of a double-ended queue that internally uses a resizable circular buffer. 
* @@ -36,7 +37,7 @@ import scala.reflect.ClassTag * @define mayNotTerminateInf * @define willNotTerminateInf */ -class ArrayDeque[A] protected ( +class ArrayDeque[sealed A] protected ( protected var array: Array[AnyRef], private[ArrayDeque] var start: Int, private[ArrayDeque] var end: Int @@ -99,7 +100,7 @@ class ArrayDeque[A] protected ( this } - override def prependAll(elems: IterableOnce[A]): this.type = { + override def prependAll(elems: IterableOnce[A]^): this.type = { val it = elems.iterator if (it.nonEmpty) { val n = length @@ -130,7 +131,7 @@ class ArrayDeque[A] protected ( this } - override def addAll(elems: IterableOnce[A]): this.type = { + override def addAll(elems: IterableOnce[A]^): this.type = { elems.knownSize match { case srcLength if srcLength > 0 => ensureSize(srcLength + length) @@ -176,7 +177,7 @@ class ArrayDeque[A] protected ( } } - def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { requireBounds(idx, length+1) val n = length if (idx == 0) { @@ -462,7 +463,7 @@ class ArrayDeque[A] protected ( protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] = new ArrayDeque[A](array, start = 0, end) - override def copyToArray[B >: A](dest: Array[B], destStart: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](dest: Array[B], destStart: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len) if (copied > 0) { copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len) @@ -470,7 +471,7 @@ class ArrayDeque[A] protected ( copied } - override def toArray[B >: A: ClassTag]: Array[B] = + override def toArray[sealed B >: A: ClassTag]: Array[B] = copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length) /** @@ -525,7 +526,7 @@ class ArrayDeque[A] protected ( @SerialVersionUID(3L) object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { - def from[B](coll: collection.IterableOnce[B]): ArrayDeque[B] = { + def from[sealed B](coll: collection.IterableOnce[B]): ArrayDeque[B] = { val s = coll.knownSize if (s >= 0) { val array = alloc(s) @@ -535,14 +536,14 @@ object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { } else new ArrayDeque[B]() ++= coll } - def newBuilder[A]: Builder[A, ArrayDeque[A]] = + def newBuilder[sealed A]: Builder[A, ArrayDeque[A]] = new GrowableBuilder[A, ArrayDeque[A]](empty) { override def sizeHint(size: Int): Unit = { elems.ensureSize(size) } } - def empty[A]: ArrayDeque[A] = new ArrayDeque[A]() + def empty[sealed A]: ArrayDeque[A] = new ArrayDeque[A]() final val DefaultInitialSize = 16 From 37b41b663398899bfc4cf0dacb5fc58b7457addc Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 15:34:45 +0100 Subject: [PATCH 053/216] Add Stepper.scala and ArraySeq.scala in unchanged form to stdlib --- .../stdlib/collection/Stepper.scala | 368 ++++++++++++++++++ .../stdlib/collection/mutable/ArraySeq.scala | 347 +++++++++++++++++ 2 files changed, 715 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/Stepper.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ArraySeq.scala diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala new file mode 100644 index 000000000000..0eeb8a44cb72 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Stepper.scala @@ -0,0 +1,368 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer} +import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator} +import java.{lang => jl} + +import scala.collection.Stepper.EfficientSplit + +/** Steppers exist to enable creating Java streams over Scala collections, see + * [[scala.jdk.StreamConverters]]. Besides that use case, they allow iterating over collections + * holding unboxed primitives (e.g., `Array[Int]`) without boxing the elements. + * + * Steppers have an iterator-like interface with methods `hasStep` and `nextStep()`. The difference + * to iterators - and the reason `Stepper` is not a subtype of `Iterator` - is that there are + * hand-specialized variants of `Stepper` for `Int`, `Long` and `Double` ([[IntStepper]], etc.). + * These enable iterating over collections holding unboxed primitives (e.g., Arrays, + * [[scala.jdk.Accumulator]]s) without boxing the elements. + * + * The selection of primitive types (`Int`, `Long` and `Double`) matches the hand-specialized + * variants of Java Streams ([[java.util.stream.Stream]], [[java.util.stream.IntStream]], etc.) + * and the corresponding Java Spliterators ([[java.util.Spliterator]], [[java.util.Spliterator.OfInt]], etc.). + * + * Steppers can be converted to Scala Iterators, Java Iterators and Java Spliterators. Primitive + * Steppers are converted to the corresponding primitive Java Iterators and Spliterators. + * + * @tparam A the element type of the Stepper + */ +trait Stepper[@specialized(Double, Int, Long) +A] { + /** Check if there's an element available. */ + def hasStep: Boolean + + /** Return the next element and advance the stepper */ + def nextStep(): A + + /** Split this stepper, if applicable. The elements of the current Stepper are split up between + * the resulting Stepper and the current stepper. + * + * May return `null`, in which case the current Stepper yields the same elements as before. + * + * See method `trySplit` in [[java.util.Spliterator]]. + */ + def trySplit(): Stepper[A] + + /** Returns an estimate of the number of elements of this Stepper, or [[Long.MaxValue]]. See + * method `estimateSize` in [[java.util.Spliterator]]. + */ + def estimateSize: Long + + /** Returns a set of characteristics of this Stepper and its elements. See method + * `characteristics` in [[java.util.Spliterator]]. + */ + def characteristics: Int + + /** Returns a [[java.util.Spliterator]] corresponding to this Stepper. + * + * Note that the return type is `Spliterator[_]` instead of `Spliterator[A]` to allow returning + * a [[java.util.Spliterator.OfInt]] (which is a `Spliterator[Integer]`) in the subclass [[IntStepper]] + * (which is a `Stepper[Int]`). + */ + def spliterator[B >: A]: Spliterator[_] + + /** Returns a Java [[java.util.Iterator]] corresponding to this Stepper. + * + * Note that the return type is `Iterator[_]` instead of `Iterator[A]` to allow returning + * a [[java.util.PrimitiveIterator.OfInt]] (which is a `Iterator[Integer]`) in the subclass + * [[IntStepper]] (which is a `Stepper[Int]`). + */ + def javaIterator[B >: A]: JIterator[_] + + /** Returns an [[Iterator]] corresponding to this Stepper. Note that Iterators corresponding to + * primitive Steppers box the elements. 
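+   *
+   *  For example (illustrative):
+   *  {{{
+   *    val st = Array(1.0, 2.0, 3.0).stepper // a DoubleStepper
+   *    st.iterator.sum                       // 6.0, but every element is boxed again
+   *  }}}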
+ */ + def iterator: Iterator[A] = new AbstractIterator[A] { + def hasNext: Boolean = hasStep + def next(): A = nextStep() + } +} + +object Stepper { + /** A marker trait that indicates that a `Stepper` can call `trySplit` with at worst O(log N) time + * and space complexity, and that the division is likely to be reasonably even. Steppers marked + * with `EfficientSplit` can be converted to parallel streams with the `asJavaParStream` method + * defined in [[scala.jdk.StreamConverters]]. + */ + trait EfficientSplit + + private[collection] final def throwNSEE(): Nothing = throw new NoSuchElementException("Empty Stepper") + + /* These adapter classes can wrap an AnyStepper of a numeric type into a possibly widened primitive Stepper type. + * This provides a basis for more efficient stream processing on unboxed values provided that the original source + * of the data is boxed. In other cases native implementations of the primitive stepper types should be provided + * (see for example IntArrayStepper and WidenedByteArrayStepper). */ + + private[collection] class UnboxingDoubleStepper(st: AnyStepper[Double]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingDoubleStepper(s) + } + } + + private[collection] class UnboxingIntStepper(st: AnyStepper[Int]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingIntStepper(s) + } + } + + private[collection] class UnboxingLongStepper(st: AnyStepper[Long]) extends LongStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): LongStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingLongStepper(s) + } + } + + private[collection] class UnboxingByteStepper(st: AnyStepper[Byte]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingByteStepper(s) + } + } + + private[collection] class UnboxingCharStepper(st: AnyStepper[Char]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingCharStepper(s) + } + } + + private[collection] class UnboxingShortStepper(st: AnyStepper[Short]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingShortStepper(s) + } + } + + private[collection] class UnboxingFloatStepper(st: AnyStepper[Float]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def 
characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingFloatStepper(s) + } + } +} + +/** A Stepper for arbitrary element types. See [[Stepper]]. */ +trait AnyStepper[+A] extends Stepper[A] { + def trySplit(): AnyStepper[A] + + def spliterator[B >: A]: Spliterator[B] = new AnyStepper.AnyStepperSpliterator(this) + + def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { + def hasNext: Boolean = hasStep + def next(): B = nextStep() + } +} + +object AnyStepper { + class AnyStepperSpliterator[A](s: AnyStepper[A]) extends Spliterator[A] { + def tryAdvance(c: Consumer[_ >: A]): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + def trySplit(): Spliterator[A] = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: A]): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + } + + def ofSeqDoubleStepper(st: DoubleStepper): AnyStepper[Double] = new BoxedDoubleStepper(st) + def ofParDoubleStepper(st: DoubleStepper with EfficientSplit): AnyStepper[Double] with EfficientSplit = new BoxedDoubleStepper(st) with EfficientSplit + + def ofSeqIntStepper(st: IntStepper): AnyStepper[Int] = new BoxedIntStepper(st) + def ofParIntStepper(st: IntStepper with EfficientSplit): AnyStepper[Int] with EfficientSplit = new BoxedIntStepper(st) with EfficientSplit + + def ofSeqLongStepper(st: LongStepper): AnyStepper[Long] = new BoxedLongStepper(st) + def ofParLongStepper(st: LongStepper with EfficientSplit): AnyStepper[Long] with EfficientSplit = new BoxedLongStepper(st) with EfficientSplit + + private[collection] class BoxedDoubleStepper(st: DoubleStepper) extends AnyStepper[Double] { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Double] = { + val s = st.trySplit() + if (s == null) null else new BoxedDoubleStepper(s) + } + } + + private[collection] class BoxedIntStepper(st: IntStepper) extends AnyStepper[Int] { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Int] = { + val s = st.trySplit() + if (s == null) null else new BoxedIntStepper(s) + } + } + + private[collection] class BoxedLongStepper(st: LongStepper) extends AnyStepper[Long] { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Long] = { + val s = st.trySplit() + if (s == null) null else new BoxedLongStepper(s) + } + } +} + +/** A Stepper for Ints. See [[Stepper]]. 
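+ *
+ *  For example (illustrative):
+ *  {{{
+ *    val s = Array(1, 2, 3).stepper // an IntStepper: nextStep() returns unboxed Ints
+ *    var sum = 0
+ *    while (s.hasStep) sum += s.nextStep()
+ *  }}}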
*/ +trait IntStepper extends Stepper[Int] { + def trySplit(): IntStepper + + def spliterator[B >: Int]: Spliterator.OfInt = new IntStepper.IntStepperSpliterator(this) + + def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { + def hasNext: Boolean = hasStep + def nextInt(): Int = nextStep() + } +} +object IntStepper { + class IntStepperSpliterator(s: IntStepper) extends Spliterator.OfInt { + def tryAdvance(c: IntConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Integer]): Boolean = (c: AnyRef) match { + case ic: IntConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfInt = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: IntConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Integer]): Unit = (c: AnyRef) match { + case ic: IntConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Doubles. See [[Stepper]]. */ +trait DoubleStepper extends Stepper[Double] { + def trySplit(): DoubleStepper + + def spliterator[B >: Double]: Spliterator.OfDouble = new DoubleStepper.DoubleStepperSpliterator(this) + + def javaIterator[B >: Double]: PrimitiveIterator.OfDouble = new PrimitiveIterator.OfDouble { + def hasNext: Boolean = hasStep + def nextDouble(): Double = nextStep() + } +} + +object DoubleStepper { + class DoubleStepperSpliterator(s: DoubleStepper) extends Spliterator.OfDouble { + def tryAdvance(c: DoubleConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Double]): Boolean = (c: AnyRef) match { + case ic: DoubleConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfDouble = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: DoubleConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Double]): Unit = (c: AnyRef) match { + case ic: DoubleConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Double.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Longs. See [[Stepper]]. 
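+ *
+ *  Primitive steppers are what back the specialized Java streams; a sketch,
+ *  assuming `scala.jdk.StreamConverters` is on the classpath:
+ *  {{{
+ *    import scala.jdk.StreamConverters._
+ *    Array(1L, 2L, 3L).asJavaSeqStream.sum() // a LongStream, no boxing; 6L
+ *  }}}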
*/ +trait LongStepper extends Stepper[Long] { + def trySplit(): LongStepper + + def spliterator[B >: Long]: Spliterator.OfLong = new LongStepper.LongStepperSpliterator(this) + + def javaIterator[B >: Long]: PrimitiveIterator.OfLong = new PrimitiveIterator.OfLong { + def hasNext: Boolean = hasStep + def nextLong(): Long = nextStep() + } +} + +object LongStepper { + class LongStepperSpliterator(s: LongStepper) extends Spliterator.OfLong { + def tryAdvance(c: LongConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Long]): Boolean = (c: AnyRef) match { + case ic: LongConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfLong = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: LongConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Long]): Unit = (c: AnyRef) match { + case ic: LongConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Long.valueOf(s.nextStep())) } + } + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala new file mode 100644 index 000000000000..74ab6b2107e5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala @@ -0,0 +1,347 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import java.util.Arrays + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 + +/** + * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same + * underlying `Array`, therefore it is not growable or shrinkable. + * + * @tparam T type of the elements in this wrapped array. 
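+ *
+ *  Example (illustrative):
+ *  {{{
+ *    val xs = ArraySeq.make(Array(1, 2, 3)) // wraps the array without copying
+ *    xs(0) = 42                             // writes through to the wrapped array
+ *  }}}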
+ * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed abstract class ArraySeq[T] + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] + with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] + with Serializable { + + override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged + + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = { + val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] + val s = coll.knownSize + if(s > 0) b.sizeHint(s) + b ++= coll + ArraySeq.make(b.result()) + } + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(elemTag).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def elemTag: ClassTag[_] + + /** Update element at given index */ + def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit + + /** The underlying array. Its element type does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def array: Array[_] + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit + + override protected[this] def className = "ArraySeq" + + /** Clones this object, including the underlying Array. */ + override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]] + + override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + override def equals(other: Any): Boolean = other match { + case that: ArraySeq[_] if this.array.length != that.array.length => + false + case _ => + super.equals(other) + } + + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = + ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] + + override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]]) + this + } +} + +/** A companion object used to create instances of `ArraySeq`. + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + // This is reused for all calls to empty. 
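+  // (Sharing one instance across all element types is safe because an empty
+  //  ofRef never exposes an element, so the unchecked cast in `empty` below
+  //  cannot be observed.)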
+ private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) + def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] + + def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) + + /** + * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type + * without copying. + * + * Note that an array containing boxed primitives can be converted to a `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` + * at runtime. + */ + def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { + def elemTag = ClassTag[T](array.getClass.getComponentType) + def length: Int = array.length + def apply(index: Int): T = array(index) + def update(index: Int, elem: T): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofRef[_] => + Array.equals( + this.array.asInstanceOf[Array[AnyRef]], + that.array.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(array, 0, array.length) + else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { + def elemTag = ClassTag.Byte + def length: Int = array.length + def apply(index: Int): Byte = array(index) + def update(index: Int, elem: Byte): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) + else new WidenedByteArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { + 
def elemTag = ClassTag.Short + def length: Int = array.length + def apply(index: Int): Short = array(index) + def update(index: Int, elem: Short): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) + else new WidenedShortArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { + def elemTag = ClassTag.Char + def length: Int = array.length + def apply(index: Int): Char = array(index) + def update(index: Int, elem: Char): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) + else new WidenedCharArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + val jsb = sb.underlying + if (start.length != 0) jsb.append(start) + val len = array.length + if (len != 0) { + if (sep.isEmpty) jsb.append(array) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(array(0)) + var i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(array(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + sb + } + } + + @SerialVersionUID(3L) + final class ofInt(val array: Array[Int]) extends ArraySeq[Int] { + def elemTag = ClassTag.Int + def length: Int = array.length + def apply(index: Int): Int = array(index) + def update(index: Int, elem: Int): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) + else new IntArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofLong(val array: Array[Long]) extends ArraySeq[Long] { + def elemTag = ClassTag.Long + def length: Int = array.length + def apply(index: Int): Long = array(index) + def update(index: Int, elem: Long): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def 
iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(array, 0, array.length)) + else new LongArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] { + def elemTag = ClassTag.Float + def length: Int = array.length + def apply(index: Int): Float = array(index) + def update(index: Int, elem: Float): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) + else new WidenedFloatArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { + def elemTag = ClassTag.Double + def length: Int = array.length + def apply(index: Int): Double = array(index) + def update(index: Int, elem: Double): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) + else new DoubleArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { + def elemTag = ClassTag.Boolean + def length: Int = array.length + def apply(index: Int): Boolean = array(index) + def update(index: Int, elem: Boolean): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { + def elemTag = ClassTag.Unit + def length: Int = array.length + def apply(index: Int): Unit = array(index) + def update(index: Int, elem: Unit): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofUnit => array.length == that.array.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) + override def 
stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S with EfficientSplit] + } +} From ec70a238d84027ba907b777d002ef3cc763fc8e2 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 16:07:09 +0100 Subject: [PATCH 054/216] Make Stepper capture checked --- .../stdlib/collection/Stepper.scala | 40 +++--- .../stdlib/collection/StepperShape.scala | 115 ++++++++++++++++++ 2 files changed, 140 insertions(+), 15 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/StepperShape.scala diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala index 0eeb8a44cb72..0a0ac0075990 100644 --- a/tests/pos-special/stdlib/collection/Stepper.scala +++ b/tests/pos-special/stdlib/collection/Stepper.scala @@ -15,6 +15,7 @@ package scala.collection import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer} import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator} import java.{lang => jl} +import language.experimental.captureChecking import scala.collection.Stepper.EfficientSplit @@ -38,6 +39,8 @@ import scala.collection.Stepper.EfficientSplit * @tparam A the element type of the Stepper */ trait Stepper[@specialized(Double, Int, Long) +A] { + this: Stepper[A]^ => + /** Check if there's an element available. */ def hasStep: Boolean @@ -183,9 +186,11 @@ object Stepper { /** A Stepper for arbitrary element types. See [[Stepper]]. */ trait AnyStepper[+A] extends Stepper[A] { + this: AnyStepper[A]^ => + def trySplit(): AnyStepper[A] - def spliterator[B >: A]: Spliterator[B] = new AnyStepper.AnyStepperSpliterator(this) + def spliterator[B >: A]: Spliterator[B]^{this} = new AnyStepper.AnyStepperSpliterator(this) def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { def hasNext: Boolean = hasStep @@ -194,10 +199,10 @@ trait AnyStepper[+A] extends Stepper[A] { } object AnyStepper { - class AnyStepperSpliterator[A](s: AnyStepper[A]) extends Spliterator[A] { + class AnyStepperSpliterator[A](s: AnyStepper[A]^) extends Spliterator[A] { def tryAdvance(c: Consumer[_ >: A]): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false - def trySplit(): Spliterator[A] = { + def trySplit(): Spliterator[A]^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -253,9 +258,11 @@ object AnyStepper { /** A Stepper for Ints. See [[Stepper]]. 
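 *  A minimal illustrative loop over the `hasStep`/`nextStep()` protocol
 *  described above (a sketch; the stepper `st` is assumed to come from some
 *  collection's `stepper` call):
 *  {{{
 *    def sum(st: IntStepper): Int = {
 *      var s = 0
 *      while (st.hasStep) s += st.nextStep()
 *      s
 *    }
 *  }}}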
*/ trait IntStepper extends Stepper[Int] { + this: IntStepper^ => + def trySplit(): IntStepper - def spliterator[B >: Int]: Spliterator.OfInt = new IntStepper.IntStepperSpliterator(this) + def spliterator[B >: Int]: Spliterator.OfInt^{this} = new IntStepper.IntStepperSpliterator(this) def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { def hasNext: Boolean = hasStep @@ -263,7 +270,7 @@ trait IntStepper extends Stepper[Int] { } } object IntStepper { - class IntStepperSpliterator(s: IntStepper) extends Spliterator.OfInt { + class IntStepperSpliterator(s: IntStepper^) extends Spliterator.OfInt { def tryAdvance(c: IntConsumer): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false // Override for efficiency: don't wrap the function and call the `tryAdvance` overload @@ -272,7 +279,7 @@ object IntStepper { case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfInt = { + override def trySplit(): Spliterator.OfInt^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -291,18 +298,19 @@ object IntStepper { /** A Stepper for Doubles. See [[Stepper]]. */ trait DoubleStepper extends Stepper[Double] { + this: DoubleStepper^ => def trySplit(): DoubleStepper - def spliterator[B >: Double]: Spliterator.OfDouble = new DoubleStepper.DoubleStepperSpliterator(this) + def spliterator[B >: Double]: Spliterator.OfDouble^{this} = new DoubleStepper.DoubleStepperSpliterator(this) - def javaIterator[B >: Double]: PrimitiveIterator.OfDouble = new PrimitiveIterator.OfDouble { + def javaIterator[B >: Double]: PrimitiveIterator.OfDouble^{this} = new PrimitiveIterator.OfDouble { def hasNext: Boolean = hasStep def nextDouble(): Double = nextStep() } } object DoubleStepper { - class DoubleStepperSpliterator(s: DoubleStepper) extends Spliterator.OfDouble { + class DoubleStepperSpliterator(s: DoubleStepper^) extends Spliterator.OfDouble { def tryAdvance(c: DoubleConsumer): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false // Override for efficiency: don't wrap the function and call the `tryAdvance` overload @@ -311,7 +319,7 @@ object DoubleStepper { case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfDouble = { + override def trySplit(): Spliterator.OfDouble^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -330,18 +338,20 @@ object DoubleStepper { /** A Stepper for Longs. See [[Stepper]]. 
*/ trait LongStepper extends Stepper[Long] { - def trySplit(): LongStepper + this: LongStepper^ => + + def trySplit(): LongStepper^{this} - def spliterator[B >: Long]: Spliterator.OfLong = new LongStepper.LongStepperSpliterator(this) + def spliterator[B >: Long]: Spliterator.OfLong^{this} = new LongStepper.LongStepperSpliterator(this) - def javaIterator[B >: Long]: PrimitiveIterator.OfLong = new PrimitiveIterator.OfLong { + def javaIterator[B >: Long]: PrimitiveIterator.OfLong^{this} = new PrimitiveIterator.OfLong { def hasNext: Boolean = hasStep def nextLong(): Long = nextStep() } } object LongStepper { - class LongStepperSpliterator(s: LongStepper) extends Spliterator.OfLong { + class LongStepperSpliterator(s: LongStepper^) extends Spliterator.OfLong { def tryAdvance(c: LongConsumer): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false // Override for efficiency: don't wrap the function and call the `tryAdvance` overload @@ -350,7 +360,7 @@ object LongStepper { case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfLong = { + override def trySplit(): Spliterator.OfLong^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } diff --git a/tests/pos-special/stdlib/collection/StepperShape.scala b/tests/pos-special/stdlib/collection/StepperShape.scala new file mode 100644 index 000000000000..c6b520400d89 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StepperShape.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.{lang => jl} + +import language.experimental.captureChecking +import scala.collection.Stepper.EfficientSplit + +/** An implicit StepperShape instance is used in the [[IterableOnce.stepper]] to return a possibly + * specialized Stepper `S` according to the element type `T`. + */ +sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure { + /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */ + def shape: StepperShape.Shape + + /** Create an unboxing primitive sequential Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. */ + def seqUnbox(st: AnyStepper[T]): S + + /** Create an unboxing primitive parallel (i.e. `with EfficientSplit`) Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. 
*/ + def parUnbox(st: AnyStepper[T] with EfficientSplit): S with EfficientSplit +} + +object StepperShape extends StepperShapeLowPriority1 { + class Shape private[StepperShape] (private val s: Int) extends AnyVal + + // reference + val ReferenceShape = new Shape(0) + + // primitive + val IntShape = new Shape(1) + val LongShape = new Shape(2) + val DoubleShape = new Shape(3) + + // widening + val ByteShape = new Shape(4) + val ShortShape = new Shape(5) + val CharShape = new Shape(6) + val FloatShape = new Shape(7) + + implicit val intStepperShape: StepperShape[Int, IntStepper] = new StepperShape[Int, IntStepper] { + def shape = IntShape + def seqUnbox(st: AnyStepper[Int]): IntStepper = new Stepper.UnboxingIntStepper(st) + def parUnbox(st: AnyStepper[Int] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingIntStepper(st) with EfficientSplit + } + implicit val jIntegerStepperShape: StepperShape[jl.Integer, IntStepper] = intStepperShape.asInstanceOf[StepperShape[jl.Integer, IntStepper]] + + implicit val longStepperShape: StepperShape[Long, LongStepper] = new StepperShape[Long, LongStepper] { + def shape = LongShape + def seqUnbox(st: AnyStepper[Long]): LongStepper = new Stepper.UnboxingLongStepper(st) + def parUnbox(st: AnyStepper[Long] with EfficientSplit): LongStepper with EfficientSplit = new Stepper.UnboxingLongStepper(st) with EfficientSplit + } + implicit val jLongStepperShape: StepperShape[jl.Long, LongStepper] = longStepperShape.asInstanceOf[StepperShape[jl.Long, LongStepper]] + + implicit val doubleStepperShape: StepperShape[Double, DoubleStepper] = new StepperShape[Double, DoubleStepper] { + def shape = DoubleShape + def seqUnbox(st: AnyStepper[Double]): DoubleStepper = new Stepper.UnboxingDoubleStepper(st) + def parUnbox(st: AnyStepper[Double] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingDoubleStepper(st) with EfficientSplit + } + implicit val jDoubleStepperShape: StepperShape[jl.Double, DoubleStepper] = doubleStepperShape.asInstanceOf[StepperShape[jl.Double, DoubleStepper]] + + implicit val byteStepperShape: StepperShape[Byte, IntStepper] = new StepperShape[Byte, IntStepper] { + def shape = ByteShape + def seqUnbox(st: AnyStepper[Byte]): IntStepper = new Stepper.UnboxingByteStepper(st) + def parUnbox(st: AnyStepper[Byte] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingByteStepper(st) with EfficientSplit + } + implicit val jByteStepperShape: StepperShape[jl.Byte, IntStepper] = byteStepperShape.asInstanceOf[StepperShape[jl.Byte, IntStepper]] + + implicit val shortStepperShape: StepperShape[Short, IntStepper] = new StepperShape[Short, IntStepper] { + def shape = ShortShape + def seqUnbox(st: AnyStepper[Short]): IntStepper = new Stepper.UnboxingShortStepper(st) + def parUnbox(st: AnyStepper[Short] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingShortStepper(st) with EfficientSplit + } + implicit val jShortStepperShape: StepperShape[jl.Short, IntStepper] = shortStepperShape.asInstanceOf[StepperShape[jl.Short, IntStepper]] + + implicit val charStepperShape: StepperShape[Char, IntStepper] = new StepperShape[Char, IntStepper] { + def shape = CharShape + def seqUnbox(st: AnyStepper[Char]): IntStepper = new Stepper.UnboxingCharStepper(st) + def parUnbox(st: AnyStepper[Char] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingCharStepper(st) with EfficientSplit + } + implicit val jCharacterStepperShape: StepperShape[jl.Character, IntStepper] = 
charStepperShape.asInstanceOf[StepperShape[jl.Character, IntStepper]] + + implicit val floatStepperShape: StepperShape[Float, DoubleStepper] = new StepperShape[Float, DoubleStepper] { + def shape = FloatShape + def seqUnbox(st: AnyStepper[Float]): DoubleStepper = new Stepper.UnboxingFloatStepper(st) + def parUnbox(st: AnyStepper[Float] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingFloatStepper(st) with EfficientSplit + } + implicit val jFloatStepperShape: StepperShape[jl.Float, DoubleStepper] = floatStepperShape.asInstanceOf[StepperShape[jl.Float, DoubleStepper]] +} + +trait StepperShapeLowPriority1 extends StepperShapeLowPriority2 { + implicit def anyStepperShape[T]: StepperShape[T, AnyStepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, AnyStepper[T]]] +} + +trait StepperShapeLowPriority2 { + implicit def baseStepperShape[T]: StepperShape[T, Stepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, Stepper[T]]] + + protected val anyStepperShapePrototype: StepperShape[AnyRef, Stepper[AnyRef]] = new StepperShape[AnyRef, Stepper[AnyRef]] { + def shape = StepperShape.ReferenceShape + def seqUnbox(st: AnyStepper[AnyRef]): Stepper[AnyRef] = st + def parUnbox(st: AnyStepper[AnyRef] with EfficientSplit): Stepper[AnyRef] with EfficientSplit = st + } +} \ No newline at end of file From 24b8a4c87a74d3e2d4e1a37b92ef97d18de48c9f Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 17:05:03 +0100 Subject: [PATCH 055/216] Don't generate capture set variables for self types of pure classes The tricky thing here is how to recognize that a class is pure since that is known only during capture checking and we are at Setup, the phase before. But we can approximate by treating the `Pure` trait as definitely pure. --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 4 ++- .../dotty/tools/dotc/core/Definitions.scala | 2 +- tests/pos-custom-args/captures/steppers.scala | 27 +++++++++++++++++++ 3 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 tests/pos-custom-args/captures/steppers.scala diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 68fd79048f41..e90a8394f87d 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -522,7 +522,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: tree.symbol match case cls: ClassSymbol => val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo - if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then + if ((selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic) + && !cls.isPureClass + then // add capture set to self type of nested classes if no self type is given explicitly. val newSelfType = CapturingType(cinfo.selfType, CaptureSet.Var(cls)) val ps1 = inContext(ctx.withOwner(cls)): diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 205d43cd07ca..40370973ebf0 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1443,7 +1443,7 @@ class Definitions { /** Base classes that are assumed to be pure for the purposes of capture checking. * Every class inheriting from a pure baseclass is pure. 
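 * For example, a hypothetical `class Box extends Pure` is treated as pure,
 * so Setup does not add a capture set variable to its self type.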
 */
-  @tu lazy val pureBaseClasses = Set(defn.ThrowableClass)
+  @tu lazy val pureBaseClasses = Set(ThrowableClass, PureClass)

  /** Non-inheritable classes that are assumed to be pure for the purposes of capture checking. */
diff --git a/tests/pos-custom-args/captures/steppers.scala b/tests/pos-custom-args/captures/steppers.scala
new file mode 100644
index 000000000000..815ac938b492
--- /dev/null
+++ b/tests/pos-custom-args/captures/steppers.scala
@@ -0,0 +1,27 @@
+
+trait Stepper[+A]:
+  this: Stepper[A]^ =>
+
+object Stepper:
+  trait EfficientSplit
+
+sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure
+
+trait IterableOnce[+A] extends Any:
+  this: IterableOnce[A]^ =>
+  def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = ???
+
+sealed abstract class ArraySeq[sealed T] extends IterableOnce[T], Pure:
+  def array: Array[_]
+
+  def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] =
+    val arr = array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]]).asInstanceOf[Array[T]]
+    ArraySeq.make(arr).asInstanceOf[ArraySeq[T]]
+
+object ArraySeq:
+
+  def make[sealed T](x: Array[T]): ArraySeq[T] = ???
+
+  final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T], Pure:
+    override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S & Stepper.EfficientSplit = ???
+

From 6ba065ee658eeaa4286979b7baeaf0fe0de7c134 Mon Sep 17 00:00:00 2001
From: odersky
Date: Tue, 31 Oct 2023 17:05:37 +0100
Subject: [PATCH 056/216] Make ArraySeq capture checked

---
 .../stdlib/collection/mutable/ArraySeq.scala  | 23 +++++++++++--------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala
index 74ab6b2107e5..9bdb28517eff 100644
--- a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala
+++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala
@@ -19,6 +19,8 @@ import scala.collection.Stepper.EfficientSplit
 import scala.collection.convert.impl._
 import scala.reflect.ClassTag
 import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+import annotation.unchecked.uncheckedCaptures

 /**
  * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same
 * @define willNotTerminateInf
 */
@SerialVersionUID(3L)
-sealed abstract class ArraySeq[T]
+sealed abstract class ArraySeq[sealed T]
  extends AbstractSeq[T]
    with IndexedSeq[T]
    with IndexedSeqOps[T, ArraySeq, ArraySeq[T]]
@@ -43,14 +45,15 @@ sealed abstract class ArraySeq[T]

  override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged

-  override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = {
+  override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = {
    val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]]
    val s = coll.knownSize
    if(s > 0) b.sizeHint(s)
    b ++= coll
    ArraySeq.make(b.result())
  }
-  override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(elemTag).asInstanceOf[Builder[T, ArraySeq[T]]]
+  override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] =
+    ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]]
  override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]])

  /** The tag of the element type.
This does not have to be equal to the element type of this ArraySeq. A primitive @@ -71,9 +74,9 @@ sealed abstract class ArraySeq[T] override protected[this] def className = "ArraySeq" /** Clones this object, including the underlying Array. */ - override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]] + override def clone(): ArraySeq[T] = ArraySeq.make[T](array.clone().asInstanceOf[Array[T]]) - override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: T](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if(copied > 0) { Array.copy(array, 0, xs, start, copied) @@ -89,10 +92,10 @@ sealed abstract class ArraySeq[T] } override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = - ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] + ArraySeq.make(array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { - if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]]) + if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]]) this } } @@ -107,9 +110,9 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] - def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + def from[sealed A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) - def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) + def newBuilder[sealed A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) /** * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type @@ -123,7 +126,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` * at runtime. */ - def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + def make[sealed T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { case null => null case x: Array[AnyRef] => new ofRef[AnyRef](x) case x: Array[Int] => new ofInt(x) From bb650465e3c1e2833a7247fc0602dc3a4897e745 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 17:13:19 +0100 Subject: [PATCH 057/216] Add mutable/IndexedSeq.scala to stdlib (capture checks out of the box) --- .../collection/mutable/IndexedSeq.scala | 84 +++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala diff --git a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala new file mode 100644 index 000000000000..022970b4c56f --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala @@ -0,0 +1,84 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package mutable +import language.experimental.captureChecking + +trait IndexedSeq[T] extends Seq[T] + with scala.collection.IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] { + + override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq +} + +@SerialVersionUID(3L) +object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](ArrayBuffer) + +trait IndexedSeqOps[A, +CC[_], +C <: AnyRef] + extends scala.collection.IndexedSeqOps[A, CC, C] + with SeqOps[A, CC, C] { + + /** Modifies this $coll by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return this $coll modified by replacing all elements with the + * result of applying the given function `f` to each element + * of this $coll. + */ + def mapInPlace(f: A => A): this.type = { + var i = 0 + val siz = size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + /** Sorts this $coll in place according to an Ordering. + * + * @see [[scala.collection.SeqOps.sorted]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. + */ + def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + val len = this.length + if (len > 1) { + val arr = new Array[AnyRef](len) + var i = 0 + for (x <- this) { + arr(i) = x.asInstanceOf[AnyRef] + i += 1 + } + java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) + i = 0 + while (i < arr.length) { + update(i, arr(i).asInstanceOf[A]) + i += 1 + } + } + this + } + + /** Sorts this $coll in place according to a comparison function. + * + * @see [[scala.collection.SeqOps.sortWith]] + */ + def sortInPlaceWith(lt: (A, A) => Boolean): this.type = sortInPlace()(Ordering.fromLessThan(lt)) + + /** Sorts this $coll in place according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. 
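+   * For example (an illustrative sketch; the buffer contents are assumed):
+   * {{{
+   *   val buf = ArrayBuffer("bbb", "a", "cc")
+   *   buf.sortInPlaceBy(_.length) // buf is now ArrayBuffer("a", "cc", "bbb")
+   * }}}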
+   *
+   * @see [[scala.collection.SeqOps.sortBy]]
+   */
+  def sortInPlaceBy[B](f: A => B)(implicit ord: Ordering[B]): this.type = sortInPlace()(ord on f)
+
+}

From c811e353cded4422ae644b818f1149663916c625 Mon Sep 17 00:00:00 2001
From: odersky
Date: Tue, 31 Oct 2023 17:19:53 +0100
Subject: [PATCH 058/216] Add Queue.scala to stdlib

---
 .../stdlib/collection/Iterator.scala          |   4 +-
 .../stdlib/collection/mutable/Queue.scala     | 139 ++++++++++++++++++
 2 files changed, 141 insertions(+), 2 deletions(-)
 create mode 100644 tests/pos-special/stdlib/collection/mutable/Queue.scala

diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala
index 68ea4f76c249..172cd7c2a282 100644
--- a/tests/pos-special/stdlib/collection/Iterator.scala
+++ b/tests/pos-special/stdlib/collection/Iterator.scala
@@ -705,7 +705,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
      */
    private[this] var status = 0
    private def store(a: A): Unit = {
-      if (lookahead == null) lookahead = new mutable.Queue[A]
+      if (lookahead == null) lookahead = new mutable.Queue[A @uncheckedCaptures]
      lookahead += a
    }
    def hasNext = {
@@ -868,7 +868,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
   *  @note    Reuse: $consumesOneAndProducesTwoIterators
   */
  def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = {
-    val gap = new scala.collection.mutable.Queue[A]
+    val gap = new scala.collection.mutable.Queue[A @uncheckedCaptures]
    var ahead: Iterator[A] = null
    class Partner extends AbstractIterator[A] {
      override def knownSize: Int = self.synchronized {
diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala
new file mode 100644
index 000000000000..8c0b5cea1133
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Queue.scala
@@ -0,0 +1,139 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.annotation.nowarn
+import scala.collection.generic.DefaultSerializable
+import language.experimental.captureChecking
+
+
+/** `Queue` objects implement data structures that allow inserting and
+ * retrieving elements in a first-in-first-out (FIFO) manner.
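+ *
+ * A short illustrative session (values chosen only for the example):
+ * {{{
+ *   val q = Queue.empty[Int]
+ *   q.enqueue(1)
+ *   q.enqueue(2, 3)
+ *   q.dequeue() // returns 1: the first element inserted is the first removed
+ * }}}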
+ *
+ * @define Coll `mutable.Queue`
+ * @define coll mutable queue
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+class Queue[sealed A] protected (array: Array[AnyRef], start: Int, end: Int)
+  extends ArrayDeque[A](array, start, end)
+    with IndexedSeqOps[A, Queue, Queue[A]]
+    with StrictOptimizedSeqOps[A, Queue, Queue[A]]
+    with IterableFactoryDefaults[A, Queue]
+    with ArrayDequeOps[A, Queue, Queue[A]]
+    with Cloneable[Queue[A]]
+    with DefaultSerializable {
+
+  def this(initialSize: Int = ArrayDeque.DefaultInitialSize) =
+    this(ArrayDeque.alloc(initialSize), start = 0, end = 0)
+
+  override def iterableFactory: SeqFactory[Queue] = Queue
+
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix = "Queue"
+
+  /**
+   * Adds a single element to the end of this queue.
+   *
+   * @param elem the element to add
+   * @return this
+   */
+  def enqueue(elem: A): this.type = this += elem
+
+  /** Enqueue two or more elements at the end of the queue. The last element
+   * of the sequence will be at the end of the queue.
+   *
+   * @param elems the element sequence.
+   * @return this
+   */
+  def enqueue(elem1: A, elem2: A, elems: A*): this.type = enqueue(elem1).enqueue(elem2).enqueueAll(elems)
+
+  /** Enqueues all elements in the given iterable object into the queue. The
+   * last element in the iterable object will be at the end of the new queue.
+   *
+   * @param elems the iterable object.
+   * @return this
+   */
+  def enqueueAll(elems: scala.collection.IterableOnce[A]): this.type = this ++= elems
+
+  /**
+   * Removes the first element from this queue and returns it
+   *
+   * @return the first element of the queue
+   * @throws NoSuchElementException when the queue is empty
+   */
+  def dequeue(): A = removeHead()
+
+  /** Returns the first element in the queue which satisfies the
+   * given predicate, and removes this element from the queue.
+   *
+   * @param p the predicate used for choosing the first element
+   * @return the first element of the queue for which p yields true
+   */
+  def dequeueFirst(p: A => Boolean): Option[A] =
+    removeFirst(p)
+
+  /** Returns all elements in the queue which satisfy the
+   * given predicate, and removes those elements from the queue.
+   *
+   * @param p the predicate used for choosing elements
+   * @return a sequence of all elements in the queue for which
+   *         p yields true.
+   */
+  def dequeueAll(p: A => Boolean): scala.collection.immutable.Seq[A] =
+    removeAll(p)
+
+  /**
+   * Returns and dequeues all elements from the queue which satisfy the given predicate
+   *
+   * @param f the predicate used for choosing elements
+   * @return The removed elements
+   */
+  def dequeueWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f)
+
+  /** Returns the first element in the queue, or throws an error if there
+   * is no element contained in the queue.
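+   * Unlike `dequeue()`, calling `front` leaves the queue unchanged.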
+ */ + @`inline` final def front: A = head + + override protected def klone(): Queue[A] = { + val bf = newSpecificBuilder + bf ++= this + bf.result() + } + + override protected def ofArray(array: Array[AnyRef], end: Int): Queue[A] = + new Queue(array, start = 0, end) + +} + +/** + * $factoryInfo + * @define coll queue + * @define Coll `Queue` + */ +@SerialVersionUID(3L) +object Queue extends StrictOptimizedSeqFactory[Queue] { + + def from[sealed A](source: IterableOnce[A]): Queue[A] = empty ++= source + + def empty[sealed A]: Queue[A] = new Queue + + def newBuilder[sealed A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty) + +} From f0a1241ede5604952f38a4136627ac29bc83b09c Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 17:20:43 +0100 Subject: [PATCH 059/216] Add PriorityQueue.scala to stdlib --- .../collection/mutable/PriorityQueue.scala | 402 ++++++++++++++++++ 1 file changed, 402 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala new file mode 100644 index 000000000000..5572bdca3cf6 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala @@ -0,0 +1,402 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.collection.generic.DefaultSerializationProxy +import scala.math.Ordering + +/** A heap-based priority queue. + * + * To prioritize elements of type `A` there must be an implicit + * `Ordering[A]` available at creation. Elements are retrieved + * in priority order by using [[dequeue]] or [[dequeueAll]]. + * + * If multiple elements have the same priority as determined by the ordering for this + * `PriorityQueue`, no guarantees are made regarding the order in which those elements + * are returned by `dequeue` or `dequeueAll`. In particular, that means this + * class does not guarantee first-in-first-out behavior, as may be + * incorrectly inferred from the fact that this data structure is + * called a "queue". + * + * Only the `dequeue` and `dequeueAll` methods will return elements in priority + * order (while removing elements from the heap). Standard collection methods + * such as `drop`, `iterator`, `toList` and `toString` use an arbitrary + * iteration order: they will traverse the heap or remove elements + * in whichever order seems most convenient. + * + * Therefore, printing a `PriorityQueue` will not show elements in priority order, + * though the highest-priority element will be printed first. + * To print the elements in order, it's necessary to `dequeue` them. + * To do this non-destructively, duplicate the `PriorityQueue` first; + * the `clone` method is a suitable way to obtain a disposable copy. + * + * Client keys are assumed to be immutable. Mutating keys may violate + * the invariant of the underlying heap-ordered tree. Note that [[clone]] + * does not rebuild the underlying tree. 
+ * + * {{{ + * scala> val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) + * val pq: scala.collection.mutable.PriorityQueue[Int] = PriorityQueue(7, 3, 5, 1, 2) + * + * scala> pq.toList // also not in order + * val res0: List[Int] = List(7, 3, 5, 1, 2) + * + * scala> pq.clone.dequeueAll + * val res1: Seq[Int] = ArraySeq(7, 5, 3, 2, 1) + * }}} + * + * @tparam A type of the elements in this priority queue. + * @param ord implicit ordering used to compare the elements of type `A`. + * + * @define Coll PriorityQueue + * @define coll priority queue + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class PriorityQueue[A](implicit val ord: Ordering[A]) + extends AbstractIterable[A] + with Iterable[A] + with IterableOps[A, Iterable, PriorityQueue[A]] + with StrictOptimizedIterableOps[A, Iterable, PriorityQueue[A]] + with Builder[A, PriorityQueue[A]] + with Cloneable[PriorityQueue[A]] + with Growable[A] + with Serializable +{ + + private class ResizableArrayAccess[A0] extends ArrayBuffer[A0] { + override def mapInPlace(f: A0 => A0): this.type = { + var i = 1 // see "we do not use array(0)" comment below (???) + val siz = this.size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + def p_size0 = size0 + def p_size0_=(s: Int) = size0 = s + def p_array = array + def p_ensureSize(n: Int) = super.ensureSize(n) + def p_ensureAdditionalSize(n: Int) = super.ensureAdditionalSize(n) + def p_swap(a: Int, b: Int): Unit = { + val h = array(a) + array(a) = array(b) + array(b) = h + } + } + + private val resarr = new ResizableArrayAccess[A] + + resarr.p_size0 += 1 // we do not use array(0) TODO: explain -- what is the first element even for? + def length: Int = resarr.length - 1 // adjust length accordingly + override def size: Int = length + override def knownSize: Int = length + override def isEmpty: Boolean = resarr.p_size0 < 2 + + // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) + override protected def fromSpecific(coll: scala.collection.IterableOnce[A]): PriorityQueue[A] = PriorityQueue.from(coll) + override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder + override def empty: PriorityQueue[A] = PriorityQueue.empty + + def mapInPlace(f: A => A): this.type = { + resarr.mapInPlace(f) + heapify(1) + this + } + + def result() = this + + private def toA(x: AnyRef): A = x.asInstanceOf[A] + protected def fixUp(as: Array[AnyRef], m: Int): Unit = { + var k: Int = m + // use `ord` directly to avoid allocating `OrderingOps` + while (k > 1 && ord.lt(toA(as(k / 2)), toA(as(k)))) { + resarr.p_swap(k, k / 2) + k = k / 2 + } + } + + protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = { + // returns true if any swaps were done (used in heapify) + var k: Int = m + while (n >= 2 * k) { + var j = 2 * k + // use `ord` directly to avoid allocating `OrderingOps` + if (j < n && ord.lt(toA(as(j)), toA(as(j + 1)))) + j += 1 + if (ord.gteq(toA(as(k)), toA(as(j)))) + return k != m + else { + val h = as(k) + as(k) = as(j) + as(j) = h + k = j + } + } + k != m + } + + /** Inserts a single element into the priority queue. + * + * @param elem the element to insert. + * @return this $coll. 
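+   * The new element is sifted up the binary heap (see `fixUp`), so insertion
+   * takes O(log n) time.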
+   */
+  def addOne(elem: A): this.type = {
+    resarr.p_ensureAdditionalSize(1)
+    resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef]
+    fixUp(resarr.p_array, resarr.p_size0)
+    resarr.p_size0 += 1
+    this
+  }
+
+  override def addAll(xs: IterableOnce[A]): this.type = {
+    val from = resarr.p_size0
+    for (x <- xs.iterator) unsafeAdd(x)
+    heapify(from)
+    this
+  }
+
+  private def unsafeAdd(elem: A): Unit = {
+    // like += but skips fixUp, which breaks the ordering invariant
+    // a series of unsafeAdds MUST be followed by heapify
+    resarr.p_ensureAdditionalSize(1)
+    resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef]
+    resarr.p_size0 += 1
+  }
+
+  private def heapify(from: Int): Unit = {
+    // elements at indices 1..from-1 were already in heap order before any adds
+    // elements at indices from..n are newly added, their order must be fixed
+    val n = length
+
+    if (from <= 2) {
+      // no pre-existing order to maintain, do the textbook heapify algorithm
+      for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n)
+    }
+    else if (n - from < 4) {
+      // for very small adds, doing the simplest fix is faster
+      for (i <- from to n) fixUp(resarr.p_array, i)
+    }
+    else {
+      var min = from/2 // tracks the minimum element in the queue
+      val queue = scala.collection.mutable.Queue[Int](min)
+
+      // do fixDown on the parents of all the new elements
+      // except the parent of the first new element, which is in the queue
+      // (that parent is treated specially because it might be the root)
+      for (i <- n/2 until min by -1) {
+        if (fixDown(resarr.p_array, i, n)) {
+          // there was a swap, so also need to fixDown i's parent
+          val parent = i/2
+          if (parent < min) { // make sure same parent isn't added twice
+            min = parent
+            queue += parent
+          }
+        }
+      }
+
+      while (queue.nonEmpty) {
+        val i = queue.dequeue()
+        if (fixDown(resarr.p_array, i, n)) {
+          val parent = i/2
+          if (parent < min && parent > 0) {
+            // the "parent > 0" is to avoid adding the parent of the root
+            min = parent
+            queue += parent
+          }
+        }
+      }
+    }
+  }
+
+  /** Adds all elements provided by an `IterableOnce` object
+   * into the priority queue.
+   *
+   * @param xs an iterable object.
+   * @return a new priority queue containing elements of both `xs` and `this`.
+   */
+  def ++(xs: IterableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs }
+
+  /** Adds all elements to the queue.
+   *
+   * @param elems the elements to add.
+   */
+  def enqueue(elems: A*): Unit = { this ++= elems }
+
+  /** Returns the element with the highest priority in the queue,
+   * and removes this element from the queue.
+   *
+   * @throws NoSuchElementException
+   * @return the element with the highest priority.
+   */
+  def dequeue(): A =
+    if (resarr.p_size0 > 1) {
+      resarr.p_size0 = resarr.p_size0 - 1
+      val result = resarr.p_array(1)
+      resarr.p_array(1) = resarr.p_array(resarr.p_size0)
+      resarr.p_array(resarr.p_size0) = null // erase reference from array
+      fixDown(resarr.p_array, 1, resarr.p_size0 - 1)
+      toA(result)
+    } else
+      throw new NoSuchElementException("no element to remove from heap")
+
+  def dequeueAll[A1 >: A]: immutable.Seq[A1] = {
+    val b = ArrayBuilder.make[Any]
+    b.sizeHint(size)
+    while (nonEmpty) {
+      b += dequeue()
+    }
+    immutable.ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[immutable.ArraySeq[A1]]
+  }
+
+  /** Returns the element with the highest priority in the queue,
+   * or throws an error if there is no element contained in the queue.
+   *
+   * @return the element with the highest priority.
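+   * For example (illustrative), after `enqueue(1, 5, 3)` under the default
+   * `Int` ordering, `head` is `5`.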
+ */ + override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty") + + /** Removes all elements from the queue. After this operation is completed, + * the queue will be empty. + */ + def clear(): Unit = { + resarr.clear() + resarr.p_size0 = 1 + } + + /** Returns an iterator which yields all the elements. + * + * Note: The order of elements returned is undefined. + * If you want to traverse the elements in priority queue + * order, use `clone().dequeueAll.iterator`. + * + * @return an iterator over all the elements. + */ + override def iterator: Iterator[A] = resarr.iterator.drop(1) + + /** Returns the reverse of this priority queue. The new priority queue has + * the same elements as the original, but the opposite ordering. + * + * For example, the element with the highest priority in `pq` has the lowest + * priority in `pq.reverse`, and vice versa. + * + * Ties are handled arbitrarily. Elements with equal priority may or + * may not be reversed with respect to each other. + * + * @return the reversed priority queue. + */ + def reverse: PriorityQueue[A] = { + val revq = new PriorityQueue[A]()(ord.reverse) + // copy the existing data into the new array backwards + // this won't put it exactly into the correct order, + // but will require less fixing than copying it in + // the original order + val n = resarr.p_size0 + revq.resarr.p_ensureSize(n) + revq.resarr.p_size0 = n + val from = resarr.p_array + val to = revq.resarr.p_array + for (i <- 1 until n) to(i) = from(n-i) + revq.heapify(1) + revq + } + + + /** Returns an iterator which yields all the elements in the reverse order + * than that returned by the method `iterator`. + * + * Note: The order of elements returned is undefined. + * + * @return an iterator over all elements sorted in descending order. + */ + def reverseIterator: Iterator[A] = new AbstractIterator[A] { + private[this] var i = resarr.p_size0 - 1 + def hasNext: Boolean = i >= 1 + def next(): A = { + val n = resarr.p_array(i) + i -= 1 + toA(n) + } + } + + /** Returns a regular queue containing the same elements. + * + * Note: the order of elements is undefined. + */ + def toQueue: Queue[A] = new Queue[A] ++= this.iterator + + /** Returns a textual representation of a queue as a string. + * + * @return the string representation of this queue. + */ + override def toString() = toList.mkString("PriorityQueue(", ", ", ")") + + /** Converts this $coll to a list. + * + * Note: the order of elements is undefined. + * + * @return a list containing all elements of this $coll. + */ + override def toList: immutable.List[A] = immutable.List.from(this.iterator) + + /** This method clones the priority queue. + * + * @return a priority queue with the same elements. 
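+   * A typical non-destructive drain is `pq.clone().dequeueAll`, which returns
+   * the elements in priority order while leaving this queue intact.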
+ */ + override def clone(): PriorityQueue[A] = { + val pq = new PriorityQueue[A] + val n = resarr.p_size0 + pq.resarr.p_ensureSize(n) + java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1) + pq.resarr.p_size0 = n + pq + } + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if (copied > 0) { + Array.copy(resarr.p_array, 1, xs, start, copied) + } + copied + } + + @deprecated("Use `PriorityQueue` instead", "2.13.0") + def orderedCompanion: PriorityQueue.type = PriorityQueue + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(PriorityQueue.evidenceIterableFactory[A], this) + + override protected[this] def className = "PriorityQueue" +} + + +@SerialVersionUID(3L) +object PriorityQueue extends SortedIterableFactory[PriorityQueue] { + def newBuilder[A : Ordering]: Builder[A, PriorityQueue[A]] = { + new Builder[A, PriorityQueue[A]] { + val pq = new PriorityQueue[A] + def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } + def result(): PriorityQueue[A] = { pq.heapify(1); pq } + def clear(): Unit = pq.clear() + } + } + + def empty[A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] + + def from[E : Ordering](it: IterableOnce[E]): PriorityQueue[E] = { + val b = newBuilder[E] + b ++= it + b.result() + } +} From 4ac21393f1330d2925315b111eff3184f5f5a5d2 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 17:24:08 +0100 Subject: [PATCH 060/216] Make PriorityQueue capture checked --- .../collection/mutable/PriorityQueue.scala | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala index 5572bdca3cf6..a395fac4a44a 100644 --- a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala +++ b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala @@ -15,6 +15,7 @@ package mutable import scala.collection.generic.DefaultSerializationProxy import scala.math.Ordering +import language.experimental.captureChecking /** A heap-based priority queue. * @@ -66,7 +67,7 @@ import scala.math.Ordering * @define mayNotTerminateInf * @define willNotTerminateInf */ -sealed class PriorityQueue[A](implicit val ord: Ordering[A]) +sealed class PriorityQueue[sealed A](implicit val ord: Ordering[A]) extends AbstractIterable[A] with Iterable[A] with IterableOps[A, Iterable, PriorityQueue[A]] @@ -77,7 +78,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) with Serializable { - private class ResizableArrayAccess[A0] extends ArrayBuffer[A0] { + private class ResizableArrayAccess[sealed A0] extends ArrayBuffer[A0] { override def mapInPlace(f: A0 => A0): this.type = { var i = 1 // see "we do not use array(0)" comment below (???) 
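      // index 0 of the backing array is intentionally unused: the heap keeps its
      // root at index 1 so that the parent of element k is k / 2 and its children
      // are 2 * k and 2 * k + 1 (see fixUp and fixDown).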
val siz = this.size @@ -106,7 +107,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) override def isEmpty: Boolean = resarr.p_size0 < 2 // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) - override protected def fromSpecific(coll: scala.collection.IterableOnce[A]): PriorityQueue[A] = PriorityQueue.from(coll) + override protected def fromSpecific(coll: scala.collection.IterableOnce[A]^): PriorityQueue[A] = PriorityQueue.from(coll) override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder override def empty: PriorityQueue[A] = PriorityQueue.empty @@ -161,7 +162,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) this } - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { val from = resarr.p_size0 for (x <- xs.iterator) unsafeAdd(x) heapify(from) @@ -364,7 +365,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) pq } - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if (copied > 0) { Array.copy(resarr.p_array, 1, xs, start, copied) @@ -383,7 +384,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) @SerialVersionUID(3L) object PriorityQueue extends SortedIterableFactory[PriorityQueue] { - def newBuilder[A : Ordering]: Builder[A, PriorityQueue[A]] = { + def newBuilder[sealed A : Ordering]: Builder[A, PriorityQueue[A]] = { new Builder[A, PriorityQueue[A]] { val pq = new PriorityQueue[A] def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } @@ -392,9 +393,9 @@ object PriorityQueue extends SortedIterableFactory[PriorityQueue] { } } - def empty[A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] + def empty[sealed A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] - def from[E : Ordering](it: IterableOnce[E]): PriorityQueue[E] = { + def from[sealed E : Ordering](it: IterableOnce[E]^): PriorityQueue[E] = { val b = newBuilder[E] b ++= it b.result() From 4795ef84780e4b331ce6fbde87aba383ec70de48 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 17:27:16 +0100 Subject: [PATCH 061/216] Add Stack.scala and ReusableBuilder.scala to stdlib --- .../collection/mutable/ReusableBuilder.scala | 56 +++++++ .../stdlib/collection/mutable/Stack.scala | 144 ++++++++++++++++++ 2 files changed, 200 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Stack.scala diff --git a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala new file mode 100644 index 000000000000..246e525e37d9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala @@ -0,0 +1,56 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */
+
+package scala
+package collection
+package mutable
+
+import language.experimental.captureChecking
+
+/** `ReusableBuilder` is a marker trait that indicates that a `Builder`
+ * can be reused to build more than one instance of a collection. In
+ * particular, calling `result()` followed by `clear()` will produce a
+ * collection and reset the builder to begin building a new collection
+ * of the same type.
+ *
+ * In general no method other than `clear()` may be called after `result()`.
+ * It is up to subclasses to implement and to document other allowed sequences
+ * of operations (e.g. calling other methods after `result()` in order to obtain
+ * different snapshots of a collection under construction).
+ *
+ * @tparam Elem the type of elements that get added to the builder.
+ * @tparam To the type of collection that it produces.
+ *
+ * @define multipleResults
+ *
+ * This Builder can be reused after calling `result()` without an
+ * intermediate call to `clear()` in order to build multiple related results.
+ */
+trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] {
+  /** Clears the contents of this builder.
+   * After execution of this method, the builder will contain no elements.
+   *
+   * If executed immediately after a call to `result()`, this allows a new
+   * instance of the same type of collection to be built.
+   */
+  override def clear(): Unit // Note: overriding for Scaladoc only!
+
+  /** Produces a collection from the added elements.
+   *
+   * After a call to `result`, the behavior of all other methods is undefined
+   * save for `clear()`. If `clear()` is called, then the builder is reset and
+   * may be used to build another instance.
+   *
+   * @return a collection containing the elements added to this builder.
+   */
+  override def result(): To // Note: overriding for Scaladoc only!
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala
new file mode 100644
index 000000000000..d39afe4d099e
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Stack.scala
@@ -0,0 +1,144 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+
+import scala.annotation.{migration, nowarn}
+import scala.collection.generic.DefaultSerializable
+import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps}
+
+import language.experimental.captureChecking
+
+/** A stack implements a data structure which allows storing and retrieving
+ * objects in a last-in-first-out (LIFO) fashion.
+ *
+ * Note that operations which consume and produce iterables preserve order,
+ * rather than reversing it (as would be expected from building a new stack
+ * by pushing an element at a time).
+ *
+ * @tparam A type of the elements contained in this stack.
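+ *
+ * A short illustrative session (values chosen only for the example):
+ * {{{
+ *   val s = Stack.empty[Int]
+ *   s.push(1)
+ *   s.push(2, 3)
+ *   s.pop() // returns 3: the last element pushed is the first removed
+ * }}}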
+ *
+ * @define Coll `Stack`
+ * @define coll stack
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0")
+class Stack[sealed A] protected (array: Array[AnyRef], start: Int, end: Int)
+  extends ArrayDeque[A](array, start, end)
+    with IndexedSeqOps[A, Stack, Stack[A]]
+    with StrictOptimizedSeqOps[A, Stack, Stack[A]]
+    with IterableFactoryDefaults[A, Stack]
+    with ArrayDequeOps[A, Stack, Stack[A]]
+    with Cloneable[Stack[A]]
+    with DefaultSerializable {
+
+  def this(initialSize: Int = ArrayDeque.DefaultInitialSize) =
+    this(ArrayDeque.alloc(initialSize), start = 0, end = 0)
+
+  override def iterableFactory: SeqFactory[Stack] = Stack
+
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix = "Stack"
+
+  /**
+   * Adds a single element to the top of this stack.
+   *
+   * @param elem the element to push
+   * @return this stack
+   */
+  def push(elem: A): this.type = prepend(elem)
+
+  /** Push two or more elements onto the stack. The last element
+   * of the sequence will be on top of the new stack.
+   *
+   * @param elems the element sequence.
+   * @return the stack with the new elements on top.
+   */
+  def push(elem1: A, elem2: A, elems: A*): this.type = {
+    val k = elems.knownSize
+    ensureSize(length + (if(k >= 0) k + 2 else 3))
+    prepend(elem1).prepend(elem2).pushAll(elems)
+  }
+
+  /** Push all elements in the given iterable object onto the stack. The
+   * last element in the iterable object will be on top of the new stack.
+   *
+   * @param elems the iterable object.
+   * @return the stack with the new elements on top.
+   */
+  def pushAll(elems: scala.collection.IterableOnce[A]): this.type =
+    prependAll(elems match {
+      case it: scala.collection.Seq[A] => it.view.reverse
+      case it => IndexedSeq.from(it).view.reverse
+    })
+
+  /**
+   * Removes the top element from this stack and returns it
+   *
+   * @return the removed element
+   * @throws NoSuchElementException when the stack is empty
+   */
+  def pop(): A = removeHead()
+
+  /**
+   * Pops all elements from this stack and returns them
+   *
+   * @return The removed elements
+   */
+  def popAll(): scala.collection.Seq[A] = removeAll()
+
+  /**
+   * Returns and removes all elements from the top of this stack which satisfy the given predicate
+   *
+   * @param f the predicate used for choosing elements
+   * @return The removed elements
+   */
+  def popWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f)
+
+  /** Returns the top element of the stack. This method will not remove
+   * the element from the stack. An error is signaled if there is no
+   * element on the stack.
+   *
+   * @throws NoSuchElementException
+   * @return the top element
+   */
+  @`inline` final def top: A = head
+
+  override protected def klone(): Stack[A] = {
+    val bf = newSpecificBuilder
+    bf ++= this
+    bf.result()
+  }
+
+  override protected def ofArray(array: Array[AnyRef], end: Int): Stack[A] =
+    new Stack(array, start = 0, end)
+
+}
+
+/**
+ * $factoryInfo
+ * @define coll stack
+ * @define Coll `Stack`
+ */
+@SerialVersionUID(3L)
+object Stack extends StrictOptimizedSeqFactory[Stack] {
+
+  def from[sealed A](source: IterableOnce[A]): Stack[A] = empty ++= source
+
+  def empty[sealed A]: Stack[A] = new Stack
+
+  def newBuilder[sealed A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty)
+
+}

From 0bf190359bd0e98e5b61225d4486fccda3d9c86c Mon Sep 17 00:00:00 2001
From: odersky
Date: Tue, 31 Oct 2023 17:28:07 +0100
Subject: [PATCH 062/216] Add Set.scala to stdlib

---
 tests/pos-special/stdlib/collection/Set.scala | 269 ++++++++++++++++++
 1 file changed, 269 insertions(+)
 create mode 100644 tests/pos-special/stdlib/collection/Set.scala

diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala
new file mode 100644
index 000000000000..0ea1e5689473
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/Set.scala
@@ -0,0 +1,269 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.util.hashing.MurmurHash3
+import java.lang.String
+
+import scala.annotation.nowarn
+
+/** Base trait for set collections.
+ */
+trait Set[A]
+  extends Iterable[A]
+    with SetOps[A, Set, Set[A]]
+    with Equals
+    with IterableFactoryDefaults[A, Set] {
+
+  def canEqual(that: Any) = true
+
+  /**
+   * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if
+   *   - the argument `that` is a `Set`,
+   *   - the two sets have the same [[size]], and
+   *   - for every `element` of this set, `other.contains(element) == true`.
+   *
+   * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality
+   * to specific set types. The `Set` implementations in the standard library can all be compared; their `canEqual`
+   * methods return `true`.
+   *
+   * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same
+   * element equivalence function in their lookup operation. For example, the element equivalence operation in a
+   * [[scala.collection.immutable.TreeSet]] is defined by its ordering. Comparing a `TreeSet` with a `HashSet` leads
+   * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2`
+   * (used for lookup in `HashSet`).
+   *
+   * {{{
+   *   scala> import scala.collection.immutable._
+   *   scala> val ord: Ordering[String] = _ compareToIgnoreCase _
+   *
+   *   scala> TreeSet("A")(ord) == HashSet("a")
+   *   val res0: Boolean = false
+   *
+   *   scala> HashSet("a") == TreeSet("A")(ord)
+   *   val res1: Boolean = true
+   * }}}
+   *
+   * @param that The set to which this set is compared
+   * @return `true` if the two sets are equal according to the description
+   */
+  override def equals(that: Any): Boolean =
+    (this eq that.asInstanceOf[AnyRef]) || (that match {
+      case set: Set[A @unchecked] if set.canEqual(this) =>
+        (this.size == set.size) && {
+          try this.subsetOf(set)
+          catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228
+        }
+      case _ =>
+        false
+    })
+
+  override def hashCode(): Int = MurmurHash3.setHash(this)
+
+  override def iterableFactory: IterableFactory[Set] = Set
+
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix: String = "Set"
+
+  override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too
+}
+
+/** Base trait for set operations
+ *
+ * @define coll set
+ * @define Coll `Set`
+ */
+trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]]
+  extends IterableOps[A, CC, C]
+    with (A => Boolean) {
+
+  def contains(elem: A): Boolean
+
+  /** Tests if some element is contained in this set.
+   *
+   * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
+   * @param elem the element to test for membership.
+   * @return `true` if `elem` is contained in this set, `false` otherwise.
+   */
+  @`inline` final def apply(elem: A): Boolean = this.contains(elem)
+
+  /** Tests whether this set is a subset of another set.
+   *
+   * @param that the set to test.
+   * @return `true` if this set is a subset of `that`, i.e. if
+   * every element of this set is also an element of `that`.
+   */
+  def subsetOf(that: Set[A]): Boolean = this.forall(that)
+
+  /** An iterator over all subsets of this set of the given size.
+   * If the requested size is impossible, an empty iterator is returned.
+   *
+   * @param len the size of the subsets.
+   * @return the iterator.
+   */
+  def subsets(len: Int): Iterator[C] = {
+    if (len < 0 || len > size) Iterator.empty
+    else new SubsetsItr(this.to(IndexedSeq), len)
+  }
+
+  /** An iterator over all subsets of this set.
+   *
+   * @return the iterator.
+   */
+  def subsets(): Iterator[C] = new AbstractIterator[C] {
+    private[this] val elms = SetOps.this.to(IndexedSeq)
+    private[this] var len = 0
+    private[this] var itr: Iterator[C] = Iterator.empty
+
+    def hasNext = len <= elms.size || itr.hasNext
+    def next() = {
+      if (!itr.hasNext) {
+        if (len > elms.size) Iterator.empty.next()
+        else {
+          itr = new SubsetsItr(elms, len)
+          len += 1
+        }
+      }
+
+      itr.next()
+    }
+  }
+
+  /** An Iterator over all subsets containing exactly `len` elements.
+   * If the elements of this set are ordered, then the subsets will also be produced in the same order.
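+   * For instance, for a fixed size only the subsets of that size are produced:
+   *   ListSet(1,2,3).subsets(2) => {{1,2},{1,3},{2,3}}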
+   *   ListSet(1,2,3).subsets => {{},{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}}
+   *
+   * $willForceEvaluation
+   *
+   */
+  private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[C] {
+    private[this] val idxs = Array.range(0, len+1)
+    private[this] var _hasNext = true
+    idxs(len) = elms.size
+
+    def hasNext = _hasNext
+    @throws[NoSuchElementException]
+    def next(): C = {
+      if (!hasNext) Iterator.empty.next()
+
+      val buf = newSpecificBuilder
+      idxs.slice(0, len) foreach (idx => buf += elms(idx))
+      val result = buf.result()
+
+      var i = len - 1
+      while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1
+
+      if (i < 0) _hasNext = false
+      else {
+        idxs(i) += 1
+        for (j <- (i+1) until len)
+          idxs(j) = idxs(j-1) + 1
+      }
+
+      result
+    }
+  }
+
+  /** Computes the intersection between this set and another set.
+   *
+   * @param that the set to intersect with.
+   * @return a new set consisting of all elements that are both in this
+   * set and in the given set `that`.
+   */
+  def intersect(that: Set[A]): C = this.filter(that)
+
+  /** Alias for `intersect` */
+  @`inline` final def & (that: Set[A]): C = intersect(that)
+
+  /** Computes the difference of this set and another set.
+   *
+   * @param that the set of elements to exclude.
+   * @return a set containing those elements of this
+   * set that are not also contained in the given set `that`.
+   */
+  def diff(that: Set[A]): C
+
+  /** Alias for `diff` */
+  @`inline` final def &~ (that: Set[A]): C = this diff that
+
+  @deprecated("Consider requiring an immutable Set", "2.13.0")
+  def -- (that: IterableOnce[A]): C = {
+    val toRemove = that.iterator.to(immutable.Set)
+    fromSpecific(view.filterNot(toRemove))
+  }
+
+  @deprecated("Consider requiring an immutable Set or fall back to Set.diff", "2.13.0")
+  def - (elem: A): C = diff(Set(elem))
+
+  @deprecated("Use &~ with an explicit collection argument instead of - with varargs", "2.13.0")
+  def - (elem1: A, elem2: A, elems: A*): C = diff(elems.toSet + elem1 + elem2)
+
+  /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates.
+   *
+   * This method takes a collection of elements and adds all elements, omitting duplicates, into $coll.
+   *
+   * Example:
+   *  {{{
+   *    scala> val a = Set(1, 2) concat Set(2, 3)
+   *    a: scala.collection.immutable.Set[Int] = Set(1, 2, 3)
+   *  }}}
+   *
+   * @param that the collection containing the elements to add.
+   * @return a new $coll with the given elements added, omitting duplicates.
+   */
+  def concat(that: collection.IterableOnce[A]): C = this match {
+    case optimizedSet @ (_ : scala.collection.immutable.Set.Set1[A] | _: scala.collection.immutable.Set.Set2[A] | _: scala.collection.immutable.Set.Set3[A] | _: scala.collection.immutable.Set.Set4[A]) =>
+      // StrictOptimizedSetOps optimization of concat (these Sets cannot extend StrictOptimizedSetOps because of binary-incompatible return type; cf. PR #10036)
+      var result = optimizedSet.asInstanceOf[scala.collection.immutable.SetOps[A, scala.collection.immutable.Set, scala.collection.immutable.Set[A]]]
+      val it = that.iterator
+      while (it.hasNext) result = result + it.next()
+      result.asInstanceOf[C]
+    case _ => fromSpecific(that match {
+      case that: collection.Iterable[A] => new View.Concat(this, that)
+      case _ => iterator.concat(that.iterator)
+    })
+  }
+
+  @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0")
+  def + (elem: A): C = fromSpecific(new View.Appended(this, elem))
+
+  @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+  def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))
+
+  /** Alias for `concat` */
+  @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that)
+
+  /** Computes the union of this set and another set.
+   *
+   * @param that the set to form the union with.
+   * @return a new set consisting of all elements that are in this
+   * set or in the given set `that`.
+   */
+  @`inline` final def union(that: Set[A]): C = concat(that)
+
+  /** Alias for `union` */
+  @`inline` final def | (that: Set[A]): C = concat(that)
+}
+
+/**
+ * $factoryInfo
+ * @define coll set
+ * @define Coll `Set`
+ */
+@SerialVersionUID(3L)
+object Set extends IterableFactory.Delegate[Set](immutable.Set)
+
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
+abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]

From 35e71668f8bbbc737643800a2f550517c1532a00 Mon Sep 17 00:00:00 2001
From: odersky
Date: Tue, 31 Oct 2023 17:33:38 +0100
Subject: [PATCH 063/216] Make collection/Set capture checked

---
 tests/pos-special/stdlib/collection/Set.scala | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala
index 0ea1e5689473..75707f204c02 100644
--- a/tests/pos-special/stdlib/collection/Set.scala
+++ b/tests/pos-special/stdlib/collection/Set.scala
@@ -17,6 +17,7 @@ import scala.util.hashing.MurmurHash3
 import java.lang.String
 
 import scala.annotation.nowarn
+import language.experimental.captureChecking
 
 /** Base trait for set collections.
 */
@@ -25,6 +26,7 @@ trait Set[A]
     with SetOps[A, Set, Set[A]]
     with Equals
     with IterableFactoryDefaults[A, Set] {
+  self: Set[A] =>
 
   def canEqual(that: Any) = true
 
@@ -86,8 +88,7 @@ trait Set[A]
  * @define Coll `Set`
  */
 trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]]
-  extends IterableOps[A, CC, C]
-    with (A => Boolean) {
+  extends IterableOps[A, CC, C], (A -> Boolean) { self =>
 
   def contains(elem: A): Boolean
 
@@ -234,7 +235,7 @@ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]]
       case that: collection.Iterable[A] => new View.Concat(this, that)
       case _ => iterator.concat(that.iterator)
     })
-  }
+  }
 
 @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0")
 def + (elem: A): C = fromSpecific(new View.Appended(this, elem))

From 14b5093fa961c4c2f0a75a4d333dcdb6bd203646 Mon Sep 17 00:00:00 2001
From: odersky
Date: Tue, 31 Oct 2023 17:35:03 +0100
Subject: [PATCH 064/216] Add mutable/immutable Set to stdlib

---
 .../stdlib/collection/immutable/Set.scala     | 398 ++++++++++++++++++
 .../stdlib/collection/mutable/Set.scala       | 122 ++++++
 2 files changed, 520 insertions(+)
 create mode 100644 tests/pos-special/stdlib/collection/immutable/Set.scala
 create mode 100644 tests/pos-special/stdlib/collection/mutable/Set.scala

diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala
new file mode 100644
index 000000000000..f07eb66991c8
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/Set.scala
@@ -0,0 +1,398 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.collection.immutable.Set.Set4
+import scala.collection.mutable.{Builder, ReusableBuilder}
+
+/** Base trait for immutable set collections */
+trait Set[A] extends Iterable[A]
+    with collection.Set[A]
+    with SetOps[A, Set, Set[A]]
+    with IterableFactoryDefaults[A, Set] {
+  override def iterableFactory: IterableFactory[Set] = Set
+}
+
+/** Base trait for immutable set operations
+ *
+ * @define coll immutable set
+ * @define Coll `immutable.Set`
+ */
+trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]]
+  extends collection.SetOps[A, CC, C] {
+
+  /** Creates a new set with an additional element, unless the element is
+   * already present.
+   *
+   * @param elem the element to be added
+   * @return a new set that contains all elements of this set and that also
+   * contains `elem`.
+   */
+  def incl(elem: A): C
+
+  /** Alias for `incl` */
+  override final def + (elem: A): C = incl(elem) // like in collection.Set but not deprecated
+
+  /** Creates a new set with a given element removed from this set.
+   *
+   * @param elem the element to be removed
+   * @return a new set that contains all elements of this set but that does not
+   * contain `elem`.
+   */
+  def excl(elem: A): C
+
+  /** Alias for `excl` */
+  @`inline` final override def - (elem: A): C = excl(elem)
+
+  def diff(that: collection.Set[A]): C =
+    foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem)
+
+  /** Creates a new $coll from this $coll by removing all elements of another
+   * collection.
+   *
+   * @param that the collection containing the elements to remove.
+   * @return a new $coll with the given elements removed.
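+   *
+   * For example:
+   * {{{
+   *   Set(1, 2, 3).removedAll(List(2, 4)) // Set(1, 3)
+   * }}}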
+ */ + def removedAll(that: IterableOnce[A]): C = that.iterator.foldLeft[C](coll)(_ - _) + + /** Alias for removedAll */ + override final def -- (that: IterableOnce[A]): C = removedAll(that) +} + +trait StrictOptimizedSetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with collection.StrictOptimizedSetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def concat(that: collection.IterableOnce[A]): C = { + var result: C = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} + +/** + * $factoryInfo + * @define coll immutable set + * @define Coll `immutable.Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory[Set] { + + def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]] + + def from[E](it: collection.IterableOnce[E]): Set[E] = + it match { + // We want `SortedSet` (and subclasses, such as `BitSet`) to + // rebuild themselves to avoid element type widening issues + case _: SortedSet[E] => (newBuilder[E] ++= it).result() + case _ if it.knownSize == 0 => empty[E] + case s: Set[E] => s + case _ => (newBuilder[E] ++= it).result() + } + + def newBuilder[A]: Builder[A, Set[A]] = new SetBuilderImpl[A] + + /** An optimized representation for immutable empty sets */ + @SerialVersionUID(3L) + private object EmptySet extends AbstractSet[Any] with Serializable { + override def size: Int = 0 + override def isEmpty = true + override def knownSize: Int = size + override def filter(pred: Any => Boolean): Set[Any] = this + override def filterNot(pred: Any => Boolean): Set[Any] = this + override def removedAll(that: IterableOnce[Any]): Set[Any] = this + override def diff(that: collection.Set[Any]): Set[Any] = this + override def subsetOf(that: collection.Set[Any]): Boolean = true + override def intersect(that: collection.Set[Any]): Set[Any] = this + override def view: View[Any] = View.empty + def contains(elem: Any): Boolean = false + def incl(elem: Any): Set[Any] = new Set1(elem) + def excl(elem: Any): Set[Any] = this + def iterator: Iterator[Any] = Iterator.empty + override def foreach[U](f: Any => U): Unit = () + } + private[collection] def emptyInstance: Set[Any] = EmptySet + + @SerialVersionUID(3L) + private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A] with Serializable { + private[this] var current = 0 + private[this] var remainder = n + override def knownSize: Int = remainder + def hasNext = remainder > 0 + def apply(i: Int): A + def next(): A = + if (hasNext) { + val r = apply(current) + current += 1 + remainder -= 1 + r + } else Iterator.empty.next() + + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + current += n + remainder = Math.max(0, remainder - n) + } + this + } + } + + /** An optimized representation for immutable sets of size 1 */ + @SerialVersionUID(3L) + final class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 1 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set2(elem1, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) Set.empty + else this + def iterator: Iterator[A] = Iterator.single(elem1) + override def foreach[U](f: A => U): Unit = f(elem1) + override def exists(p: A => Boolean): Boolean = p(elem1) + override def forall(p: A => Boolean): Boolean = p(elem1) + override protected[collection] def 
filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = + if (pred(elem1) != isFlipped) this else Set.empty + + override def find(p: A => Boolean): Option[A] = + if (p(elem1)) Some(elem1) + else None + override def head: A = elem1 + override def tail: Set[A] = Set.empty + } + + /** An optimized representation for immutable sets of size 2 */ + @SerialVersionUID(3L) + final class Set2[A] private[collection] (elem1: A, elem2: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 2 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 || elem == elem2 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set3(elem1, elem2, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) new Set1(elem2) + else if (elem == elem2) new Set1(elem1) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => this + } + } + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set1(elem2) + } + + /** An optimized representation for immutable sets of size 3 */ + @SerialVersionUID(3L) + final class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 3 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = + elem == elem1 || elem == elem2 || elem == elem3 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set4(elem1, elem2, elem3, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) new Set2(elem2, elem3) + else if (elem == elem2) new Set2(elem1, elem3) + else if (elem == elem3) new Set2(elem1, elem2) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2); f(elem3) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) || p(elem3) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) && p(elem3) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1, r2: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} + if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => new Set2(r1, r2) + case 3 => this + } + } + override def 
find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else if (p(elem3)) Some(elem3) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set2(elem2, elem3) + } + + /** An optimized representation for immutable sets of size 4 */ + @SerialVersionUID(3L) + final class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 4 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = + elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else HashSet.empty[A] + elem1 + elem2 + elem3 + elem4 + elem + def excl(elem: A): Set[A] = + if (elem == elem1) new Set3(elem2, elem3, elem4) + else if (elem == elem2) new Set3(elem1, elem3, elem4) + else if (elem == elem3) new Set3(elem1, elem2, elem4) + else if (elem == elem4) new Set3(elem1, elem2, elem3) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 case 3 => elem4 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2); f(elem3); f(elem4) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) || p(elem3) || p(elem4) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) && p(elem3) && p(elem4) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1, r2, r3: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} + if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3 else r3 = elem3; n += 1} + if (pred(elem4) != isFlipped) { if (n == 0) r1 = elem4 else if (n == 1) r2 = elem4 else if (n == 2) r3 = elem4; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => new Set2(r1, r2) + case 3 => new Set3(r1, r2, r3) + case 4 => this + } + } + + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else if (p(elem3)) Some(elem3) + else if (p(elem4)) Some(elem4) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set3(elem2, elem3, elem4) + + private[immutable] def buildTo(builder: Builder[A, Set[A]]): builder.type = + builder.addOne(elem1).addOne(elem2).addOne(elem3).addOne(elem4) + } +} + +/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ +abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] + +/** Builder for Set. 
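+ * Starts with one of the size-specialized representations (`Set1` to `Set4`)
+ * and switches to a `HashSetBuilder` once a fifth distinct element is added.
+ *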
+ * $multipleResults + */ +private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { + private[this] var elems: Set[A] = Set.empty + private[this] var switchedToHashSetBuilder: Boolean = false + private[this] var hashSetBuilder: HashSetBuilder[A] = _ + + override def clear(): Unit = { + elems = Set.empty + if (hashSetBuilder != null) { + hashSetBuilder.clear() + } + switchedToHashSetBuilder = false + } + + override def result(): Set[A] = + if (switchedToHashSetBuilder) hashSetBuilder.result() else elems + + def addOne(elem: A) = { + if (switchedToHashSetBuilder) { + hashSetBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem)) { + () // do nothing + } else { + switchedToHashSetBuilder = true + if (hashSetBuilder == null) { + hashSetBuilder = new HashSetBuilder + } + elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder) + hashSetBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[A]): this.type = + if (switchedToHashSetBuilder) { + hashSetBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala new file mode 100644 index 000000000000..6530e8fedf05 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Set.scala @@ -0,0 +1,122 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps} + +/** Base trait for mutable sets */ +trait Set[A] + extends Iterable[A] + with collection.Set[A] + with SetOps[A, Set, Set[A]] + with IterableFactoryDefaults[A, Set] { + + override def iterableFactory: IterableFactory[Set] = Set +} + +/** + * @define coll mutable set + * @define Coll `mutable.Set` + */ +trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends collection.SetOps[A, CC, C] + with IterableOps[A, CC, C] // only needed so we can use super[IterableOps] below + with Cloneable[C] + with Builder[A, C] + with Growable[A] + with Shrinkable[A] { + + def result(): C = coll + + /** Check whether the set contains the given element, and add it if not. + * + * @param elem the element to be added + * @return true if the element was added + */ + def add(elem: A): Boolean = + !contains(elem) && { + coll += elem; true + } + + /** Updates the presence of a single element in this set. + * + * This method allows one to add or remove an element `elem` + * from this set depending on the value of parameter `included`. + * Typically, one would use the following syntax: + * {{{ + * set(elem) = true // adds element + * set(elem) = false // removes element + * }}} + * + * @param elem the element to be added or removed + * @param included a flag indicating whether element should be included or excluded. + */ + def update(elem: A, included: Boolean): Unit = { + if (included) add(elem) + else remove(elem) + } + + /** Removes an element from this set. 
+   *
+   * @param elem the element to be removed
+   * @return true if this set contained the element before it was removed
+   */
+  def remove(elem: A): Boolean = {
+    val res = contains(elem)
+    coll -= elem
+    res
+  }
+
+  def diff(that: collection.Set[A]): C =
+    foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem)
+
+  @deprecated("Use filterInPlace instead", "2.13.0")
+  @inline final def retain(p: A => Boolean): Unit = filterInPlace(p)
+
+  /** Removes all elements from the set that do not satisfy the given predicate.
+   * @param p the predicate used to test elements. Only elements for
+   * which `p` returns `true` are retained in the set; all others
+   * are removed.
+   */
+  def filterInPlace(p: A => Boolean): this.type = {
+    if (nonEmpty) {
+      val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException
+      val arrayLength = array.length
+      var i = 0
+      while (i < arrayLength) {
+        val elem = array(i).asInstanceOf[A]
+        if (!p(elem)) {
+          this -= elem
+        }
+        i += 1
+      }
+    }
+    this
+  }
+
+  override def clone(): C = empty ++= this
+
+  override def knownSize: Int = super[IterableOps].knownSize
+}
+
+/**
+ * $factoryInfo
+ * @define coll mutable set
+ * @define Coll `mutable.Set`
+ */
+@SerialVersionUID(3L)
+object Set extends IterableFactory.Delegate[Set](HashSet)
+
+
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
+abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A]

From 1f414a3b9d13890ccfe67becb8cb222371042f3c Mon Sep 17 00:00:00 2001
From: odersky
Date: Tue, 31 Oct 2023 17:40:53 +0100
Subject: [PATCH 065/216] Make mutable/immutable Set capture checked

---
 .../stdlib/collection/immutable/Set.scala           | 12 +++++++-----
 .../pos-special/stdlib/collection/mutable/Set.scala |  1 +
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala
index f07eb66991c8..ac92f81b2013 100644
--- a/tests/pos-special/stdlib/collection/immutable/Set.scala
+++ b/tests/pos-special/stdlib/collection/immutable/Set.scala
@@ -16,6 +16,8 @@ package immutable
 
 import scala.collection.immutable.Set.Set4
 import scala.collection.mutable.{Builder, ReusableBuilder}
+import language.experimental.captureChecking
+import annotation.unchecked.uncheckedCaptures
 
 /** Base trait for immutable set collections */
 trait Set[A] extends Iterable[A]
@@ -94,7 +96,7 @@ object Set extends IterableFactory[Set] {
 
   def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]]
 
-  def from[E](it: collection.IterableOnce[E]): Set[E] =
+  def from[E](it: collection.IterableOnce[E]^): Set[E] =
     it match {
       // We want `SortedSet` (and subclasses, such as `BitSet`) to
      // rebuild themselves to avoid element type widening issues
@@ -128,7 +130,7 @@ object Set extends IterableFactory[Set] {
   private[collection] def emptyInstance: Set[Any] = EmptySet
 
   @SerialVersionUID(3L)
-  private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A] with Serializable {
+  private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A], Serializable, Pure {
     private[this] var current = 0
     private[this] var remainder = n
     override def knownSize: Int = remainder
@@ -351,9 +353,9 @@ abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A
  * $multipleResults
  */
 private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] {
-  private[this] var elems: Set[A] = Set.empty
+  private[this] var elems: Set[A
@uncheckedCaptures] = Set.empty private[this] var switchedToHashSetBuilder: Boolean = false - private[this] var hashSetBuilder: HashSetBuilder[A] = _ + private[this] var hashSetBuilder: HashSetBuilder[A @uncheckedCaptures] = _ override def clear(): Unit = { elems = Set.empty @@ -388,7 +390,7 @@ private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { this } - override def addAll(xs: IterableOnce[A]): this.type = + override def addAll(xs: IterableOnce[A]^): this.type = if (switchedToHashSetBuilder) { hashSetBuilder.addAll(xs) this diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala index 6530e8fedf05..01384e993e89 100644 --- a/tests/pos-special/stdlib/collection/mutable/Set.scala +++ b/tests/pos-special/stdlib/collection/mutable/Set.scala @@ -13,6 +13,7 @@ package scala.collection.mutable import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps} +import language.experimental.captureChecking /** Base trait for mutable sets */ trait Set[A] From f92c881562dedd6da200617f4f222f84e24c5525 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 17:48:19 +0100 Subject: [PATCH 066/216] Add BitSet classes to stdlib (capture checked) --- .../stdlib/collection/BitSet.scala | 348 ++++++++++++++++ .../stdlib/collection/immutable/BitSet.scala | 376 +++++++++++++++++ .../stdlib/collection/mutable/BitSet.scala | 393 ++++++++++++++++++ 3 files changed, 1117 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/BitSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/BitSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/BitSet.scala diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala new file mode 100644 index 000000000000..62326dfd2152 --- /dev/null +++ b/tests/pos-special/stdlib/collection/BitSet.scala @@ -0,0 +1,348 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.Builder +import language.experimental.captureChecking + +/** Base type of bitsets. + * + * This trait provides most of the operations of a `BitSet` independently of its representation. + * It is inherited by all concrete implementations of bitsets. + * + * @define bitsetinfo + * Bitsets are sets of non-negative integers which are represented as + * variable-size arrays of bits packed into 64-bit words. The lower bound of memory footprint of a bitset is + * determined by the largest number stored in it. 
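+ * For example (illustrative): the element 131 lives in word `131 >> 6 == 2`, at bit `131 & 63 == 3`,
+ * so `BitSet(0, 64, 131).toBitMask` is `Array(1L, 1L, 8L)`.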
+ * @define coll bitset
+ * @define Coll `BitSet`
+ */
+trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] {
+  override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll)
+  override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder
+  override def empty: BitSet = bitSetFactory.empty
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix = "BitSet"
+  override def unsorted: Set[Int] = this
+}
+
+@SerialVersionUID(3L)
+object BitSet extends SpecificIterableFactory[Int, BitSet] {
+  private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Int] first by calling `unsorted`."
+  private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Int, ${B})]. You may want to upcast to a Set[Int] first by calling `unsorted`."
+
+  def empty: BitSet = immutable.BitSet.empty
+  def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder
+  def fromSpecific(it: IterableOnce[Int]): BitSet = immutable.BitSet.fromSpecific(it)
+
+  @SerialVersionUID(3L)
+  private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable {
+
+    @transient protected var elems: Array[Long] = _
+
+    private[this] def writeObject(out: ObjectOutputStream): Unit = {
+      out.defaultWriteObject()
+      val nwords = coll.nwords
+      out.writeInt(nwords)
+      var i = 0
+      while(i < nwords) {
+        out.writeLong(coll.word(i))
+        i += 1
+      }
+    }
+
+    private[this] def readObject(in: ObjectInputStream): Unit = {
+      in.defaultReadObject()
+      val nwords = in.readInt()
+      elems = new Array[Long](nwords)
+      var i = 0
+      while(i < nwords) {
+        elems(i) = in.readLong()
+        i += 1
+      }
+    }
+
+    protected[this] def readResolve(): Any
+  }
+}
+
+/** Base implementation type of bitsets */
+trait BitSetOps[+C <: BitSet with BitSetOps[C]]
+  extends SortedSetOps[Int, SortedSet, C] { self =>
+  import BitSetOps._
+
+  def bitSetFactory: SpecificIterableFactory[Int, C]
+
+  def unsorted: Set[Int]
+
+  final def ordering: Ordering[Int] = Ordering.Int
+
+  /** The number of words (each with 64 bits) making up the set */
+  protected[collection] def nwords: Int
+
+  /** The word at index `idx`, or 0L if outside the range of the set
+   * '''Note:''' requires `idx >= 0`
+   */
+  protected[collection] def word(idx: Int): Long
+
+  /** Creates a new set of this kind from an array of longs
+   */
+  protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): C
+
+  def contains(elem: Int): Boolean =
+    0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L
+
+  def iterator: Iterator[Int] = iteratorFrom(0)
+
+  def iteratorFrom(start: Int): Iterator[Int] = new AbstractIterator[Int] {
+    private[this] var currentPos = if (start > 0) start >> LogWL else 0
+    private[this] var currentWord = if (start > 0) word(currentPos) & (-1L << (start & (WordLength - 1))) else word(0)
+    final override def hasNext: Boolean = {
+      while (currentWord == 0) {
+        if (currentPos + 1 >= nwords) return false
+        currentPos += 1
+        currentWord = word(currentPos)
+      }
+      true
+    }
+    final override def next(): Int = {
+      if (hasNext) {
+        val bitPos = java.lang.Long.numberOfTrailingZeros(currentWord)
+        currentWord &= currentWord - 1
+        (currentPos << LogWL) + bitPos
+      } else Iterator.empty.next()
+    }
+  }
+
+  override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = {
+    val st =
scala.collection.convert.impl.BitSetStepper.from(this) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + override def size: Int = { + var s = 0 + var i = nwords + while (i > 0) { + i -= 1 + s += java.lang.Long.bitCount(word(i)) + } + s + } + + override def isEmpty: Boolean = 0 until nwords forall (i => word(i) == 0) + + @inline private[this] def smallestInt: Int = { + val thisnwords = nwords + var i = 0 + while(i < thisnwords) { + val currentWord = word(i) + if (currentWord != 0L) { + return java.lang.Long.numberOfTrailingZeros(currentWord) + (i * WordLength) + } + i += 1 + } + throw new UnsupportedOperationException("empty.smallestInt") + } + + @inline private[this] def largestInt: Int = { + var i = nwords - 1 + while(i >= 0) { + val currentWord = word(i) + if (currentWord != 0L) { + return ((i + 1) * WordLength) - java.lang.Long.numberOfLeadingZeros(currentWord) - 1 + } + i -= 1 + } + throw new UnsupportedOperationException("empty.largestInt") + } + + override def max[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) largestInt + else if (Ordering.Int isReverseOf ord) smallestInt + else super.max(ord) + + + override def min[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) smallestInt + else if (Ordering.Int isReverseOf ord) largestInt + else super.min(ord) + + override def foreach[U](f: Int => U): Unit = { + /* NOTE: while loops are significantly faster as of 2.11 and + one major use case of bitsets is performance. Also, there + is nothing to do when all bits are clear, so use that as + the inner loop condition. */ + var i = 0 + while (i < nwords) { + var w = word(i) + var j = i * WordLength + while (w != 0L) { + if ((w&1L) == 1L) f(j) + w = w >>> 1 + j += 1 + } + i += 1 + } + } + + /** Creates a bit mask for this set as a new array of longs + */ + def toBitMask: Array[Long] = { + val a = new Array[Long](nwords) + var i = a.length + while(i > 0) { + i -= 1 + a(i) = word(i) + } + a + } + + def rangeImpl(from: Option[Int], until: Option[Int]): C = { + val a = coll.toBitMask + val len = a.length + if (from.isDefined) { + val f = from.get + val w = f >> LogWL + val b = f & (WordLength - 1) + if (w >= 0) { + java.util.Arrays.fill(a, 0, math.min(w, len), 0) + if (b > 0 && w < len) a(w) &= ~((1L << b) - 1) + } + } + if (until.isDefined) { + val u = until.get + val w = u >> LogWL + val b = u & (WordLength - 1) + if (w < len) { + java.util.Arrays.fill(a, math.max(w + 1, 0), len, 0) + if (w >= 0) a(w) &= (1L << b) - 1 + } + } + coll.fromBitMaskNoCopy(a) + } + + override def concat(other: collection.IterableOnce[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords max otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) | otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.concat(other) + } + + override def intersect(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords min otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) & otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.intersect(other) + } + + abstract override def diff(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords + val words = new Array[Long](len) + for (idx <- 
0 until len) + words(idx) = this.word(idx) & ~otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.diff(other) + } + + /** Computes the symmetric difference of this bitset and another bitset by performing + * a bitwise "exclusive-or". + * + * @param other the other bitset to take part in the symmetric difference. + * @return a bitset containing those bits of this + * bitset or the other bitset that are not contained in both bitsets. + */ + def xor(other: BitSet): C = { + val len = coll.nwords max other.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = coll.word(idx) ^ other.word(idx) + coll.fromBitMaskNoCopy(words) + } + + @`inline` final def ^ (other: BitSet): C = xor(other) + + /** + * Builds a new bitset by applying a function to all elements of this bitset + * @param f the function to apply to each element. + * @return a new bitset resulting from applying the given function ''f'' to + * each element of this bitset and collecting the results + */ + def map(f: Int => Int): C = fromSpecific(new View.Map(this, f)) + + def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f)) + + def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf)) + + override def partition(p: Int => Boolean): (C, C) = { + val left = filter(p) + (left, this &~ left) + } +} + +object BitSetOps { + + /* Final vals can sometimes be inlined as constants (faster) */ + private[collection] final val LogWL = 6 + private[collection] final val WordLength = 64 + private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1 + + private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = { + var len = elems.length + while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1 + var newlen = len + if (idx >= newlen && w != 0L) newlen = idx + 1 + val newelems = new Array[Long](newlen) + Array.copy(elems, 0, newelems, 0, len) + if (idx < newlen) newelems(idx) = w + else assert(w == 0L) + newelems + } + + private[collection] def computeWordForFilter(pred: Int => Boolean, isFlipped: Boolean, oldWord: Long, wordIndex: Int): Long = + if (oldWord == 0L) 0L else { + var w = oldWord + val trailingZeroes = java.lang.Long.numberOfTrailingZeros(w) + var jmask = 1L << trailingZeroes + var j = wordIndex * BitSetOps.WordLength + trailingZeroes + val maxJ = (wordIndex + 1) * BitSetOps.WordLength - java.lang.Long.numberOfLeadingZeros(w) + while (j != maxJ) { + if ((w & jmask) != 0L) { + if (pred(j) == isFlipped) { + // j did not pass the filter here + w = w & ~jmask + } + } + jmask = jmask << 1 + j += 1 + } + w + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala new file mode 100644 index 000000000000..109f32d6f49a --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/BitSet.scala @@ -0,0 +1,376 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import BitSetOps.{LogWL, updateArray} +import mutable.Builder +import scala.annotation.{implicitNotFound, nowarn} +import language.experimental.captureChecking + +/** A class for immutable bitsets. 
+ * $bitsetinfo + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-bitsets "Scala's Collection Library overview"]] + * section on `Immutable BitSets` for more information. + * + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +sealed abstract class BitSet + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { + + override def unsorted: Set[Int] = this + + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + + def bitSetFactory = BitSet + + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) + + def incl(elem: Int): BitSet = { + require(elem >= 0, "bitset element must be >= 0") + if (contains(elem)) this + else { + val idx = elem >> LogWL + updateWord(idx, word(idx) | (1L << elem)) + } + } + + def excl(elem: Int): BitSet = { + require(elem >= 0, "bitset element must be >= 0") + if (contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) & ~(1L << elem)) + } else this + } + + /** Update word at index `idx`; enlarge set if `idx` outside range of set. + */ + protected def updateWord(idx: Int, w: Long): BitSet + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: scala.IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) +} + +/** + * $factoryInfo + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +@nowarn("cat=deprecation&msg=Implementation classes of BitSet should not be accessed directly") +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + + def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = + it match { + case bs: BitSet => bs + case _ => (newBuilder ++= it).result() + } + + final val empty: BitSet = new BitSet1(0L) + + def newBuilder: Builder[Int, BitSet] = + mutable.BitSet.newBuilder.mapResult(bs => fromBitMaskNoCopy(bs.elems)) + + private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) + + /** A bitset containing all the bits in an array */ + def fromBitMask(elems: Array[Long]): BitSet = { + val len = elems.length + 
if (len == 0) empty + else if (len == 1) new BitSet1(elems(0)) + else if (len == 2) createSmall(elems(0), elems(1)) + else { + val a = java.util.Arrays.copyOf(elems, len) + new BitSetN(a) + } + } + + /** A bitset containing all the bits in an array, wrapping the existing + * array without copying. + */ + def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else if (len == 1) new BitSet1(elems(0)) + else if (len == 2) createSmall(elems(0), elems(1)) + else new BitSetN(elems) + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSet1(val elems: Long) extends BitSet { + protected[collection] def nwords = 1 + protected[collection] def word(idx: Int) = if (idx == 0) elems else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = + if (idx == 0) new BitSet1(w) + else if (idx == 1) createSmall(elems, w) + else this.fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case _ => + val newElems = elems & ~bs.word(0) + if (newElems == 0L) this.empty else new BitSet1(newElems) + } + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems = BitSetOps.computeWordForFilter(pred, isFlipped, elems, 0) + if (_elems == 0L) this.empty else new BitSet1(_elems) + } + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSet2(val elems0: Long, val elems1: Long) extends BitSet { + protected[collection] def nwords = 2 + protected[collection] def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = + if (idx == 0) new BitSet2(w, elems1) + else if (idx == 1) createSmall(elems0, w) + else this.fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case 1 => + new BitSet2(elems0 & ~bs.word(0), elems1) + case _ => + val _elems0 = elems0 & ~bs.word(0) + val _elems1 = elems1 & ~bs.word(1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } else { + new BitSet1(_elems0) + } + } else { + new BitSet2(_elems0, _elems1) + } + } + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems0 = BitSetOps.computeWordForFilter(pred, isFlipped, elems0, 0) + val _elems1 = BitSetOps.computeWordForFilter(pred, isFlipped, elems1, 1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } + else new BitSet1(_elems0) + } + else new BitSet2(_elems0, _elems1) + } + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSetN(val elems: Array[Long]) extends BitSet { + protected[collection] def nwords = elems.length + + protected[collection] def word(idx: Int) = if (idx < nwords) elems(idx) else 0L + + protected[collection] def updateWord(idx: Int, w: Long): BitSet = this.fromBitMaskNoCopy(updateArray(elems, idx, w)) + + override def diff(that: collection.Set[Int]): BitSet = that match { + case bs: collection.BitSet => + /* + * Algorithm: + * + * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with + * the fewer words. 
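For example, diffing the words [a0, a1, a2] of `this` against [b0, b1] of `bs` yields
+       * [a0 & ~b0, a1 & ~b1, a2]; words past the end of `bs` are unchanged, since `bs.word(i)` is 0 there.
+       *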
+       * Two extra concerns for optimization are described below.
+       *
+       * Array Shrinking:
+       * If `this` is not longer than `bs`, then since we must iterate through the full array of words,
+       * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new
+       * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1`
+       *
+       * Tracking Changes:
+       * If the two sets are disjoint, then we can return `this`. Therefore, until at least one change is detected,
+       * we check each word to see if it has changed from its corresponding word in `this`. Once a single change is
+       * detected, we stop checking because the cost of the new Array must be paid anyway.
+       */
+
+      val bsnwords = bs.nwords
+      val thisnwords = nwords
+      if (bsnwords >= thisnwords) {
+        // here, we may have opportunity to shrink the size of the array
+        // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length
+        var i = thisnwords - 1
+        var currentWord = 0L
+        // if there are never any changes, we can return `this` at the end
+        var anyChanges = false
+        while (i >= 0 && currentWord == 0L) {
+          val oldWord = word(i)
+          currentWord = oldWord & ~bs.word(i)
+          anyChanges ||= currentWord != oldWord
+          i -= 1
+        }
+        i match {
+          case -1 =>
+            if (anyChanges) {
+              if (currentWord == 0) {
+                this.empty
+              } else {
+                new BitSet1(currentWord)
+              }
+            } else {
+              this
+            }
+          case 0 =>
+            val oldFirstWord = word(0)
+            val firstWord = oldFirstWord & ~bs.word(0)
+            anyChanges ||= firstWord != oldFirstWord
+            if (anyChanges) {
+              new BitSet2(firstWord, currentWord)
+            } else {
+              this
+            }
+          case _ =>
+            val minimumNonZeroIndex: Int = i + 1
+            while (!anyChanges && i >= 0) {
+              val oldWord = word(i)
+              currentWord = oldWord & ~bs.word(i)
+              anyChanges ||= currentWord != oldWord
+              i -= 1
+            }
+            if (anyChanges) {
+              val newArray = elems.take(minimumNonZeroIndex + 1)
+              newArray(i + 1) = currentWord
+              while (i >= 0) {
+                newArray(i) = word(i) & ~bs.word(i)
+                i -= 1
+              }
+              new BitSetN(newArray)
+            } else {
+              this
+            }
+        }
+      } else {
+        var i = bsnwords - 1
+        var anyChanges = false
+        var currentWord = 0L
+        while (i >= 0 && !anyChanges) {
+          val oldWord = word(i)
+          currentWord = oldWord & ~bs.word(i)
+          anyChanges ||= currentWord != oldWord
+          i -= 1
+        }
+        if (anyChanges) {
+          val newElems = elems.clone()
+          newElems(i + 1) = currentWord
+          while (i >= 0) {
+            newElems(i) = word(i) & ~bs.word(i)
+            i -= 1
+          }
+          this.fromBitMaskNoCopy(newElems)
+        } else {
+          this
+        }
+      }
+      case _ => super.diff(that)
+    }
+
+
+    override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = {
+      // here, we may have opportunity to shrink the size of the array
+      // so, track the highest index which is non-zero.
That ( + 1 ) will be our new array length + var i = nwords - 1 + var currentWord = 0L + // if there are never any changes, we can return `this` at the end + var anyChanges = false + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + i match { + case -1 => + if (anyChanges) { + if (currentWord == 0) { + this.empty + } else { + new BitSet1(currentWord) + } + } else { + this + } + case 0 => + val oldFirstWord = word(0) + val firstWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldFirstWord, 0) + anyChanges ||= firstWord != oldFirstWord + if (anyChanges) { + new BitSet2(firstWord, currentWord) + } else { + this + } + case _ => + val minimumNonZeroIndex: Int = i + 1 + while (!anyChanges && i >= 0) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) + i -= 1 + } + new BitSetN(newArray) + } else { + this + } + } + } + + override def toBitMask: Array[Long] = elems.clone() + } + + @SerialVersionUID(3L) + private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { + protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala new file mode 100644 index 000000000000..17005fd16a9f --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/BitSet.scala @@ -0,0 +1,393 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.collection.immutable.Range +import BitSetOps.{LogWL, MaxSize} +import scala.annotation.implicitNotFound +import language.experimental.captureChecking + +/** + * A class for mutable bitsets. + * + * $bitsetinfo + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] + * section on `Mutable Bitsets` for more information. 
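+ *
+ * For example (an illustrative session):
+ * {{{
+ *   val bits = mutable.BitSet(1, 3)
+ *   bits += 5                   // bits is now BitSet(1, 3, 5)
+ *   bits |= immutable.BitSet(2) // bits is now BitSet(1, 2, 3, 5)
+ * }}}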
+ * + * @define Coll `BitSet` + * @define coll bitset + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class BitSet(protected[collection] final var elems: Array[Long]) + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedIterableOps[Int, Set, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { + + def this(initSize: Int) = this(new Array[Long](math.max((initSize + 63) >> 6, 1))) + + def this() = this(0) + + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + + def bitSetFactory = BitSet + + override def unsorted: Set[Int] = this + + protected[collection] final def nwords: Int = elems.length + + protected[collection] final def word(idx: Int): Long = + if (idx < nwords) elems(idx) else 0L + + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = + if (elems.length == 0) empty + else new BitSet(elems) + + def addOne(elem: Int): this.type = { + require(elem >= 0) + if (!contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) | (1L << elem)) + } + this + } + + def subtractOne(elem: Int): this.type = { + require(elem >= 0) + if (contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) & ~(1L << elem)) + } + this + } + + def clear(): Unit = { + elems = new Array[Long](elems.length) + } + + protected final def updateWord(idx: Int, w: Long): Unit = { + ensureCapacity(idx) + elems(idx) = w + } + + protected final def ensureCapacity(idx: Int): Unit = { + require(idx < MaxSize) + if (idx >= nwords) { + var newlen = nwords + while (idx >= newlen) newlen = math.min(newlen * 2, MaxSize) + val elems1 = new Array[Long](newlen) + Array.copy(elems, 0, elems1, 0, nwords) + elems = elems1 + } + } + + def unconstrained: collection.Set[Int] = this + + /** Updates this bitset to the union with another bitset by performing a bitwise "or". + * + * @param other the bitset to form the union with. + * @return the bitset itself. + */ + def |= (other: collection.BitSet): this.type = { + ensureCapacity(other.nwords - 1) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { + elems(i) = elems(i) | other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". + * + * @param other the bitset to form the intersection with. + * @return the bitset itself. + */ + def &= (other: collection.BitSet): this.type = { + // Different from other operations: no need to ensure capacity because + // anything beyond the capacity is 0. Since we use other.word which is 0 + // off the end, we also don't need to make sure we stay in bounds there. + var i = 0 + val thisnwords = nwords + while (i < thisnwords) { + elems(i) = elems(i) & other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". + * + * @param other the bitset to form the symmetric difference with. + * @return the bitset itself. 
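+   * @example {{{
+   *   val s = mutable.BitSet(1, 2, 3)
+   *   s ^= mutable.BitSet(2, 3, 4)   // s is now BitSet(1, 4)
+   * }}}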
+ */ + def ^= (other: collection.BitSet): this.type = { + ensureCapacity(other.nwords - 1) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { + + elems(i) = elems(i) ^ other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". + * + * @param other the bitset to form the difference with. + * @return the bitset itself. + */ + def &~= (other: collection.BitSet): this.type = { + var i = 0 + val max = Math.min(nwords, other.nwords) + while (i < max) { + elems(i) = elems(i) & ~other.word(i) + i += 1 + } + this + } + + override def clone(): BitSet = new BitSet(java.util.Arrays.copyOf(elems, elems.length)) + + def toImmutable: immutable.BitSet = immutable.BitSet.fromBitMask(elems) + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + override def addAll(xs: IterableOnce[Int]^): this.type = xs match { + case bs: collection.BitSet => + this |= bs + case range: Range => + if (range.nonEmpty) { + val start = range.min + if (start >= 0) { + val end = range.max + val endIdx = end >> LogWL + ensureCapacity(endIdx) + + if (range.step == 1 || range.step == -1) { + val startIdx = start >> LogWL + val wordStart = startIdx * BitSetOps.WordLength + val wordMask = -1L << (start - wordStart) + + if (endIdx > startIdx) { + elems(startIdx) |= wordMask + java.util.Arrays.fill(elems, startIdx + 1, endIdx, -1L) + elems(endIdx) |= -1L >>> (BitSetOps.WordLength - (end - endIdx * BitSetOps.WordLength) - 1) + } else elems(endIdx) |= (wordMask & (-1L >>> (BitSetOps.WordLength - (end - wordStart) - 1))) + } else super.addAll(range) + } else super.addAll(range) + } + this + + case sorted: collection.SortedSet[Int] => + // if `sorted` is using the regular Int ordering, ensure capacity for the largest + // element up front to avoid multiple resizing allocations + if (sorted.nonEmpty) { + val ord = sorted.ordering + if (ord eq Ordering.Int) { + ensureCapacity(sorted.lastKey >> LogWL) + } else if (ord eq Ordering.Int.reverse) { + ensureCapacity(sorted.firstKey >> LogWL) + } + val iter = sorted.iterator + while (iter.hasNext) { + addOne(iter.next()) + } + } + + this + + case other => + super.addAll(other) + } + + override def subsetOf(that: collection.Set[Int]): Boolean = that match { + case bs: collection.BitSet => + val thisnwords = this.nwords + val bsnwords = bs.nwords + val minWords = Math.min(thisnwords, bsnwords) + + // if any bits are set to `1` in words out of range of `bs`, then this is not a subset. 
Start there + var i = bsnwords + while (i < thisnwords) { + if (word(i) != 0L) return false + i += 1 + } + + // the higher range of `this` is all `0`s, fall back to lower range + var j = 0 + while (j < minWords) { + if ((word(j) & ~bs.word(j)) != 0L) return false + j += 1 + } + + true + case other => + super.subsetOf(other) + } + + override def subtractAll(xs: IterableOnce[Int]^): this.type = xs match { + case bs: collection.BitSet => this &~= bs + case other => super.subtractAll(other) + } + + protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) + + override def diff(that: collection.Set[Int]): BitSet = that match { + case bs: collection.BitSet => + /* + * Algorithm: + * + * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with + * the fewer words. + * + * Array Shrinking: + * If `this` is not longer than `bs`, then since we must iterate through the full array of words, + * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new + * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` + */ + + val bsnwords = bs.nwords + val thisnwords = nwords + if (bsnwords >= thisnwords) { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length + var i = thisnwords - 1 + var currentWord = 0L + + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + i -= 1 + } + + if (i < 0) { + fromBitMaskNoCopy(Array(currentWord)) + } else { + val minimumNonZeroIndex: Int = i + 1 + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = word(i) & ~bs.word(i) + i -= 1 + } + fromBitMaskNoCopy(newArray) + } + } else { + // here, there is no opportunity to shrink the array size, no use in tracking highest non-zero index + val newElems = elems.clone() + var i = bsnwords - 1 + while (i >= 0) { + newElems(i) = word(i) & ~bs.word(i) + i -= 1 + } + fromBitMaskNoCopy(newElems) + } + case _ => super.diff(that) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + // We filter the BitSet from highest to lowest, so we can determine exactly the highest non-zero word + // index which lets us avoid: + // * over-allocating -- the resulting array will be exactly the right size + // * multiple resizing allocations -- the array is allocated one time, not log(n) times. 
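+    // For example, if the highest element accepted by the filter is 63, the
+    // first non-zero filtered word is met at index 0, so `newArray` below is
+    // allocated with length 1 instead of the original `nwords`.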
+ var i = nwords - 1 + var newArray: Array[Long] = null + while (i >= 0) { + val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) + if (w != 0L) { + if (newArray eq null) { + newArray = new Array(i + 1) + } + newArray(i) = w + } + i -= 1 + } + if (newArray eq null) { + empty + } else { + fromBitMaskNoCopy(newArray) + } + } + + override def filterInPlace(p: Int => Boolean): this.type = { + val thisnwords = nwords + var i = 0 + while (i < thisnwords) { + elems(i) = BitSetOps.computeWordForFilter(p, isFlipped = false, elems(i), i) + i += 1 + } + this + } + + override def toBitMask: Array[Long] = elems.clone() +} + +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + + def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = Growable.from(empty, it) + + def empty: BitSet = new BitSet() + + def newBuilder: Builder[Int, BitSet] = new GrowableBuilder(empty) + + /** A bitset containing all the bits in an array */ + def fromBitMask(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else { + val a = java.util.Arrays.copyOf(elems, len) + new BitSet(a) + } + } + + /** A bitset containing all the bits in an array, wrapping the existing + * array without copying. + */ + def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else new BitSet(elems) + } + + @SerialVersionUID(3L) + private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { + protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) + } +} From 6a831aafdacc2e2de39c3e554573edb04c5331ab Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 17:49:21 +0100 Subject: [PATCH 067/216] Add SortedSet classes to stdlib --- .../stdlib/collection/SortedSet.scala | 189 ++++++++++++++++++ .../collection/immutable/SortedSet.scala | 57 ++++++ .../stdlib/collection/mutable/SortedSet.scala | 48 +++++ 3 files changed, 294 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/SortedSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/SortedSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/SortedSet.scala diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala new file mode 100644 index 000000000000..c98ca9ae5523 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedSet.scala @@ -0,0 +1,189 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.annotation.{implicitNotFound, nowarn} +import scala.annotation.unchecked.uncheckedVariance + +/** Base type of sorted sets */ +trait SortedSet[A] extends Set[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + def unsorted: Set[A] = this + + def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedSet" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering => + (ss canEqual this) && + (this.size == ss.size) && { + val i1 = this.iterator + val i2 = ss.iterator + var allEqual = true + while (allEqual && i1.hasNext) + allEqual = ordering.equiv(i1.next(), i2.next()) + allEqual + } + case _ => + super.equals(that) + } + +} + +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with SortedOps[A, C] { + + /** The companion object of this sorted set, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedIterableFactory: SortedIterableFactory[CC] + + def unsorted: Set[A] + + /** + * Creates an iterator that contains all values from this collection + * greater than or equal to `start` according to the ordering of + * this collection. x.iteratorFrom(y) is equivalent to but will usually + * be more efficient than x.from(y).iterator + * + * @param start The lower-bound (inclusive) of the iterator + */ + def iteratorFrom(start: A): Iterator[A] + + @deprecated("Use `iteratorFrom` instead.", "2.13.0") + @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) + + def firstKey: A = head + def lastKey: A = last + + /** Find the smallest element larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def minAfter(key: A): Option[A] = rangeFrom(key).headOption + + /** Find the largest element less than a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def maxBefore(key: A): Option[A] = rangeUntil(key).lastOption + + override def min[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.min") + else if (ord == ordering) head + else if (ord isReverseOf ordering) last + else super.min[B] // need the type annotation for it to infer the correct implicit + + override def max[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.max") + else if (ord == ordering) last + else if (ord isReverseOf ordering) head + else super.max[B] // need the type annotation for it to infer the correct implicit + + def rangeTo(to: A): C = { + val i = rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + /** Builds a new sorted collection by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. 
+ * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Map(this, f)) + + /** Builds a new sorted collection by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.FlatMap(this, f)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. + * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound bcs of VarianceNote + sortedIterableFactory.from(that match { + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + /** Builds a new sorted collection by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Collect(this, pf)) +} + +object SortedSetOps { + private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[${A}] first by calling `unsorted`." + private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(${A}, ${B})]. You may want to upcast to a Set[${A}] first by calling `unsorted`." 
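+  // These messages surface through the @implicitNotFound annotations above;
+  // e.g. TreeSet(1, 2).map(_ => new AnyRef) fails to compile with `ordMsg`
+  // because no implicit Ordering[AnyRef] is in scope.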
+ + /** Specialize `WithFilter` for sorted collections + * + * @define coll sorted collection + */ + class WithFilter[+A, +IterableCC[_], +CC[X] <: SortedSet[X]]( + self: SortedSetOps[A, CC, _] with IterableOps[A, IterableCC, _], + p: A => Boolean + ) extends IterableOps.WithFilter[A, IterableCC](self, p) { + + def map[B : Ordering](f: A => B): CC[B] = + self.sortedIterableFactory.from(new View.Map(filtered, f)) + + def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] = + self.sortedIterableFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC] = + new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a)) + } + +} + +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](immutable.SortedSet) + diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala new file mode 100644 index 000000000000..303e5ea9658c --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala @@ -0,0 +1,57 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +/** Base trait for sorted sets */ +trait SortedSet[A] + extends Set[A] + with collection.SortedSet[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + override def unsorted: Set[A] = this + + override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet +} + +/** + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with collection.SortedSetOps[A, CC, C] { + + def unsorted: Set[A] +} + +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with collection.StrictOptimizedSortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { +} + +/** + * $factoryInfo + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { + override def from[E: Ordering](it: IterableOnce[E]): SortedSet[E] = it match { + case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss + case _ => super.from(it) + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala new file mode 100644 index 000000000000..2bcb8dc7845a --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala @@ -0,0 +1,48 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +/** + * Base type for mutable sorted set collections + */ +trait SortedSet[A] + extends Set[A] + with collection.SortedSet[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + override def unsorted: Set[A] = this + + override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet +} + +/** + * @define coll mutable sorted set + * @define Coll `mutable.Sortedset` + */ +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with collection.SortedSetOps[A, CC, C] { + + def unsorted: Set[A] +} + +/** + * $factoryInfo + * @define coll mutable sorted set + * @define Coll `mutable.Sortedset` + */ +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) From 3b6b6843979d69abcf5bc3310bf0a6425cba11a4 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 22:33:54 +0100 Subject: [PATCH 068/216] Add Factory.scala to stdlib --- .../stdlib/collection/Factory.scala | 784 ++++++++++++++++++ 1 file changed, 784 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/Factory.scala diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala new file mode 100644 index 000000000000..2b15f1cc15d1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Factory.scala @@ -0,0 +1,784 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.collection.immutable.NumericRange +import scala.language.implicitConversions +import scala.collection.mutable.Builder +import scala.annotation.unchecked.uncheckedVariance +import scala.reflect.ClassTag + +/** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * This is a general form of any factory ([[IterableFactory]], + * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose + * element type is fixed. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +trait Factory[-A, +C] extends Any { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/
+  def newBuilder: Builder[A, C]
+}
+
+object Factory {
+
+  implicit val stringFactory: Factory[Char, String] = new StringFactory
+  @SerialVersionUID(3L)
+  private class StringFactory extends Factory[Char, String] with Serializable {
+    def fromSpecific(it: IterableOnce[Char]): String = {
+      val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize))
+      b ++= it
+      b.result()
+    }
+    def newBuilder: Builder[Char, String] = new mutable.StringBuilder()
+  }
+
+  implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A]
+  @SerialVersionUID(3L)
+  private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable {
+    def fromSpecific(it: IterableOnce[A]): Array[A] = {
+      val b = newBuilder
+      b.sizeHint(scala.math.max(0, it.knownSize))
+      b ++= it
+      b.result()
+    }
+    def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A]
+  }
+
+}
+
+/** Base trait for companion objects of unconstrained collection types that may require
+ * multiple traversals of a source collection to build a target collection `CC`.
+ *
+ * @tparam CC Collection type constructor (e.g. `List`)
+ * @define factoryInfo
+ *   This object provides a set of operations to create $Coll values.
+ *
+ * @define coll collection
+ * @define Coll `Iterable`
+ */
+trait IterableFactory[+CC[_]] extends Serializable {
+
+  /** Creates a target $coll from an existing source collection
+   *
+   * @param source Source collection
+   * @tparam A the type of the collection’s elements
+   * @return a new $coll with the elements of `source`
+   */
+  def from[A](source: IterableOnce[A]): CC[A]
+
+  /** An empty collection
+   * @tparam A the type of the ${coll}'s elements
+   */
+  def empty[A]: CC[A]
+
+  /** Creates a $coll with the specified elements.
+   * @tparam A the type of the ${coll}'s elements
+   * @param elems the elements of the created $coll
+   * @return a new $coll with elements `elems`
+   */
+  def apply[A](elems: A*): CC[A] = from(elems)
+
+  /** Produces a $coll containing repeated applications of a function to a start value.
+   *
+   * @param start the start value of the $coll
+   * @param len the number of elements contained in the $coll
+   * @param f the function that's repeatedly applied
+   * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...`
+   */
+  def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f))
+
+  /** Produces a $coll that uses a function `f` to produce elements of type `A`
+   * and update an internal state of type `S`.
+   *
+   * @param init State initial value
+   * @param f Computes the next element (or returns `None` to signal
+   * the end of the collection)
+   * @tparam A Type of the elements
+   * @tparam S Type of the internal state
+   * @return a $coll that produces elements using `f` until `f` returns `None`
+   */
+  def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f))
+
+  /** Produces a $coll containing an increasing sequence of integers.
+   *
+   * @param start the first element of the $coll
+   * @param end the end value of the $coll (the first value NOT contained)
+   * @return a $coll with values `start, start + 1, ..., end - 1`
+   */
+  def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one))
+
+  /** Produces a $coll containing equally spaced values in some integer interval. 
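+   *
+   * For example, `List.range(0, 10, 2)` yields `List(0, 2, 4, 6, 8)`.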
+ * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** + * @return A builder for $Coll objects. + * @tparam A the type of the ${coll}’s elements + */ + def newBuilder[A]: Builder[A, CC[A]] + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. 
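+   *
+   * For example, `Vector.tabulate(4)(i => i * i)` yields `Vector(0, 1, 4, 9)`.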
+ * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. 
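+   *
+   * For example, `List.concat(List(1, 2), Vector(3))` yields `List(1, 2, 3)`.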
+ */ + def concat[A](xss: Iterable[A]*): CC[A] = { + from(xss.foldLeft(View.empty[A])(_ ++ _)) + } + + implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) +} + +object IterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. `Seq`, `List`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = + new BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder + } + + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `List`) + */ +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? +} + +object SeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } + + final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[A] = this + def lengthCompare(len: Int): Int = c.lengthCompare(len) + def apply(i: Int): A = c(i) + def drop(n: Int): scala.Seq[A] = c match { + case seq: scala.Seq[A] => seq.drop(n) + case _ => c.view.drop(n).toSeq + } + def toSeq: scala.Seq[A] = c.toSeq + } +} + +trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { + + override def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + override def concat[A](xss: Iterable[A]*): CC[A] = { + val b = newBuilder[A] + val knownSizes = xss.view.map(_.knownSize) + if (knownSizes forall (_ >= 0)) { + b.sizeHint(knownSizes.sum) + } + for (xs <- xss) b ++= xs + b.result() + } + +} + +/** + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) 
+ * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + def empty: C + def apply(xs: A*): C = fromSpecific(xs) + def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) + def newBuilder: Builder[A, C] + + implicit def specificIterableFactory: Factory[A, C] = this +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait MapFactory[+CC[_, _]] extends Serializable { + + /** + * An empty Map + */ + def empty[K, V]: CC[K, V] + + /** + * A collection of type Map generated from given iterable object. + */ + def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + + /** + * A collection of type Map that contains given key/value bindings. + */ + def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) + + /** + * The default builder for Map objects. + */ + def newBuilder[K, V]: Builder[(K, V), CC[K, V]] + + /** + * The default Factory instance for maps. + */ + implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) +} + +object MapFactory { + + /** + * Fixes the key and value types of `factory` to `K` and `V`, respectively + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` + * and values of type `V` + */ + implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = + new BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { + override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) + def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def empty[K, V]: C[K, V] = delegate.empty + def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder + } +} + +/** Base trait for companion objects of collections that require an implicit evidence. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + * @tparam Ev Unary type constructor for the implicit evidence required for an element type + * (typically `Ordering` or `ClassTag`) + * + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { + + def from[E : Ev](it: IterableOnce[E]): CC[E] + + def empty[A : Ev]: CC[A] + + def apply[A : Ev](xs: A*): CC[A] = from(xs) + + /** Produces a $coll containing the results of some element computation a number of times. 
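+   *
+   * For example, `TreeSet.fill(3)("x")` evaluates `elem` three times but, being
+   * a set, yields `TreeSet("x")`.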
+ * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + def newBuilder[A : Ev]: Builder[A, CC[A]] + + implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this) +} + +object EvidenceIterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`TreeSet`)
+   * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`)
+   * @return A [[Factory]] that uses the given `factory` to build a collection of elements
+   * of type `A`
+   */
+  implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory)
+
+  @SerialVersionUID(3L)
+  private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable {
+    def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it)
+    def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A]
+  }
+
+  implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory)
+  private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] {
+    def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it)
+    def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A]
+  }
+
+  @SerialVersionUID(3L)
+  class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] {
+    override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*)
+    def empty[A : Ev]: CC[A] = delegate.empty
+    def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it)
+    def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A]
+  }
+}
+
+/** Base trait for companion objects of collections that require an implicit `Ordering`.
+ * @tparam CC Collection type constructor (e.g. `SortedSet`)
+ */
+trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering]
+
+object SortedIterableFactory {
+  @SerialVersionUID(3L)
+  class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering])
+    extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC]
+}
+
+/** Base trait for companion objects of collections that require an implicit `ClassTag`.
+ * @tparam CC Collection type constructor (e.g. `ArraySeq`)
+ */
+trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] {
+
+  @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] =
+    ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays
+
+  /** Produces a $coll containing an increasing sequence of integers.
+   *
+   * @param start the first element of the $coll
+   * @param end the end value of the $coll (the first value NOT contained)
+   * @return a $coll with values `start, start + 1, ..., end - 1`
+   */
+  def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one))
+
+  /** Produces a $coll containing equally spaced values in some integer interval.
+   * @param start the start value of the $coll
+   * @param end the end value of the $coll (the first value NOT contained)
+   * @param step the difference between successive elements of the $coll (must be positive or negative)
+   * @return a $coll with values `start, start + step, ...` up to, but excluding `end`
+   */
+  def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step))
+
+  /** Produces a two-dimensional $coll containing the results of some element computation a number of times. 
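+   *
+   * For example, `ArraySeq.fill(2, 3)(0)` yields
+   * `ArraySeq(ArraySeq(0, 0, 0), ArraySeq(0, 0, 0))`.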
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) +} + +object ClassTagIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) + extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] + + /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { + def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]] + } +} + +/** + * @tparam CC Collection type constructor (e.g. 
`ArraySeq`) + */ +trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? +} + +object ClassTagSeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] + + /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] +} + +trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { + + override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SortedMapFactory[+CC[_, _]] extends Serializable { + + def empty[K : Ordering, V]: CC[K, V] + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] + + def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) + + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] + + implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) + +} + +object SortedMapFactory { + + /** + * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, + * respectively. + * + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. 
`TreeMap`) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of + * type `K` and values of type `V` + */ + implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) + private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { + override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def empty[K : Ordering, V]: CC[K, V] = delegate.empty + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder + } +} From 5852a76053d6657e9770aaf163e5de92b7399823 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 22:53:38 +0100 Subject: [PATCH 069/216] Make Factory capture checked --- .../stdlib/collection/Factory.scala | 101 ++++++++++-------- .../stdlib/collection/Iterable.scala | 6 +- tests/pos-special/stdlib/collection/Seq.scala | 2 + .../collection/immutable/Iterable.scala | 2 +- .../collection/mutable/ArrayDeque.scala | 2 +- .../stdlib/collection/mutable/ArraySeq.scala | 2 +- .../stdlib/collection/mutable/Queue.scala | 2 +- .../stdlib/collection/mutable/Stack.scala | 2 +- 8 files changed, 67 insertions(+), 52 deletions(-) diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala index 2b15f1cc15d1..9979ef1bd619 100644 --- a/tests/pos-special/stdlib/collection/Factory.scala +++ b/tests/pos-special/stdlib/collection/Factory.scala @@ -18,6 +18,8 @@ import scala.language.implicitConversions import scala.collection.mutable.Builder import scala.annotation.unchecked.uncheckedVariance import scala.reflect.ClassTag +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure /** * A factory that builds a collection of type `C` with elements of type `A`. @@ -29,14 +31,14 @@ import scala.reflect.ClassTag * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) */ -trait Factory[-A, +C] extends Any { +trait Factory[-A, +C] extends Pure { /** * @return A collection of type `C` containing the same elements * as the source collection `it`. * @param it Source collection */ - def fromSpecific(it: IterableOnce[A]): C + def fromSpecific(it: IterableOnce[A]^): C /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/ @@ -48,7 +50,7 @@ object Factory { implicit val stringFactory: Factory[Char, String] = new StringFactory @SerialVersionUID(3L) private class StringFactory extends Factory[Char, String] with Serializable { - def fromSpecific(it: IterableOnce[Char]): String = { + def fromSpecific(it: IterableOnce[Char]^): String = { val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) b ++= it b.result() @@ -56,10 +58,10 @@ object Factory { def newBuilder: Builder[Char, String] = new mutable.StringBuilder() } - implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] + implicit def arrayFactory[sealed A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] @SerialVersionUID(3L) - private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): Array[A] = { + private class ArrayFactory[sealed A: ClassTag] extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]^): Array[A] = { val b = newBuilder b.sizeHint(scala.math.max(0, it.knownSize)) b ++= it @@ -80,7 +82,7 @@ object Factory { * @define coll collection * @define Coll `Iterable` */ -trait IterableFactory[+CC[_]] extends Serializable { +trait IterableFactory[+CC[_]] extends Serializable, Pure { /** Creates a target $coll from an existing source collection * @@ -88,7 +90,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @tparam A the type of the collection’s elements * @return a new $coll with the elements of `source` */ - def from[A](source: IterableOnce[A]): CC[A] + def from[A](source: IterableOnce[A]^): CC[A]^{source} /** An empty collection * @tparam A the type of the ${coll}'s elements @@ -109,7 +111,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param f the function that's repeatedly applied * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` */ - def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + def iterate[A](start: A, len: Int)(f: A => A): CC[A]^{f} = from(new View.Iterate(start, len)(f)) /** Produces a $coll that uses a function `f` to produce elements of type `A` * and update an internal state of type `S`. @@ -121,7 +123,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @tparam S Type of the internal state * @return a $coll that produces elements using `f` until `f` returns `None` */ - def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A]^{f} = from(new View.Unfold(init)(f)) /** Produces a $coll containing a sequence of increasing of integers. * @@ -150,7 +152,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n` evaluations of `elem`. */ - def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + def fill[A](n: Int)(elem: => A): CC[A]^{elem} = from(new View.Fill(n)(elem)) /** Produces a two-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension @@ -158,7 +160,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. 
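   * For example (an illustrative sketch):
   * {{{
   *   List.fill(2, 3)(0)   // List(List(0, 0, 0), List(0, 0, 0))
   * }}}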
*/ - def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2)(elem)) /** Produces a three-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension @@ -167,7 +170,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3)(elem)).unsafeAssumePure /** Produces a four-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension @@ -177,8 +181,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4)(elem)) + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3, n4)(elem)) /** Produces a five-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension @@ -189,15 +193,15 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4, n5)(elem)) + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3, n4, n5)(elem)) /** Produces a $coll containing values of a given function over a range of integer values starting from 0. * @param n The number of elements in the $coll * @param f The function computing element values * @return A $coll consisting of elements `f(0), ..., f(n -1)` */ - def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + def tabulate[A](n: Int)(f: Int => A): CC[A]^{f} = from(new View.Tabulate(n)(f)) /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -206,8 +210,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2)` * for `0 <= i1 < n1` and `0 <= i2 < n2`. */ - def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? 
// tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -217,8 +221,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2, i3)` * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. */ - def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -229,8 +233,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2, i3, i4)` * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -242,8 +246,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) /** Concatenates all argument collections into a single $coll. 
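   * For example (sketch): `List.concat(List(1), Vector(2, 3))` yields `List(1, 2, 3)`.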
* @@ -271,13 +275,14 @@ object IterableFactory { @SerialVersionUID(3L) private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it).unsafeAssumePure + // unsafeAssumePure needed but is unsound, since we confuse Seq and Iterable fromSpecific def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] } implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = new BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A]^{it} = factory.from(it) def newBuilder(from: Any) = factory.newBuilder } @@ -285,15 +290,20 @@ object IterableFactory { class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def from[E](it: IterableOnce[E]^): CC[E]^{it} = delegate.from(it) def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] } } +// !!! Needed to add this separate trait +trait FreeSeqFactory[+CC[A]] extends IterableFactory[CC]: + def from[A](source: IterableOnce[A]^): CC[A] + override def apply[A](elems: A*): CC[A] = from(elems) + /** * @tparam CC Collection type constructor (e.g. `List`) */ -trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends FreeSeqFactory[CC] { import SeqFactory.UnapplySeqWrapper final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? } @@ -303,7 +313,7 @@ object SeqFactory { class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def from[E](it: IterableOnce[E]^): CC[E] = delegate.from(it) def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] } @@ -366,6 +376,8 @@ trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFac * @define Coll `Iterable` */ trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + this: SpecificIterableFactory[A, C] => + def empty: C def apply(xs: A*): C = fromSpecific(xs) def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) @@ -381,7 +393,7 @@ trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { * @define coll collection * @define Coll `Iterable` */ -trait MapFactory[+CC[_, _]] extends Serializable { +trait MapFactory[+CC[_, _]] extends Serializable, Pure { /** * An empty Map @@ -391,7 +403,7 @@ trait MapFactory[+CC[_, _]] extends Serializable { /** * A collection of type Map generated from given iterable object. */ - def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + def from[K, V](it: IterableOnce[(K, V)]^): CC[K, V] /** * A collection of type Map that contains given key/value bindings. 
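   * For example (sketch): `Map("a" -> 1, "b" -> 2)`.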
@@ -424,20 +436,20 @@ object MapFactory { @SerialVersionUID(3L) private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] } implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new BuildFrom[Any, (K, V), CC[K, V]] { - def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) def newBuilder(from: Any) = factory.newBuilder[K, V] } @SerialVersionUID(3L) class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) - def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def from[K, V](it: IterableOnce[(K, V)]^): C[K, V] = delegate.from(it) def empty[K, V]: C[K, V] = delegate.empty def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder } @@ -454,9 +466,9 @@ object MapFactory { * @define coll collection * @define Coll `Iterable` */ -trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable, Pure { - def from[E : Ev](it: IterableOnce[E]): CC[E] + def from[E : Ev](it: IterableOnce[E]^): CC[E] def empty[A : Ev]: CC[A] @@ -517,7 +529,7 @@ object EvidenceIterableFactory { @SerialVersionUID(3L) private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it) def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] } @@ -531,7 +543,7 @@ object EvidenceIterableFactory { class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) def empty[A : Ev]: CC[A] = delegate.empty - def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it) + def from[E : Ev](it: IterableOnce[E]^): CC[E] = delegate.from(it) def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] } } @@ -668,7 +680,7 @@ object ClassTagIterableFactory { @SerialVersionUID(3L) class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] - def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) @@ -734,10 +746,11 @@ trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extend * @define Coll `Iterable` */ trait SortedMapFactory[+CC[_, _]] extends Serializable { + this: SortedMapFactory[CC] => def empty[K : Ordering, V]: CC[K, V] - def from[K : Ordering, V](it: 
IterableOnce[(K, V)]): CC[K, V] + def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) @@ -764,7 +777,7 @@ object SortedMapFactory { @SerialVersionUID(3L) private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] } @@ -777,7 +790,7 @@ object SortedMapFactory { @SerialVersionUID(3L) class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) - def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] = delegate.from(it) def empty[K : Ordering, V]: CC[K, V] = delegate.empty def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder } diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala index 85c0debc6685..7b5eb39314d3 100644 --- a/tests/pos-special/stdlib/collection/Iterable.scala +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -400,7 +400,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable if (i != headSize) fail } - iterableFactory.from(bs.map(_.result())) + iterableFactory.from(bs.map(_.result())).asInstanceOf // !!! needed for cc } def filter(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = false)) @@ -902,10 +902,10 @@ object IterableOps { protected def filtered: Iterable[A]^{this} = new View.Filter(self, p, isFlipped = false) - def map[B](f: A => B): CC[B]^{this} = + def map[B](f: A => B): CC[B]^{this, f} = self.iterableFactory.from(new View.Map(filtered, f)) - def flatMap[B](f: A => IterableOnce[B]): CC[B]^{this} = + def flatMap[B](f: A => IterableOnce[B]): CC[B]^{this, f} = self.iterableFactory.from(new View.FlatMap(filtered, f)) def foreach[U](f: A => U): Unit = filtered.foreach(f) diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala index 05b315c49a8c..3a30906b467c 100644 --- a/tests/pos-special/stdlib/collection/Seq.scala +++ b/tests/pos-special/stdlib/collection/Seq.scala @@ -82,6 +82,8 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => override def view: SeqView[A] = new SeqView.Id[A](this) + def iterableFactory: FreeSeqFactory[CC] + /** Get the element at the specified index. This operation is provided for convenience in `Seq`. It should * not be assumed to be efficient unless you have an `IndexedSeq`. 
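   * For example (sketch): `Seq(10, 20, 30)(1)` returns `20`.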
*/ @throws[IndexOutOfBoundsException] diff --git a/tests/pos-special/stdlib/collection/immutable/Iterable.scala b/tests/pos-special/stdlib/collection/immutable/Iterable.scala index 44f13d0f2895..c4f9900eea8b 100644 --- a/tests/pos-special/stdlib/collection/immutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/immutable/Iterable.scala @@ -32,7 +32,7 @@ trait Iterable[+A] extends collection.Iterable[A] @SerialVersionUID(3L) object Iterable extends IterableFactory.Delegate[Iterable](List) { - override def from[E](it: IterableOnce[E]): Iterable[E] = it match { + override def from[E](it: IterableOnce[E]^): Iterable[E]^{it} = it match { case iterable: Iterable[E] => iterable case _ => super.from(it) } diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala index 8c6b059cd8a6..f22aacec65c5 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala @@ -526,7 +526,7 @@ class ArrayDeque[sealed A] protected ( @SerialVersionUID(3L) object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { - def from[sealed B](coll: collection.IterableOnce[B]): ArrayDeque[B] = { + def from[sealed B](coll: collection.IterableOnce[B]^): ArrayDeque[B] = { val s = coll.knownSize if (s >= 0) { val array = alloc(s) diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala index 9bdb28517eff..1c41f68bc8bb 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala @@ -110,7 +110,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] - def from[sealed A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + def from[sealed A : ClassTag](it: scala.collection.IterableOnce[A]^): ArraySeq[A] = make(Array.from[A](it)) def newBuilder[sealed A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala index 8c0b5cea1133..a578b0742009 100644 --- a/tests/pos-special/stdlib/collection/mutable/Queue.scala +++ b/tests/pos-special/stdlib/collection/mutable/Queue.scala @@ -130,7 +130,7 @@ class Queue[sealed A] protected (array: Array[AnyRef], start: Int, end: Int) @SerialVersionUID(3L) object Queue extends StrictOptimizedSeqFactory[Queue] { - def from[sealed A](source: IterableOnce[A]): Queue[A] = empty ++= source + def from[sealed A](source: IterableOnce[A]^): Queue[A] = empty ++= source def empty[sealed A]: Queue[A] = new Queue diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala index d39afe4d099e..4efa9621f374 100644 --- a/tests/pos-special/stdlib/collection/mutable/Stack.scala +++ b/tests/pos-special/stdlib/collection/mutable/Stack.scala @@ -135,7 +135,7 @@ class Stack[sealed A] protected (array: Array[AnyRef], start: Int, end: Int) @SerialVersionUID(3L) object Stack extends StrictOptimizedSeqFactory[Stack] { - def from[sealed A](source: IterableOnce[A]): Stack[A] = empty ++= source + def from[sealed A](source: IterableOnce[A]^): Stack[A] = empty ++= source def empty[sealed A]: 
Stack[A] = new Stack From c9cb0449eddf63c500a88340709f4111875d2d59 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 22:54:28 +0100 Subject: [PATCH 070/216] Avoid reporting post check messages several times --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 1 + .../tools/dotc/reporting/UniqueMessagePositions.scala | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index e90a8394f87d..8ba53693870c 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -707,4 +707,5 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def postCheck()(using Context): Unit = for chk <- todoAtPostCheck do chk(ctx) + todoAtPostCheck.clear() end Setup \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala index 98fd7da3032a..71b2636ab8ed 100644 --- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala +++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala @@ -25,14 +25,14 @@ trait UniqueMessagePositions extends Reporter { || dia.pos.exists && !ctx.settings.YshowSuppressedErrors.value - && (dia.pos.start to dia.pos.end).exists(pos => - positions.get((ctx.source, pos)).exists(_.hides(dia))) + && (dia.pos.start to dia.pos.end).exists: offset => + positions.get((ctx.source, offset)).exists(_.hides(dia)) override def markReported(dia: Diagnostic)(using Context): Unit = if dia.pos.exists then - for (pos <- dia.pos.start to dia.pos.end) - positions.get(ctx.source, pos) match + for offset <- dia.pos.start to dia.pos.end do + positions.get((ctx.source, offset)) match case Some(dia1) if dia1.hides(dia) => - case _ => positions((ctx.source, pos)) = dia + case _ => positions((ctx.source, offset)) = dia super.markReported(dia) } From 0a350ea88bd317079f472fb00d222bb756d6c15a Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 22:54:51 +0100 Subject: [PATCH 071/216] Add SortedSet and BitSet to stdlib (capture checked) --- tests/pos-special/stdlib/collection/BitSet.scala | 2 +- tests/pos-special/stdlib/collection/SortedSet.scala | 5 +++-- tests/pos-special/stdlib/collection/immutable/BitSet.scala | 2 +- .../pos-special/stdlib/collection/immutable/SortedSet.scala | 3 ++- tests/pos-special/stdlib/collection/mutable/BitSet.scala | 2 +- tests/pos-special/stdlib/collection/mutable/SortedSet.scala | 1 + 6 files changed, 9 insertions(+), 6 deletions(-) diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala index 62326dfd2152..39c15dbe808f 100644 --- a/tests/pos-special/stdlib/collection/BitSet.scala +++ b/tests/pos-special/stdlib/collection/BitSet.scala @@ -48,7 +48,7 @@ object BitSet extends SpecificIterableFactory[Int, BitSet] { def empty: BitSet = immutable.BitSet.empty def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder - def fromSpecific(it: IterableOnce[Int]): BitSet = immutable.BitSet.fromSpecific(it) + def fromSpecific(it: IterableOnce[Int]^): BitSet = immutable.BitSet.fromSpecific(it) @SerialVersionUID(3L) private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable { diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala index c98ca9ae5523..fb2f879edcd2 100644 --- 
a/tests/pos-special/stdlib/collection/SortedSet.scala +++ b/tests/pos-special/stdlib/collection/SortedSet.scala @@ -14,6 +14,7 @@ package scala.collection import scala.annotation.{implicitNotFound, nowarn} import scala.annotation.unchecked.uncheckedVariance +import language.experimental.captureChecking /** Base type of sorted sets */ trait SortedSet[A] extends Set[A] @@ -68,7 +69,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] * @param start The lower-bound (inclusive) of the iterator */ def iteratorFrom(start: A): Iterator[A] - + @deprecated("Use `iteratorFrom` instead.", "2.13.0") @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) @@ -178,7 +179,7 @@ object SortedSetOps { def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] = self.sortedIterableFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC] = + override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC]^{this, q} = new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a)) } diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala index 109f32d6f49a..9c2bfdad54d0 100644 --- a/tests/pos-special/stdlib/collection/immutable/BitSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/BitSet.scala @@ -95,7 +95,7 @@ sealed abstract class BitSet @SerialVersionUID(3L) object BitSet extends SpecificIterableFactory[Int, BitSet] { - def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = + def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = it match { case bs: BitSet => bs case _ => (newBuilder ++= it).result() diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala index 303e5ea9658c..874abcaecda1 100644 --- a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala @@ -13,6 +13,7 @@ package scala package collection package immutable +import language.experimental.captureChecking /** Base trait for sorted sets */ trait SortedSet[A] @@ -50,7 +51,7 @@ trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[ */ @SerialVersionUID(3L) object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { - override def from[E: Ordering](it: IterableOnce[E]): SortedSet[E] = it match { + override def from[E: Ordering](it: IterableOnce[E]^): SortedSet[E] = it match { case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss case _ => super.from(it) } diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala index 17005fd16a9f..dcb8a157389b 100644 --- a/tests/pos-special/stdlib/collection/mutable/BitSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/BitSet.scala @@ -361,7 +361,7 @@ class BitSet(protected[collection] final var elems: Array[Long]) @SerialVersionUID(3L) object BitSet extends SpecificIterableFactory[Int, BitSet] { - def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = Growable.from(empty, it) + def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = Growable.from(empty, it) def empty: BitSet = new BitSet() diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala index 2bcb8dc7845a..e657fb749d7d 100644 --- 
a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala @@ -13,6 +13,7 @@ package scala package collection package mutable +import language.experimental.captureChecking /** * Base type for mutable sorted set collections From 06744c2a42faf7fe1626672365ba3f71d61b8a47 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 22:56:06 +0100 Subject: [PATCH 072/216] Add BuildFrom.scala to stdlib --- .../stdlib/collection/BuildFrom.scala | 122 ++++++++++++++++++ 1 file changed, 122 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/BuildFrom.scala diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala new file mode 100644 index 000000000000..bc9c49d9493c --- /dev/null +++ b/tests/pos-special/stdlib/collection/BuildFrom.scala @@ -0,0 +1,122 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound +import scala.collection.mutable.Builder +import scala.collection.immutable.WrappedString +import scala.reflect.ClassTag + +/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. + * Implicit instances of `BuildFrom` are available for all collection types. + * + * @tparam From Type of source collection + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +@implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.") +trait BuildFrom[-From, -A, +C] extends Any { self => + def fromSpecific(from: From)(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
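+   * A hypothetical sketch of typical use (the helper name is invented for illustration):
+   * {{{
+   *   def rebuild[From, A, C](from: From)(it: IterableOnce[A])(implicit bf: BuildFrom[From, A, C]): C =
+   *     bf.fromSpecific(from)(it)  // preferred over (bf.newBuilder(from) ++= it).result()
+   * }}}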
*/ + def newBuilder(from: From): Builder[A, C] + + @deprecated("Use newBuilder() instead of apply()", "2.13.0") + @`inline` def apply(from: From): Builder[A, C] = newBuilder(from) + + /** Partially apply a BuildFrom to a Factory */ + def toFactory(from: From): Factory[A, C] = new Factory[A, C] { + def fromSpecific(it: IterableOnce[A]): C = self.fromSpecific(from)(it) + def newBuilder: Builder[A, C] = self.newBuilder(from) + } +} + +object BuildFrom extends BuildFromLowPriority1 { + + /** Build the source collection type from a MapOps */ + implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + //TODO: Reuse a prototype instance + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) + } + + /** Build the source collection type from a SortedMapOps */ + implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) + } + + implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] = + new BuildFrom[C, Int, C] { + def fromSpecific(from: C)(it: IterableOnce[Int]): C = from.bitSetFactory.fromSpecific(it) + def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder + } + + implicit val buildFromString: BuildFrom[String, Char, String] = + new BuildFrom[String, Char, String] { + def fromSpecific(from: String)(it: IterableOnce[Char]): String = Factory.stringFactory.fromSpecific(it) + def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder + } + + implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] = + new BuildFrom[WrappedString, Char, WrappedString] { + def fromSpecific(from: WrappedString)(it: IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(it) + def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder + } + + implicit def buildFromArray[A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = + new BuildFrom[Array[_], A, Array[A]] { + def fromSpecific(from: Array[_])(it: IterableOnce[A]): Array[A] = Factory.arrayFactory[A].fromSpecific(it) + def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder + } + + implicit def buildFromView[A, B]: BuildFrom[View[A], B, View[B]] = + new BuildFrom[View[A], B, View[B]] { + def fromSpecific(from: View[A])(it: IterableOnce[B]): View[B] = View.from(it) + def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder + } + +} + +trait BuildFromLowPriority1 extends BuildFromLowPriority2 { + + /** Build the source collection type from an Iterable with SortedOps */ + // Restating the upper bound of CC in the result type seems redundant, but it serves to prune the + // implicit search space for faster compilation and reduced change of divergence. 
See the compilation
+  // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209
+  implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] {
+    def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A]
+    def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it)
+  }
+
+  implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] =
+    new BuildFrom[String, A, immutable.IndexedSeq[A]] {
+      def fromSpecific(from: String)(it: IterableOnce[A]): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it)
+      def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
+    }
+}
+
+trait BuildFromLowPriority2 {
+  /** Build the source collection type from an IterableOps */
+  implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] {
+    //TODO: Reuse a prototype instance
+    def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A]
+    def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it)
+  }
+
+  implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] {
+    def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder
+    def fromSpecific(from: Iterator[_])(it: IterableOnce[A]): Iterator[A] = Iterator.from(it)
+  }
+}

From 51c9385101bef1a9b7db79553112763ab6dfdf44 Mon Sep 17 00:00:00 2001
From: odersky
Date: Tue, 31 Oct 2023 23:13:26 +0100
Subject: [PATCH 073/216] Make BuildFrom capture checked

---
 .../stdlib/collection/BuildFrom.scala         | 36 +++++++++++--------
 .../stdlib/collection/Factory.scala           |  7 ++--
 2 files changed, 25 insertions(+), 18 deletions(-)

diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala
index bc9c49d9493c..15a8acf2e6ef 100644
--- a/tests/pos-special/stdlib/collection/BuildFrom.scala
+++ b/tests/pos-special/stdlib/collection/BuildFrom.scala
@@ -16,6 +16,8 @@ import scala.annotation.implicitNotFound
 import scala.collection.mutable.Builder
 import scala.collection.immutable.WrappedString
 import scala.reflect.ClassTag
+import language.experimental.captureChecking
+import caps.unsafe.unsafeAssumePure
 
 /** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available.
  * Implicit instances of `BuildFrom` are available for all collection types.
  *
@@ -26,7 +28,11 @@ import scala.reflect.ClassTag
  */
 @implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.")
 trait BuildFrom[-From, -A, +C] extends Any { self =>
-  def fromSpecific(from: From)(it: IterableOnce[A]): C
+  def fromSpecific(from: From)(it: IterableOnce[A]^): C
+    // !!! this is wrong, we need two versions of fromSpecific; one mapping
+    // to C^{it} when C is an Iterable, and one mapping to C when C is a Seq, Map, or Set.
+    // But that requires a large-scale refactoring of BuildFrom. The unsafeAssumePure
+    // calls in this file are needed to sweep that problem under the carpet. 
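+    // A hedged sketch of the two variants such a refactoring might introduce
+    // (hypothetical signatures, not part of this change):
+    //   def fromSpecificLazy(from: From)(it: IterableOnce[A]^): C^{it}  // when C is an Iterable
+    //   def fromSpecificStrict(from: From)(it: IterableOnce[A]^): C     // when C is a Seq, Map, or Set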
/** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ @@ -37,7 +43,7 @@ trait BuildFrom[-From, -A, +C] extends Any { self => /** Partially apply a BuildFrom to a Factory */ def toFactory(from: From): Factory[A, C] = new Factory[A, C] { - def fromSpecific(it: IterableOnce[A]): C = self.fromSpecific(from)(it) + def fromSpecific(it: IterableOnce[A]^): C = self.fromSpecific(from)(it) def newBuilder: Builder[A, C] = self.newBuilder(from) } } @@ -48,42 +54,42 @@ object BuildFrom extends BuildFromLowPriority1 { implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { //TODO: Reuse a prototype instance def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) } /** Build the source collection type from a SortedMapOps */ implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) } implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] = new BuildFrom[C, Int, C] { - def fromSpecific(from: C)(it: IterableOnce[Int]): C = from.bitSetFactory.fromSpecific(it) + def fromSpecific(from: C)(it: IterableOnce[Int]^): C = from.bitSetFactory.fromSpecific(it) def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder } implicit val buildFromString: BuildFrom[String, Char, String] = new BuildFrom[String, Char, String] { - def fromSpecific(from: String)(it: IterableOnce[Char]): String = Factory.stringFactory.fromSpecific(it) + def fromSpecific(from: String)(it: IterableOnce[Char]^): String = Factory.stringFactory.fromSpecific(it) def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder } implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] = new BuildFrom[WrappedString, Char, WrappedString] { - def fromSpecific(from: WrappedString)(it: IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(it) + def fromSpecific(from: WrappedString)(it: IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(it) def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder } - implicit def buildFromArray[A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = + implicit def buildFromArray[sealed A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = new BuildFrom[Array[_], A, Array[A]] { - def fromSpecific(from: Array[_])(it: IterableOnce[A]): Array[A] = 
Factory.arrayFactory[A].fromSpecific(it) + def fromSpecific(from: Array[_])(it: IterableOnce[A]^): Array[A] = Factory.arrayFactory[A].fromSpecific(it) def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder } - implicit def buildFromView[A, B]: BuildFrom[View[A], B, View[B]] = + implicit def buildFromView[A, sealed B]: BuildFrom[View[A], B, View[B]] = new BuildFrom[View[A], B, View[B]] { - def fromSpecific(from: View[A])(it: IterableOnce[B]): View[B] = View.from(it) + def fromSpecific(from: View[A])(it: IterableOnce[B]^): View[B] = View.from(it).unsafeAssumePure def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder } @@ -97,12 +103,12 @@ trait BuildFromLowPriority1 extends BuildFromLowPriority2 { // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209 implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] { def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) + def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) } implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] = new BuildFrom[String, A, immutable.IndexedSeq[A]] { - def fromSpecific(from: String)(it: IterableOnce[A]): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) + def fromSpecific(from: String)(it: IterableOnce[A]^): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] } } @@ -112,11 +118,11 @@ trait BuildFromLowPriority2 { implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] { //TODO: Reuse a prototype instance def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it) + def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it).unsafeAssumePure } implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] { def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder - def fromSpecific(from: Iterator[_])(it: IterableOnce[A]): Iterator[A] = Iterator.from(it) + def fromSpecific(from: Iterator[_])(it: IterableOnce[A]^): Iterator[A] = Iterator.from(it).unsafeAssumePure } } diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala index 9979ef1bd619..c45776b62b9c 100644 --- a/tests/pos-special/stdlib/collection/Factory.scala +++ b/tests/pos-special/stdlib/collection/Factory.scala @@ -282,7 +282,8 @@ object IterableFactory { implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = new BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A]^{it} = factory.from(it) + def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = + 
factory.from(it).unsafeAssumePure // !!! see remark in BuildFrom why this is necessary
       def newBuilder(from: Any) = factory.newBuilder
     }
 
@@ -535,7 +536,7 @@ object EvidenceIterableFactory {
 
   implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory)
   private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] {
-    def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it)
+    def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = factory.from[A](it)
     def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A]
   }
 
@@ -783,7 +784,7 @@ object SortedMapFactory {
 
   implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory)
   private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] {
-    def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it)
+    def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it)
     def newBuilder(from: Any) = factory.newBuilder[K, V]
   }
 

From 238317c2f73cec3da48304e37087cbe6d93a81aa Mon Sep 17 00:00:00 2001
From: odersky
Date: Wed, 1 Nov 2023 09:33:43 +0100
Subject: [PATCH 074/216] Add WithFilter.scala to stdlib

---
 .../stdlib/collection/WithFilter.scala        | 70 +++++++++++++++++++
 1 file changed, 70 insertions(+)
 create mode 100644 tests/pos-special/stdlib/collection/WithFilter.scala

diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala
new file mode 100644
index 000000000000..4699abbef5a7
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/WithFilter.scala
@@ -0,0 +1,70 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
+ * of trait `Iterable`.
+ *
+ * @tparam A Element type (e.g. `Int`)
+ * @tparam CC Collection type constructor (e.g. `List`)
+ *
+ * @define coll collection
+ */
+@SerialVersionUID(3L)
+abstract class WithFilter[+A, +CC[_]] extends Serializable {
+
+  /** Builds a new collection by applying a function to all elements of the
+   * `filtered` outer $coll.
+   *
+   * @param f the function to apply to each element.
+   * @tparam B the element type of the returned collection.
+   * @return a new $coll resulting from applying
+   * the given function `f` to each element of the filtered outer $coll
+   * and collecting the results.
+   */
+  def map[B](f: A => B): CC[B]
+
+  /** Builds a new collection by applying a function to all elements of the
+   * `filtered` outer $coll containing this `WithFilter` instance that satisfy the filter's predicate.
+   *
+   * @param f the function to apply to each element.
+   * @tparam B the element type of the returned collection.
+   * @return a new $coll resulting from applying
+   * the given collection-valued function `f` to each element
+   * of the filtered outer $coll and
+   * concatenating the results. 
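+   * For example (an illustrative sketch):
+   * {{{
+   *   List(1, 2, 3).withFilter(_ > 1).flatMap(i => List(i, i * 10))  // List(2, 20, 3, 30)
+   * }}}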
+ */ + def flatMap[B](f: A => IterableOnce[B]): CC[B] + + /** Applies a function `f` to all elements of the `filtered` outer $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + */ + def foreach[U](f: A => U): Unit + + /** Further refines the filter for this `filtered` $coll. + * + * @param q the predicate used to test elements. + * @return an object of class `WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this $coll which + * also satisfy both `p` and `q` predicates. + */ + def withFilter(q: A => Boolean): WithFilter[A, CC] + +} From 1b757b9e5b9ced9ca59245d7551f5650f760932c Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 09:38:01 +0100 Subject: [PATCH 075/216] Make WithFilter capture checked --- tests/pos-special/stdlib/collection/Iterable.scala | 2 +- tests/pos-special/stdlib/collection/Map.scala | 6 +++--- tests/pos-special/stdlib/collection/WithFilter.scala | 8 +++++--- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala index 7b5eb39314d3..491d78a797dd 100644 --- a/tests/pos-special/stdlib/collection/Iterable.scala +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -905,7 +905,7 @@ object IterableOps { def map[B](f: A => B): CC[B]^{this, f} = self.iterableFactory.from(new View.Map(filtered, f)) - def flatMap[B](f: A => IterableOnce[B]): CC[B]^{this, f} = + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = self.iterableFactory.from(new View.FlatMap(filtered, f)) def foreach[U](f: A => U): Unit = filtered.foreach(f) diff --git a/tests/pos-special/stdlib/collection/Map.scala b/tests/pos-special/stdlib/collection/Map.scala index ef4f915ea573..bcfd8d593eda 100644 --- a/tests/pos-special/stdlib/collection/Map.scala +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -378,13 +378,13 @@ object MapOps { p: ((K, V)) => Boolean ) extends IterableOps.WithFilter[(K, V), IterableCC](self, p) with Serializable { - def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2]^{this, f} = self.mapFactory.from(new View.Map(filtered, f)) - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2]^{this, f} = self.mapFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{p, q} = + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{this, q} = new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) } diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala index 4699abbef5a7..0f3830e9fe25 100644 --- a/tests/pos-special/stdlib/collection/WithFilter.scala +++ b/tests/pos-special/stdlib/collection/WithFilter.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking /** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods * of trait `Iterable`. 
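  * For example (sketch), `xs.withFilter(p).map(f)` behaves like `xs.filter(p).map(f)`
  * but without building the intermediate filtered collection.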
@@ -22,6 +23,7 @@ package scala.collection */ @SerialVersionUID(3L) abstract class WithFilter[+A, +CC[_]] extends Serializable { + this: WithFilter[A, CC]^ => /** Builds a new collection by applying a function to all elements of the * `filtered` outer $coll. @@ -32,7 +34,7 @@ abstract class WithFilter[+A, +CC[_]] extends Serializable { * the given function `f` to each element of the filtered outer $coll * and collecting the results. */ - def map[B](f: A => B): CC[B] + def map[B](f: A => B): CC[B]^{this, f} /** Builds a new collection by applying a function to all elements of the * `filtered` outer $coll containing this `WithFilter` instance that satisfy @@ -44,7 +46,7 @@ abstract class WithFilter[+A, +CC[_]] extends Serializable { * of the filtered outer $coll and * concatenating the results. */ - def flatMap[B](f: A => IterableOnce[B]): CC[B] + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} /** Applies a function `f` to all elements of the `filtered` outer $coll. * @@ -65,6 +67,6 @@ abstract class WithFilter[+A, +CC[_]] extends Serializable { * All these operations apply to those elements of this $coll which * also satisfy both `p` and `q` predicates. */ - def withFilter(q: A => Boolean): WithFilter[A, CC] + def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} } From 4956e6d80d94ee0552d6229f2bbd93a669fa34da Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 09:39:53 +0100 Subject: [PATCH 076/216] Add ArrayOps to stdlib --- .../stdlib/collection/ArrayOps.scala | 1663 +++++++++++++++++ 1 file changed, 1663 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/ArrayOps.scala diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala new file mode 100644 index 000000000000..485427886625 --- /dev/null +++ b/tests/pos-special/stdlib/collection/ArrayOps.scala @@ -0,0 +1,1663 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import java.lang.Math.{max, min} +import java.util.Arrays + +import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally + genericArrayOps => _, + booleanArrayOps => _, + byteArrayOps => _, + charArrayOps => _, + doubleArrayOps => _, + floatArrayOps => _, + intArrayOps => _, + longArrayOps => _, + refArrayOps => _, + shortArrayOps => _, + unitArrayOps => _, + genericWrapArray => _, + wrapRefArray => _, + wrapIntArray => _, + wrapDoubleArray => _, + wrapLongArray => _, + wrapFloatArray => _, + wrapCharArray => _, + wrapByteArray => _, + wrapShortArray => _, + wrapBooleanArray => _, + wrapUnitArray => _, + wrapString => _, + copyArrayToImmutableIndexedSeq => _, + _ +} +import scala.collection.Stepper.EfficientSplit +import scala.collection.immutable.Range +import scala.collection.mutable.ArrayBuilder +import scala.math.Ordering +import scala.reflect.ClassTag +import scala.util.Sorting + +object ArrayOps { + + @SerialVersionUID(3L) + private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + def length = xs.length + def apply(n: Int) = xs(n) + override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") + } + + /** A lazy filtered array. 
No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter[A](p: A => Boolean, xs: Array[A]) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + while(i < len) { + val x = xs(i) + if(p(x)) f(x) + i += 1 + } + } + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B: ClassTag](f: A => B): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val x = xs(i) + if(p(x)) b += f(x) + i = i + 1 + } + b.result() + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Creates a new non-strict filter which combines this filter with the given predicate. */ + def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + } + + @SerialVersionUID(3L) + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = 0 + private[this] val len = xs.length + override def knownSize: Int = len - pos + def hasNext: Boolean = pos < len + def next(): A = { + if (pos >= xs.length) Iterator.empty.next() + val r = xs(pos) + pos += 1 + r + } + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } + this + } + } + + @SerialVersionUID(3L) + private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = xs.length-1 + def hasNext: Boolean = pos >= 0 + def next(): A = { + if (pos < 0) Iterator.empty.next() + val r = xs(pos) + pos -= 1 + r + } + + override def drop(n: Int): Iterator[A] = { + if (n > 0) pos = Math.max( -1, pos - n) + this + } + } + + @SerialVersionUID(3L) + private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private[this] var pos = 0 + def hasNext: Boolean = pos < xs.length + def next(): Array[A] = { + if(pos >= xs.length) throw new NoSuchElementException + val r = new ArrayOps(xs).slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to + * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. 
+ */ + private final val MaxStableSortLength = 300 + + /** Avoid an allocation in [[collect]]. */ + private val fallback: Any => Any = _ => fallback +} + +/** This class serves as a wrapper for `Array`s with many of the operations found in + * indexed sequences. Where needed, instances of arrays are implicitly converted + * into this class. There is generally no reason to create an instance explicitly or use + * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on + * the implicit conversion to `ArrayOps` when calling a method (which does not actually + * allocate an instance of `ArrayOps` because it is a value class). + * + * Neither `Array` nor `ArrayOps` are proper collection types + * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and + * `immutable.ArraySeq` serve this purpose. + * + * The difference between this class and `ArraySeq`s is that calling transformer methods such as + * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. + * + * @tparam A type of the elements contained in this array. + */ +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + + @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def size: Int = xs.length + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def knownSize: Int = xs.length + + /** Tests whether the array is empty. + * + * @return `true` if the array contains no elements, `false` otherwise. + */ + @`inline` def isEmpty: Boolean = xs.length == 0 + + /** Tests whether the array is not empty. + * + * @return `true` if the array contains at least one element, `false` otherwise. + */ + @`inline` def nonEmpty: Boolean = xs.length != 0 + + /** Selects the first element of this array. + * + * @return the first element of this array. + * @throws NoSuchElementException if the array is empty. + */ + def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array") + + /** Selects the last element. + * + * @return The last element of this array. + * @throws NoSuchElementException If the array is empty. + */ + def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array") + + /** Optionally selects the first element. + * + * @return the first element of this array if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = if(isEmpty) None else Some(head) + + /** Optionally selects the last element. + * + * @return the last element of this array$ if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if(isEmpty) None else Some(last) + + /** Compares the size of this array to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + */ + def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize) + + /** Compares the length of this array to a test value. + * + * @param len the test value that gets compared with the length. 
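+   *            For example (illustrative): `Array(1, 2).lengthCompare(3)` is negative,
+   *            while `Array(1, 2, 3).lengthCompare(3)` is `0`.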
+ * @return A value `x` where + * {{{ + * x < 0 if this.length < len + * x == 0 if this.length == len + * x > 0 if this.length > len + * }}} + */ + def lengthCompare(len: Int): Int = Integer.compare(xs.length, len) + + /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int` + * because `size` is known and comparison is constant-time. + * + * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + def sizeIs: Int = xs.length + + /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int` + * because `length` is known and comparison is constant-time. + * + * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.lengthIs < len // this.lengthCompare(len) < 0 + * this.lengthIs <= len // this.lengthCompare(len) <= 0 + * this.lengthIs == len // this.lengthCompare(len) == 0 + * this.lengthIs != len // this.lengthCompare(len) != 0 + * this.lengthIs >= len // this.lengthCompare(len) >= 0 + * this.lengthIs > len // this.lengthCompare(len) > 0 + * }}} + */ + def lengthIs: Int = xs.length + + /** Selects an interval of elements. The returned array is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * + * @param from the lowest index to include from this array. + * @param until the lowest index to EXCLUDE from this array. + * @return an array containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this array. + */ + def slice(from: Int, until: Int): Array[A] = { + import java.util.Arrays.copyOfRange + val lo = max(from, 0) + val hi = min(until, xs.length) + if (hi > lo) { + (((xs: Array[_]): @unchecked) match { + case x: Array[AnyRef] => copyOfRange(x, lo, hi) + case x: Array[Int] => copyOfRange(x, lo, hi) + case x: Array[Double] => copyOfRange(x, lo, hi) + case x: Array[Long] => copyOfRange(x, lo, hi) + case x: Array[Float] => copyOfRange(x, lo, hi) + case x: Array[Char] => copyOfRange(x, lo, hi) + case x: Array[Byte] => copyOfRange(x, lo, hi) + case x: Array[Short] => copyOfRange(x, lo, hi) + case x: Array[Boolean] => copyOfRange(x, lo, hi) + }).asInstanceOf[Array[A]] + } else new Array[A](0) + } + + /** The rest of the array without its first element. */ + def tail: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) + + /** The initial part of the array without its last element. */ + def init: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) + + /** Iterates over the tails of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this array + */ + def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) + + /** Iterates over the inits of this array. 
The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this array + */ + def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) + + // A helper for tails and inits. + private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) + + /** An array containing the first `n` elements of this array. */ + def take(n: Int): Array[A] = slice(0, n) + + /** The rest of the array without its `n` first elements. */ + def drop(n: Int): Array[A] = slice(n, xs.length) + + /** An array containing the last `n` elements of this array. */ + def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) + + /** The rest of the array without its `n` last elements. */ + def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) + + /** Takes longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest prefix of this array whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val hi = if(i < 0) xs.length else i + slice(0, hi) + } + + /** Drops longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this array whose first element + * does not satisfy the predicate `p`. + */ + def dropWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val lo = if(i < 0) xs.length else i + slice(lo, xs.length) + } + + def iterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Int] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Double] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Long] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Float] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Char] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Short] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = (shape.shape: @unchecked) match { + case StepperShape.ReferenceShape => (xs: Any) match { + case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length) + case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length) + } + case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length) + case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length) + case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length) + case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length) + case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length) + case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length) + case StepperShape.FloatShape => new WidenedFloatArrayStepper 
(xs.asInstanceOf[Array[Float ]], 0, xs.length) + } + s.asInstanceOf[S with EfficientSplit] + } + + /** Partitions elements in fixed size arrays. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing arrays of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size) + + /** Splits this array into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this array whose + * elements all satisfy `p`, and the rest of this array. + */ + def span(p: A => Boolean): (Array[A], Array[A]) = { + val i = indexWhere(x => !p(x)) + val idx = if(i < 0) xs.length else i + (slice(0, idx), slice(idx, xs.length)) + } + + /** Splits this array into two at a given position. + * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. + * + * @param n the position at which to split. + * @return a pair of arrays consisting of the first `n` + * elements of this array, and the other elements. + */ + def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n)) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. */ + def partition(p: A => Boolean): (Array[A], Array[A]) = { + val res1, res2 = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + (if(p(x)) res1 else res2) += x + i += 1 + } + (res1.result(), res2.result()) + } + + /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. + * + * Example: + * {{{ + * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == (Array(1, 2, 3), + * // Array(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]] + * + * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. */ + def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + val res1 = ArrayBuilder.make[A1] + val res2 = ArrayBuilder.make[A2] + var i = 0 + while(i < xs.length) { + f(xs(i)) match { + case Left(x) => res1 += x + case Right(x) => res2 += x + } + i += 1 + } + (res1.result(), res2.result()) + } + + /** Returns a new array with the elements in reversed order. */ + @inline def reverse: Array[A] = { + val len = xs.length + val res = new Array[A](len) + var i = 0 + while(i < len) { + res(len-i-1) = xs(i) + i += 1 + } + res + } + + /** An iterator yielding elements in reversed order. + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. 
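+   *
+   * For example (illustrative):
+   * {{{
+   *   Array(1, 2, 3).reverseIterator.toList   // List(3, 2, 1)
+   * }}}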
+   *
+   * @return an iterator yielding the elements of this array in reversed order
+   */
+  def reverseIterator: Iterator[A] =
+    ((xs: Any @unchecked) match {
+      case xs: Array[AnyRef]  => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Int]     => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Double]  => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Long]    => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Float]   => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Char]    => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Byte]    => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Short]   => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs)
+      case xs: Array[Unit]    => new ArrayOps.ReverseIterator(xs)
+      case null => throw new NullPointerException
+    }).asInstanceOf[Iterator[A]]
+
+  /** Selects all elements of this array which satisfy a predicate.
+   *
+   * @param p the predicate used to test elements.
+   * @return a new array consisting of all elements of this array that satisfy the given predicate `p`.
+   */
+  def filter(p: A => Boolean): Array[A] = {
+    val res = ArrayBuilder.make[A]
+    var i = 0
+    while(i < xs.length) {
+      val x = xs(i)
+      if(p(x)) res += x
+      i += 1
+    }
+    res.result()
+  }
+
+  /** Selects all elements of this array which do not satisfy a predicate.
+   *
+   * @param p the predicate used to test elements.
+   * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`.
+   */
+  def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x))
+
+  /** Sorts this array according to an Ordering.
+   *
+   * The sort is stable. That is, elements that are equal (as determined by
+   * `ord`) appear in the same order in the sorted sequence as in the original.
+   *
+   * @see [[scala.math.Ordering]]
+   *
+   * @param ord the ordering to be used to compare elements.
+   * @return an array consisting of the elements of this array
+   *         sorted according to the ordering `ord`.
+   */
+  def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = {
+    val len = xs.length
+    def boxed = if(len < ArrayOps.MaxStableSortLength) {
+      val a = xs.clone()
+      Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]])
+      a
+    } else {
+      val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef)
+      Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]])
+      Array.copyAs[A](a, len)
+    }
+    if(len <= 1) xs.clone()
+    else ((xs: Array[_]) match {
+      case xs: Array[AnyRef] =>
+        val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a
+      case xs: Array[Int] =>
+        if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+        else boxed
+      case xs: Array[Long] =>
+        if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+        else boxed
+      case xs: Array[Char] =>
+        if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+        else boxed
+      case xs: Array[Byte] =>
+        if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+        else boxed
+      case xs: Array[Short] =>
+        if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+        else boxed
+      case xs: Array[Boolean] =>
+        if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a }
+        else boxed
+      case xs => boxed
+    }).asInstanceOf[Array[A]]
+  }
+
+  /** Sorts this array according to a comparison function.
+   *
+   * The sort is stable. That is, elements that are equal (as determined by
+   * `lt`) appear in the same order in the sorted sequence as in the original.
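+   *
+   * For example (illustrative):
+   * {{{
+   *   Array("bb", "a", "ccc").sortWith(_.length < _.length)   // Array(a, bb, ccc)
+   * }}}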
+ * + * @param lt the comparison function which tests whether + * its first argument precedes its second argument in + * the desired ordering. + * @return an array consisting of the elements of this array + * sorted according to the comparison function `lt`. + */ + def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt)) + + /** Sorts this array according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return an array consisting of the elements of this array + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + */ + def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f) + + /** Creates a non-strict filter of this array. + * + * Note: the difference between `c filter p` and `c withFilter p` is that + * the former creates a new array, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * + * @param p the predicate used to test elements. + * @return an object of class `ArrayOps.WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this array + * which satisfy the predicate `p`. + */ + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs) + + /** Finds index of first occurrence of some value in this array after or at some start index. + * + * @param elem the element value to search for. + * @param from the start index + * @return the index `>= from` of the first element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf(elem: A, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(elem == xs(i)) return i + i += 1 + } + -1 + } + + /** Finds index of the first element satisfying some predicate after or at some start index. + * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(p(xs(i))) return i + i += 1 + } + -1 + } + + /** Finds index of last occurrence of some value in this array before or at a given end index. + * + * @param elem the element value to search for. + * @param end the end index. + * @return the index `<= end` of the last element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(elem == xs(i)) return i + i -= 1 + } + -1 + } + + /** Finds index of last element satisfying some predicate before or at given end index. + * + * @param p the predicate used to test elements. + * @return the index `<= end` of the last element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. 
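+   *
+   * For example (illustrative):
+   * {{{
+   *   Array(1, 2, 3, 2).lastIndexWhere(_ == 2)   // 3
+   * }}}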
+ */ + def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(p(xs(i))) return i + i -= 1 + } + -1 + } + + /** Finds the first element of the array satisfying a predicate, if any. + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the array + * that satisfies `p`, or `None` if none exists. + */ + def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = { + val idx = indexWhere(p) + if(idx == -1) None else Some(xs(idx)) + } + + /** Tests whether a predicate holds for at least one element of this array. + * + * @param p the predicate used to test elements. + * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false` + */ + def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0 + + /** Tests whether a predicate holds for all elements of this array. + * + * @param p the predicate used to test elements. + * @return `true` if this array is empty or the given predicate `p` + * holds for all elements of this array, otherwise `false`. + */ + def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = { + var i = 0 + while(i < xs.length) { + if(!p(xs(i))) return false + i += 1 + } + true + } + + /** Applies a binary operator to a start value and all elements of this array, + * going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive elements of this array, + * going left to right with the start value `z` on the left: + * {{{ + * op(...op(z, x_1), x_2, ..., x_n) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. + */ + def foldLeft[B](z: B)(op: (B, A) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + val length = xs.length + var v: Any = z + var i = 0 + while(i < length) { + v = op(v, xs(i)) + i += 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException // null-check first helps static analysis of instanceOf + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + } + + /** Produces an array containing cumulative results of applying the binary + * operator going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. 
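+   *         (Illustrative note: the first element of the result is `z` and the last element
+   *         equals `foldLeft(z)(op)`.)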
+ * + * Example: + * {{{ + * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10) + * }}} + * + */ + def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = { + var v = z + var i = 0 + val res = new Array[B](xs.length + 1) + while(i < xs.length) { + res(i) = v + v = op(v, xs(i)) + i += 1 + } + res(i) = v + res + } + + /** Computes a prefix scan of the elements of the array. + * + * Note: The neutral element `z` may be applied more than once. + * + * @tparam B element type of the resulting array + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new array containing the prefix scan of the elements in this array + */ + def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + + /** Produces an array containing cumulative results of applying the binary + * operator going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0) + * }}} + * + */ + def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = { + var v = z + var i = xs.length - 1 + val res = new Array[B](xs.length + 1) + res(xs.length) = z + while(i >= 0) { + v = op(xs(i), v) + res(i) = v + i -= 1 + } + res + } + + /** Applies a binary operator to all elements of this array and a start value, + * going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive elements of this array, + * going right to left with the start value `z` on the right: + * {{{ + * op(x_1, op(x_2, ... op(x_n, z)...)) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. + */ + def foldRight[B](z: B)(op: (A, B) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + var v = z + var i = xs.length - 1 + while(i >= 0) { + v = op(xs(i), v) + i -= 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + + } + + /** Folds the elements of this array using the specified associative binary operator. + * + * @tparam A1 a type parameter for the binary operator, a supertype of `A`. + * @param z a neutral element for the fold operation; may be added to the result + * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, + * 0 for addition, or 1 for multiplication). + * @param op a binary operator that must be associative. 
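+   *           For example (illustrative): `Array(1, 2, 3).fold(0)(_ + _)` evaluates to `6`.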
+ * @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + val len = xs.length + val ys = new Array[B](len) + if(len > 0) { + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + ys + } + + def mapInPlace(f: A => A): Array[A] = { + var i = 0 + while (i < xs.length) { + xs.update(i, f(xs(i))) + i = i + 1 + } + xs + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Flattens a two-dimensional array by concatenating all its rows + * into a single array. + * + * @tparam B Type of row elements. + * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. + * @return An array obtained by concatenating rows of this array. + */ + def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val len = xs.length + var size = 0 + var i = 0 + while(i < len) { + xs(i) match { + case it: IterableOnce[_] => + val k = it.knownSize + if(k > 0) size += k + case a: Array[_] => size += a.length + case _ => + } + i += 1 + } + if(size > 0) b.sizeHint(size) + i = 0 + while(i < len) { + b ++= asIterable(xs(i)) + i += 1 + } + b.result() + } + + /** Builds a new array by applying a partial function to all elements of this array + * on which the function is defined. + * + * @param pf the partial function which filters and maps the array. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. 
+ * The order of the elements is preserved. + */ + def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + val fallback: Any => Any = ArrayOps.fallback + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) + i += 1 + } + b.result() + } + + /** Finds the first element of the array for which the given partial function is defined, and applies the + * partial function to it. */ + def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = { + val fallback: Any => Any = ArrayOps.fallback + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B]) + i += 1 + } + None + } + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the minimum of the lengths of this array and `that`. + */ + def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + val b = new ArrayBuilder.ofRef[(A, B)]() + val k = that.knownSize + b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + b.result() + } + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this array is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this array. + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the maximum of the lengths of this array and `that`. 
+ * If this array is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this array, `thatElem` values are used to pad the result. + */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + val b = new ArrayBuilder.ofRef[(A1, B)]() + val k = that.knownSize + b.sizeHint(max(k, xs.length)) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + while(it.hasNext) { + b += ((thisElem, it.next())) + i += 1 + } + while(i < xs.length) { + b += ((xs(i), thatElem)) + i += 1 + } + b.result() + } + + /** Zips this array with its indices. + * + * @return A new array containing pairs consisting of all elements of this array paired with their index. + * Indices start at `0`. + */ + def zipWithIndex: Array[(A, Int)] = { + val b = new Array[(A, Int)](xs.length) + var i = 0 + while(i < xs.length) { + b(i) = ((xs(i), i)) + i += 1 + } + b + } + + /** A copy of this array with an element appended. */ + def appended[B >: A : ClassTag](x: B): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+1) + dest(xs.length) = x + dest + } + + @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + + /** A copy of this array with an element prepended. */ + def prepended[B >: A : ClassTag](x: B): Array[B] = { + val dest = new Array[B](xs.length + 1) + dest(0) = x + Array.copy(xs, 0, dest, 1, xs.length) + dest + } + + @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + + /** A copy of this array with all elements of a collection prepended. */ + def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = prefix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + xs.length) + b.addAll(xs) + b.result() + } + + /** A copy of this array with all elements of an array prepended. */ + def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](prefix, prefix.length+xs.length) + Array.copy(xs, 0, dest, prefix.length, xs.length) + dest + } + + @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + + @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + + /** A copy of this array with all elements of a collection appended. */ + def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = suffix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(xs) + b.addAll(suffix) + b.result() + } + + /** A copy of this array with all elements of an array appended. 
*/ + def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+suffix.length) + Array.copy(suffix, 0, dest, xs.length, suffix.length) + dest + } + + @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + + @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + + /** Tests whether this array contains a given value as an element. + * + * @param elem the element to test. + * @return `true` if this array has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains(elem: A): Boolean = exists (_ == elem) + + /** Returns a copy of this array with patched values. + * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original array appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from The start index from which to patch + * @param other The patch values + * @param replaced The number of values in the original array that are replaced by the patch. + */ + def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { + val b = ArrayBuilder.make[B] + val k = other.knownSize + val r = if(replaced < 0) 0 else replaced + if(k >= 0) b.sizeHint(xs.length + k - r) + val chunk1 = if(from > 0) min(from, xs.length) else 0 + if(chunk1 > 0) b.addAll(xs, 0, chunk1) + b ++= other + val remaining = xs.length - chunk1 - r + if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining) + b.result() + } + + /** Converts an array of pairs into an array of first elements and an array of second elements. + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this Array is a pair. + * @param ct1 a class tag for `A1` type parameter that is required to create an instance + * of `Array[A1]` + * @param ct2 a class tag for `A2` type parameter that is required to create an instance + * of `Array[A2]` + * @return a pair of Arrays, containing, respectively, the first and second half + * of each element pair of this Array. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + var i = 0 + while (i < xs.length) { + val e = asPair(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + i += 1 + } + (a1, a2) + } + + /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. + * + * @tparam A1 the type of the first of three elements in the triple + * @tparam A2 the type of the second of three elements in the triple + * @tparam A3 the type of the third of three elements in the triple + * @param asTriple an implicit conversion which asserts that the element type + * of this Array is a triple. 
+   * @param ct1      a class tag for the A1 type parameter that is required to create an instance
+   *                 of `Array[A1]`
+   * @param ct2      a class tag for the A2 type parameter that is required to create an instance
+   *                 of `Array[A2]`
+   * @param ct3      a class tag for the A3 type parameter that is required to create an instance
+   *                 of `Array[A3]`
+   * @return         a triple of Arrays, containing, respectively, the first, second, and third
+   *                 elements from each element triple of this Array.
+   */
+  def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2],
+                         ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = {
+    val a1 = new Array[A1](xs.length)
+    val a2 = new Array[A2](xs.length)
+    val a3 = new Array[A3](xs.length)
+    var i = 0
+    while (i < xs.length) {
+      val e = asTriple(xs(i))
+      a1(i) = e._1
+      a2(i) = e._2
+      a3(i) = e._3
+      i += 1
+    }
+    (a1, a2, a3)
+  }
+
+  /** Transposes a two-dimensional array.
+   *
+   * @tparam B       Type of row elements.
+   * @param asArray  A function that converts elements of this array to rows - arrays of type `B`.
+   * @return         An array obtained by replacing the elements of this array with the rows they represent.
+   */
+  def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = {
+    val aClass = xs.getClass.getComponentType
+    val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass))
+    if (xs.length == 0) bb.result()
+    else {
+      def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType))
+      val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder())
+      for (xs <- this) {
+        var i = 0
+        for (x <- new ArrayOps(asArray(xs))) {
+          bs(i) += x
+          i += 1
+        }
+      }
+      for (b <- new ArrayOps(bs)) bb += b.result()
+      bb.result()
+    }
+  }
+
+  /** Apply `f` to each element for its side effects.
+   * Note: [U] parameter needed to help scalac's type inference.
+   */
+  def foreach[U](f: A => U): Unit = {
+    val len = xs.length
+    var i = 0
+    (xs: Any @unchecked) match {
+      case xs: Array[AnyRef]  => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+      case xs: Array[Int]     => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+      case xs: Array[Double]  => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+      case xs: Array[Long]    => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+      case xs: Array[Float]   => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+      case xs: Array[Char]    => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+      case xs: Array[Byte]    => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+      case xs: Array[Short]   => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+      case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+    }
+  }
+
+  /** Selects all the elements of this array ignoring the duplicates.
+   *
+   * @return a new array consisting of all the elements of this array without duplicates.
+   */
+  def distinct: Array[A] = distinctBy(identity)
+
+  /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying
+   * the transforming function `f`.
+   *
+   * @param f The transforming function whose result is used to determine the uniqueness of each element
+   * @tparam B the type of the elements after being transformed by `f`
+   * @return a new array consisting of all the elements of this array without duplicates.
+   */
+  def distinctBy[B](f: A => B): Array[A] =
+    ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result()
+
+  /** A copy of this array with an element value appended until a given target length is reached.
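+   * For example (illustrative):
+   * {{{
+   *   Array(1, 2).padTo(4, 0)   // Array(1, 2, 0, 0)
+   * }}}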
+ * + * @param len the target length + * @param elem the padding value + * @tparam B the element type of the returned array. + * @return a new array consisting of + * all elements of this array followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. + */ + def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = { + var i = xs.length + val newlen = max(i, len) + val dest = Array.copyAs[B](xs, newlen) + while(i < newlen) { + dest(i) = elem + i += 1 + } + dest + } + + /** Produces the range of all indices of this sequence. + * + * @return a `Range` value from `0` to one less than the length of this array. + */ + def indices: Range = Range(0, xs.length) + + /** Partitions this array into a map of arrays according to some discriminator function. + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to arrays such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to an array of those elements `x` + * for which `f(x)` equals `k`. + */ + def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = { + val m = mutable.Map.empty[K, ArrayBuilder[A]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val key = f(elem) + val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A]) + bldr += elem + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + /** + * Partitions this array into a map of arrays according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Array[User]): Map[Int, Array[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + val m = mutable.Map.empty[K, ArrayBuilder[B]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val k = key(elem) + val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B]) + bldr += f(elem) + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq + + def toIndexedSeq: immutable.IndexedSeq[A] = + immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index 0. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start`. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @tparam B the type of the elements of the array. 
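+   *
+   * For example (an illustrative sketch, with arbitrarily chosen values):
+   * {{{
+   *   val dest = new Array[Int](5)
+   *   Array(1, 2, 3).copyToArray(dest, 1)   // returns 3; dest is now Array(0, 1, 2, 3, 0)
+   * }}}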
+ */ + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start` with at most `len` values. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) + if (copied > 0) { + Array.copy(this.xs, 0, xs, start, copied) + } + copied + } + + /** Create a copy of this array with the specified element type. */ + def toArray[B >: A: ClassTag]: Array[B] = { + val destination = new Array[B](xs.length) + copyToArray(destination, 0) + destination + } + + /** Counts the number of elements in this array which satisfy a predicate */ + def count(p: A => Boolean): Int = { + var i, res = 0 + val len = xs.length + while(i < len) { + if(p(xs(i))) res += 1 + i += 1 + } + res + } + + // can't use a default arg because we already have another overload with a default arg + /** Tests whether this array starts with the given array. */ + @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + + /** Tests whether this array contains the given array at a given index. + * + * @param that the array to test + * @param offset the index where the array is searched. + * @return `true` if the array `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + val safeOffset = offset.max(0) + val thatl = that.length + if(thatl > xs.length-safeOffset) thatl == 0 + else { + var i = 0 + while(i < thatl) { + if(xs(i+safeOffset) != that(i)) return false + i += 1 + } + true + } + } + + /** Tests whether this array ends with the given array. + * + * @param that the array to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Array[B]): Boolean = { + val thatl = that.length + val off = xs.length - thatl + if(off < 0) false + else { + var i = 0 + while(i < thatl) { + if(xs(i+off) != that(i)) return false + i += 1 + } + true + } + } + + /** A copy of this array with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + */ + def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") + val dest = toArray[B] + dest(index) = elem + dest + } + + @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which + may not provide the best possible performance. 
We need them in `ArrayOps` because their return type + mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this array and another sequence. + * + * @param that the sequence of elements to remove + * @return a new array which contains all elements of this array + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + */ + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] + + /** Computes the multiset intersection between this array and another sequence. + * + * @param that the sequence of elements to intersect with. + * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive groups + * @return An iterator producing arrays of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` elements remaining to be grouped. + */ + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. 
+ * + * @return An Iterator which traverses the n-element combinations of this array + * @example {{{ + * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b) + * // Array(a, c) + * // Array(b, b) + * // Array(b, c) + * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(b, b) + * // Array(b, a) + * }}} + */ + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this array. + * @example {{{ + * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b, b) + * // Array(b, a, b) + * // Array(b, b, a) + * }}} + */ + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array contains the given sequence at a given index. + * + * @param that the sequence to test + * @param offset the index where the sequence is searched. + * @return `true` if the sequence `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array ends with the given sequence. + * + * @param that the sequence to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) +} From 7273c4dc73ad86a8bd4a044c0828fa7c61d26a4b Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 10:37:22 +0100 Subject: [PATCH 077/216] Don't flag wildcard array arguments for not being sealed --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 3e246c754feb..9d56c93bfaf5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -21,7 +21,7 @@ import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} import StdNames.nme -import NameKinds.DefaultGetterName +import NameKinds.{DefaultGetterName, WildcardParamName} import reporting.trace /** The capture checker */ @@ -1318,7 +1318,9 @@ class CheckCaptures extends Recheck, SymTransformer: def traverse(t: Type): Unit = t match case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass => - if !(pos.span.isSynthetic && ctx.reporter.errorsReported) then + if !(pos.span.isSynthetic && ctx.reporter.errorsReported) + && !arg.typeSymbol.name.is(WildcardParamName) + then CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol, "Array", "have element type", "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.", @@ -1341,10 +1343,11 @@ class CheckCaptures extends Recheck, SymTransformer: val lctx = tree match case _: DefTree | _: TypeDef if tree.symbol.exists => ctx.withOwner(tree.symbol) case _ => ctx - traverseChildren(tree)(using lctx) - check(tree) + trace(i"post check 
$tree"): + traverseChildren(tree)(using lctx) + check(tree) def check(tree: Tree)(using Context) = tree match - case t @ TypeApply(fun, args) => + case TypeApply(fun, args) => fun.knownType.widen match case tl: PolyType => val normArgs = args.lazyZip(tl.paramInfos).map: (arg, bounds) => From b67422ed2775984a6dd3a653cc447a7a9a7bed88 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 11:36:27 +0100 Subject: [PATCH 078/216] Fix isPureClass test --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 6 +++++- .../captures/exception-definitions.check | 8 ++++---- tests/neg-custom-args/captures/leaked-curried.check | 11 ++++------- tests/neg-custom-args/captures/leaked-curried.scala | 4 ++-- .../stdlib/collection/mutable/ArraySeq.scala | 3 ++- 5 files changed, 17 insertions(+), 15 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 0fe79da30ca5..40e94ebde5dd 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -231,7 +231,11 @@ extension (cls: ClassSymbol) && bc.givenSelfType.dealiasKeepAnnots.match case CapturingType(_, refs) => refs.isAlwaysEmpty case RetainingType(_, refs) => refs.isEmpty - case selfType => selfType.exists && selfType.captureSet.isAlwaysEmpty + case selfType => + isCaptureChecking // At Setup we have not processed self types yet, so + // unless a self type is explicitly given, we can't tell + // and err on the side of impure. + && selfType.exists && selfType.captureSet.isAlwaysEmpty extension (sym: Symbol) diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 16d623e64f7c..4b1fe0273f52 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -6,8 +6,8 @@ -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ - |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable --- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 ----------------------------------------------- + |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Err2 +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:13 ---------------------------------------------- 8 | class Err3(c: Any^) extends Exception // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of pure base class class Throwable + | ^ + | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of the self type of class Err3 diff --git a/tests/neg-custom-args/captures/leaked-curried.check b/tests/neg-custom-args/captures/leaked-curried.check index c23d1516acf5..3f0a9800a4ec 100644 --- a/tests/neg-custom-args/captures/leaked-curried.check +++ b/tests/neg-custom-args/captures/leaked-curried.check @@ -2,10 +2,7 @@ 14 | () => () => io // error | ^^ |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Fuzz --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/leaked-curried.scala:15:10 ------------------------------- -15 | class Foo extends Box, Pure: // error - | ^ - | illegal inheritance: self type Foo^{io} of 
class Foo does not conform to self type Pure - | of parent trait Pure - | - | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/leaked-curried.scala:17:20 ---------------------------------------------------- +17 | () => () => io // error + | ^^ + |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Foo diff --git a/tests/neg-custom-args/captures/leaked-curried.scala b/tests/neg-custom-args/captures/leaked-curried.scala index a7c48219b450..f9238259e065 100644 --- a/tests/neg-custom-args/captures/leaked-curried.scala +++ b/tests/neg-custom-args/captures/leaked-curried.scala @@ -12,8 +12,8 @@ def main(): Unit = self => val get: () ->{} () ->{io} Cap^ = () => () => io // error - class Foo extends Box, Pure: // error + class Foo extends Box, Pure: val get: () ->{} () ->{io} Cap^ = - () => () => io + () => () => io // error new Foo val bad = leaked.get()().use() // using a leaked capability diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala index 1c41f68bc8bb..bd3a208a94c0 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala @@ -41,7 +41,8 @@ sealed abstract class ArraySeq[sealed T] with IndexedSeq[T] with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] - with Serializable { + with Serializable + with Pure { override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged From c64c057de60201f6245e59d52815e78c38cbd256 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 15:19:47 +0100 Subject: [PATCH 079/216] Make sealed an annotation --- .../src/dotty/tools/dotc/typer/Namer.scala | 9 +- tests/neg/class-mods.scala | 2 +- .../captures/sealed-value-class.scala | 3 + .../stdlib/collection/ArrayOps.scala | 113 +++++++++--------- 4 files changed, 69 insertions(+), 58 deletions(-) create mode 100644 tests/pos-custom-args/captures/sealed-value-class.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 7ef552e3661c..5361f37c2a76 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1042,7 +1042,14 @@ class Namer { typer: Typer => tp val rhs1 = typedAheadType(rhs) - val rhsBodyType: TypeBounds = addVariances(rhs1.tpe).toBounds + val rhsBodyType: TypeBounds = + val bounds = addVariances(rhs1.tpe).toBounds + if sym.is(Sealed) then + sym.resetFlag(Sealed) + bounds.derivedTypeBounds(bounds.lo, + AnnotatedType(bounds.hi, Annotation(defn.Caps_SealedAnnot, rhs1.span))) + else bounds + val unsafeInfo = if (isDerived) rhsBodyType else abstracted(rhsBodyType) def opaqueToBounds(info: Type): Type = diff --git a/tests/neg/class-mods.scala b/tests/neg/class-mods.scala index 60e9fb279364..cf4348ad42d7 100644 --- a/tests/neg/class-mods.scala +++ b/tests/neg/class-mods.scala @@ -2,7 +2,7 @@ open final class Foo1 // error sealed open class Foo2 // error open type T1 // error -sealed type T2 // error +type T2 // ok abstract type T3 // error abstract open type T4 // error diff --git a/tests/pos-custom-args/captures/sealed-value-class.scala b/tests/pos-custom-args/captures/sealed-value-class.scala new file mode 100644 index 000000000000..b5f25bf2d203 --- /dev/null +++ b/tests/pos-custom-args/captures/sealed-value-class.scala @@ -0,0 +1,3 @@ 
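+// Descriptive comment (added for illustration): checks that a value class may
+// declare a `sealed` type parameter once `sealed` is treated as an annotation.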
+class Ops[sealed A](xs: Array[A]) extends AnyVal: + + def f(p: A => Boolean): Array[A] = xs diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala index 485427886625..13f3bd0d5ed2 100644 --- a/tests/pos-special/stdlib/collection/ArrayOps.scala +++ b/tests/pos-special/stdlib/collection/ArrayOps.scala @@ -15,6 +15,7 @@ package collection import java.lang.Math.{max, min} import java.util.Arrays +import language.experimental.captureChecking import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally genericArrayOps => _, @@ -53,14 +54,14 @@ import scala.util.Sorting object ArrayOps { @SerialVersionUID(3L) - private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + private class ArrayView[sealed A](xs: Array[A]) extends AbstractIndexedSeqView[A] { def length = xs.length def apply(n: Int) = xs(n) override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") } /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ - class WithFilter[A](p: A => Boolean, xs: Array[A]) { + class WithFilter[sealed A](p: A => Boolean, xs: Array[A]) { /** Apply `f` to each element for its side effects. * Note: [U] parameter needed to help scalac's type inference. @@ -82,7 +83,7 @@ object ArrayOps { * @return a new array resulting from applying the given function * `f` to each element of this array and collecting the results. */ - def map[B: ClassTag](f: A => B): Array[B] = { + def map[sealed B: ClassTag](f: A => B): Array[B] = { val b = ArrayBuilder.make[B] var i = 0 while (i < xs.length) { @@ -101,7 +102,7 @@ object ArrayOps { * @return a new array resulting from applying the given collection-valued function * `f` to each element of this array and concatenating the results. */ - def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + def flatMap[sealed B: ClassTag](f: A => IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] var i = 0 while(i < xs.length) { @@ -112,15 +113,15 @@ object ArrayOps { b.result() } - def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = flatMap[B](x => asIterable(f(x))) /** Creates a new non-strict filter which combines this filter with the given predicate. 
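   * An illustrative sketch of chained filters (values invented):
   * {{{
   *   Array(1, 2, 3, 4).withFilter(_ % 2 == 0).withFilter(_ > 2).map(_.toString)
   *   // Array("4")
   * }}}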
*/ - def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + def withFilter(q: A => Boolean): WithFilter[A]^{this, q} = new WithFilter[A](a => p(a) && q(a), xs) } @SerialVersionUID(3L) - private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { private[this] var pos = 0 private[this] val len = xs.length override def knownSize: Int = len - pos @@ -143,7 +144,7 @@ object ArrayOps { } @SerialVersionUID(3L) - private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private final class ReverseIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { private[this] var pos = xs.length-1 def hasNext: Boolean = pos >= 0 def next(): A = { @@ -160,7 +161,7 @@ object ArrayOps { } @SerialVersionUID(3L) - private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private final class GroupedIterator[sealed A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { private[this] var pos = 0 def hasNext: Boolean = pos < xs.length def next(): Array[A] = { @@ -196,7 +197,7 @@ object ArrayOps { * * @tparam A type of the elements contained in this array. */ -final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { +final class ArrayOps[sealed A](private val xs: Array[A]) /*extends AnyVal*/ { @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) @@ -366,7 +367,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]]^{f} = Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) /** An array containing the first `n` elements of this array. */ @@ -504,7 +505,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], * and the second one made of those wrapped in [[scala.util.Right]]. */ - def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + def partitionMap[sealed A1: ClassTag, sealed A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { val res1 = ArrayBuilder.make[A1] val res2 = ArrayBuilder.make[A2] var i = 0 @@ -663,7 +664,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * All these operations apply to those elements of this array * which satisfy the predicate `p`. */ - def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs) + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A]^{p} = new ArrayOps.WithFilter[A](p, xs) /** Finds index of first occurrence of some value in this array after or at some start index. * @@ -776,7 +777,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Returns `z` if this array is empty. 
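   * A small illustrative sketch (values invented):
   * {{{
   *   Array(1, 2, 3).foldLeft(10)(_ - _)   // ((10 - 1) - 2) - 3 == 4
   * }}}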
*/ def foldLeft[B](z: B)(op: (B, A) => B): B = { - def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { val length = xs.length var v: Any = z var i = 0 @@ -815,7 +816,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * }}} * */ - def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = { + def scanLeft[sealed B : ClassTag](z: B)(op: (B, A) => B): Array[B] = { var v = z var i = 0 val res = new Array[B](xs.length + 1) @@ -838,7 +839,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * * @return a new array containing the prefix scan of the elements in this array */ - def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + def scan[sealed B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) /** Produces an array containing cumulative results of applying the binary * operator going right to left. @@ -854,7 +855,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * }}} * */ - def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = { + def scanRight[sealed B : ClassTag](z: B)(op: (A, B) => B): Array[B] = { var v = z var i = xs.length - 1 val res = new Array[B](xs.length + 1) @@ -882,7 +883,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Returns `z` if this array is empty. */ def foldRight[B](z: B)(op: (A, B) => B): B = { - def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { var v = z var i = xs.length - 1 while(i >= 0) { @@ -925,7 +926,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array resulting from applying the given function * `f` to each element of this array and collecting the results. */ - def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + def map[sealed B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { val len = xs.length val ys = new Array[B](len) if(len > 0) { @@ -962,7 +963,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array resulting from applying the given collection-valued function * `f` to each element of this array and concatenating the results. */ - def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + def flatMap[sealed B : ClassTag](f: A => IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] var i = 0 while(i < xs.length) { @@ -972,7 +973,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { b.result() } - def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = flatMap[B](x => asIterable(f(x))) /** Flattens a two-dimensional array by concatenating all its rows @@ -982,7 +983,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. * @return An array obtained by concatenating rows of this array. 
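   * For instance (illustrative):
   * {{{
   *   Array(Array(1, 2), Array(3)).flatten   // Array(1, 2, 3)
   * }}}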
*/ - def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + def flatten[sealed B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { val b = ArrayBuilder.make[B] val len = xs.length var size = 0 @@ -1015,7 +1016,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * `pf` to each element on which it is defined and collecting the results. * The order of the elements is preserved. */ - def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + def collect[sealed B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { val fallback: Any => Any = ArrayOps.fallback val b = ArrayBuilder.make[B] var i = 0 @@ -1049,7 +1050,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array containing pairs consisting of corresponding elements of this array and `that`. * The length of the returned array is the minimum of the lengths of this array and `that`. */ - def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + def zip[sealed B](that: IterableOnce[B]): Array[(A, B)] = { val b = new ArrayBuilder.ofRef[(A, B)]() val k = that.knownSize b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) @@ -1094,7 +1095,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * If this array is shorter than `that`, `thisElem` values are used to pad the result. * If `that` is shorter than this array, `thatElem` values are used to pad the result. */ - def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + def zipAll[sealed A1 >: A, sealed B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { val b = new ArrayBuilder.ofRef[(A1, B)]() val k = that.knownSize b.sizeHint(max(k, xs.length)) @@ -1131,26 +1132,26 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** A copy of this array with an element appended. */ - def appended[B >: A : ClassTag](x: B): Array[B] = { + def appended[sealed B >: A : ClassTag](x: B): Array[B] = { val dest = Array.copyAs[B](xs, xs.length+1) dest(xs.length) = x dest } - @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + @`inline` final def :+ [sealed B >: A : ClassTag](x: B): Array[B] = appended(x) /** A copy of this array with an element prepended. */ - def prepended[B >: A : ClassTag](x: B): Array[B] = { + def prepended[sealed B >: A : ClassTag](x: B): Array[B] = { val dest = new Array[B](xs.length + 1) dest(0) = x Array.copy(xs, 0, dest, 1, xs.length) dest } - @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + @`inline` final def +: [sealed B >: A : ClassTag](x: B): Array[B] = prepended(x) /** A copy of this array with all elements of a collection prepended. */ - def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + def prependedAll[sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] val k = prefix.knownSize if(k >= 0) b.sizeHint(k + xs.length) @@ -1161,18 +1162,18 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** A copy of this array with all elements of an array prepended. 
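   * Illustrative sketch:
   * {{{
   *   Array(3, 4).prependedAll(Array(1, 2))   // Array(1, 2, 3, 4)
   * }}}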
*/ - def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + def prependedAll[sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { val dest = Array.copyAs[B](prefix, prefix.length+xs.length) Array.copy(xs, 0, dest, prefix.length, xs.length) dest } - @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + @`inline` final def ++: [sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) - @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + @`inline` final def ++: [sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) /** A copy of this array with all elements of a collection appended. */ - def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + def appendedAll[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] val k = suffix.knownSize if(k >= 0) b.sizeHint(k + xs.length) @@ -1182,23 +1183,23 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** A copy of this array with all elements of an array appended. */ - def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + def appendedAll[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { val dest = Array.copyAs[B](xs, xs.length+suffix.length) Array.copy(suffix, 0, dest, xs.length, suffix.length) dest } - @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + @`inline` final def :++ [sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + @`inline` final def :++ [sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + @`inline` final def concat[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + @`inline` final def concat[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + @`inline` final def ++[sealed B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) - @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + @`inline` final def ++[sealed B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) /** Tests whether this array contains a given value as an element. * @@ -1217,7 +1218,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param other The patch values * @param replaced The number of values in the original array that are replaced by the patch. */ - def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { + def patch[sealed B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { val b = ArrayBuilder.make[B] val k = other.knownSize val r = if(replaced < 0) 0 else replaced @@ -1243,7 +1244,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a pair of Arrays, containing, respectively, the first and second half * of each element pair of this Array. 
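   * For example (illustrative, values invented):
   * {{{
   *   Array((1, 'a'), (2, 'b')).unzip   // (Array(1, 2), Array(a, b))
   * }}}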
*/ - def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { + def unzip[sealed A1, sealed A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { val a1 = new Array[A1](xs.length) val a2 = new Array[A2](xs.length) var i = 0 @@ -1272,7 +1273,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a triple of Arrays, containing, respectively, the first, second, and third * elements from each element triple of this Array. */ - def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], + def unzip3[sealed A1, sealed A2, sealed A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { val a1 = new Array[A1](xs.length) val a2 = new Array[A2](xs.length) @@ -1294,7 +1295,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param asArray A function that converts elements of this array to rows - arrays of type `B`. * @return An array obtained by replacing elements of this arrays with rows the represent. */ - def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + def transpose[sealed B](implicit asArray: A => Array[B]): Array[Array[B]] = { val aClass = xs.getClass.getComponentType val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) if (xs.length == 0) bb.result() @@ -1345,7 +1346,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @tparam B the type of the elements after being transformed by `f` * @return a new array consisting of all the elements of this array without duplicates. */ - def distinctBy[B](f: A => B): Array[A] = + def distinctBy[B](f: A -> B): Array[A] = ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() /** A copy of this array with an element value appended until a given target length is reached. @@ -1357,7 +1358,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * all elements of this array followed by the minimal number of occurrences of `elem` so * that the resulting collection has a length of at least `len`. */ - def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = { + def padTo[sealed B >: A : ClassTag](len: Int, elem: B): Array[B] = { var i = xs.length val newlen = max(i, len) val dest = Array.copyAs[B](xs, newlen) @@ -1417,7 +1418,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @tparam K the type of keys returned by the discriminator function * @tparam B the type of values returned by the transformation function */ - def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + def groupMap[K, sealed B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { val m = mutable.Map.empty[K, ArrayBuilder[B]] val len = xs.length var i = 0 @@ -1444,7 +1445,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param xs the array to fill. * @tparam B the type of the elements of the array. */ - def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0) /** Copy elements of this array to another array. * Fills the given array `xs` starting at index `start`. @@ -1455,7 +1456,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param start the starting index within the destination array. * @tparam B the type of the elements of the array. 
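   * A possible usage sketch (illustrative):
   * {{{
   *   val dest = new Array[Int](5)
   *   Array(1, 2, 3).copyToArray(dest, 1)   // returns 3; dest is now Array(0, 1, 2, 3, 0)
   * }}}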
*/ - def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) /** Copy elements of this array to another array. * Fills the given array `xs` starting at index `start` with at most `len` values. @@ -1467,7 +1468,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param len the maximal number of elements to copy. * @tparam B the type of the elements of the array. */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) if (copied > 0) { Array.copy(this.xs, 0, xs, start, copied) @@ -1476,7 +1477,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** Create a copy of this array with the specified element type. */ - def toArray[B >: A: ClassTag]: Array[B] = { + def toArray[sealed B >: A: ClassTag]: Array[B] = { val destination = new Array[B](xs.length) copyToArray(destination, 0) destination @@ -1495,7 +1496,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { // can't use a default arg because we already have another overload with a default arg /** Tests whether this array starts with the given array. */ - @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + @`inline` def startsWith[sealed B >: A](that: Array[B]): Boolean = startsWith(that, 0) /** Tests whether this array contains the given array at a given index. * @@ -1504,7 +1505,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return `true` if the array `that` is contained in this array at * index `offset`, otherwise `false`. */ - def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + def startsWith[sealed B >: A](that: Array[B], offset: Int): Boolean = { val safeOffset = offset.max(0) val thatl = that.length if(thatl > xs.length-safeOffset) thatl == 0 @@ -1523,7 +1524,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param that the array to test * @return `true` if this array has `that` as a suffix, `false` otherwise. */ - def endsWith[B >: A](that: Array[B]): Boolean = { + def endsWith[sealed B >: A](that: Array[B]): Boolean = { val thatl = that.length val off = xs.length - thatl if(off < 0) false @@ -1543,7 +1544,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. 
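   * For instance (illustrative):
   * {{{
   *   Array(1, 2, 3).updated(1, 5)   // Array(1, 5, 3)
   * }}}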
*/ - def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + def updated[sealed B >: A : ClassTag](index: Int, elem: B): Array[B] = { if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") val dest = toArray[B] dest(index) = elem From 2083af3e08f478f599847769d746761464635cb0 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 15:45:36 +0100 Subject: [PATCH 080/216] Make ArrayOps capture checked --- tests/pos-special/stdlib/collection/ArrayOps.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala index 13f3bd0d5ed2..a52fd0dbd162 100644 --- a/tests/pos-special/stdlib/collection/ArrayOps.scala +++ b/tests/pos-special/stdlib/collection/ArrayOps.scala @@ -197,7 +197,7 @@ object ArrayOps { * * @tparam A type of the elements contained in this array. */ -final class ArrayOps[sealed A](private val xs: Array[A]) /*extends AnyVal*/ { +final class ArrayOps[sealed A](private val xs: Array[A]) extends AnyVal { @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) From 9909d95dd82fe4ee261862bca5d407c16fed9fab Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 15:45:58 +0100 Subject: [PATCH 081/216] Add LazyZipOps and some other files to stdlib --- .../stdlib/collection/BufferedIterator.scala | 32 ++ .../stdlib/collection/Hashing.scala | 63 +++ .../stdlib/collection/Iterable.scala | 2 +- .../stdlib/collection/JavaConverters.scala | 336 ++++++++++++++ .../stdlib/collection/LazyZipOps.scala | 423 ++++++++++++++++++ 5 files changed, 855 insertions(+), 1 deletion(-) create mode 100644 tests/pos-special/stdlib/collection/BufferedIterator.scala create mode 100644 tests/pos-special/stdlib/collection/Hashing.scala create mode 100644 tests/pos-special/stdlib/collection/JavaConverters.scala create mode 100644 tests/pos-special/stdlib/collection/LazyZipOps.scala diff --git a/tests/pos-special/stdlib/collection/BufferedIterator.scala b/tests/pos-special/stdlib/collection/BufferedIterator.scala new file mode 100644 index 000000000000..cca40dd31d40 --- /dev/null +++ b/tests/pos-special/stdlib/collection/BufferedIterator.scala @@ -0,0 +1,32 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +/** Buffered iterators are iterators which provide a method `head` + * that inspects the next element without discarding it. + */ +trait BufferedIterator[+A] extends Iterator[A] { + + /** Returns next element of iterator without advancing beyond it. + */ + def head: A + + /** Returns an option of the next element of an iterator without advancing beyond it. 
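+   * An illustrative sketch:
+   * {{{
+   *   val it = Iterator(1, 2).buffered
+   *   it.headOption   // Some(1), the element is not consumed
+   *   it.next()       // 1
+   * }}}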
+ * @return the next element of this iterator if it has a next element + * `None` if it does not + */ + def headOption : Option[A] = if (hasNext) Some(head) else None + + override def buffered: this.type = this +} diff --git a/tests/pos-special/stdlib/collection/Hashing.scala b/tests/pos-special/stdlib/collection/Hashing.scala new file mode 100644 index 000000000000..772dcf5c65da --- /dev/null +++ b/tests/pos-special/stdlib/collection/Hashing.scala @@ -0,0 +1,63 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +import language.experimental.captureChecking + + +protected[collection] object Hashing { + + def elemHashCode(key: Any): Int = key.## + + def improve(hcode: Int): Int = { + var h: Int = hcode + ~(hcode << 9) + h = h ^ (h >>> 14) + h = h + (h << 4) + h ^ (h >>> 10) + } + + def computeHash(key: Any): Int = + improve(elemHashCode(key)) + + /** + * Utility method to keep a subset of all bits in a given bitmap + * + * Example + * bitmap (binary): 00000001000000010000000100000001 + * keep (binary): 1010 + * result (binary): 00000001000000000000000100000000 + * + * @param bitmap the bitmap + * @param keep a bitmask containing which bits to keep + * @return the original bitmap with all bits where keep is not 1 set to 0 + */ + def keepBits(bitmap: Int, keep: Int): Int = { + var result = 0 + var current = bitmap + var kept = keep + while (kept != 0) { + // lowest remaining bit in current + val lsb = current ^ (current & (current - 1)) + if ((kept & 1) != 0) { + // mark bit in result bitmap + result |= lsb + } + // clear lowest remaining one bit in abm + current &= ~lsb + // look at the next kept bit + kept >>>= 1 + } + result + } + +} diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala index 491d78a797dd..bca80d7be108 100644 --- a/tests/pos-special/stdlib/collection/Iterable.scala +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -96,7 +96,7 @@ trait Iterable[+A] extends IterableOnce[A] * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that) + def lazyZip[B](that: Iterable[B]^): LazyZip2[A, B, this.type]^{this, that} = new LazyZip2(this, this, that) } /** Base trait for Iterable operations diff --git a/tests/pos-special/stdlib/collection/JavaConverters.scala b/tests/pos-special/stdlib/collection/JavaConverters.scala new file mode 100644 index 000000000000..69130eae1829 --- /dev/null +++ b/tests/pos-special/stdlib/collection/JavaConverters.scala @@ -0,0 +1,336 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */
+
+package scala.collection
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+import scala.collection.convert._
+import scala.language.implicitConversions
+import language.experimental.captureChecking
+
+/** A variety of decorators that enable converting between
+ *  Scala and Java collections using extension methods, `asScala` and `asJava`.
+ *
+ *  The extension methods return adapters for the corresponding API.
+ *
+ *  The following conversions are supported via `asScala` and `asJava`:
+ *{{{
+ *    scala.collection.Iterable       <=> java.lang.Iterable
+ *    scala.collection.Iterator       <=> java.util.Iterator
+ *    scala.collection.mutable.Buffer <=> java.util.List
+ *    scala.collection.mutable.Set    <=> java.util.Set
+ *    scala.collection.mutable.Map    <=> java.util.Map
+ *    scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
+ *}}}
+ *  The following conversions are supported via `asScala` and through
+ *  specially-named extension methods to convert to Java collections, as shown:
+ *{{{
+ *    scala.collection.Iterable    <=> java.util.Collection   (via asJavaCollection)
+ *    scala.collection.Iterator    <=> java.util.Enumeration  (via asJavaEnumeration)
+ *    scala.collection.mutable.Map <=> java.util.Dictionary   (via asJavaDictionary)
+ *}}}
+ *  In addition, the following one-way conversions are provided via `asJava`:
+ *{{{
+ *    scala.collection.Seq         => java.util.List
+ *    scala.collection.mutable.Seq => java.util.List
+ *    scala.collection.Set         => java.util.Set
+ *    scala.collection.Map         => java.util.Map
+ *}}}
+ *  The following one-way conversion is provided via `asScala`:
+ *{{{
+ *    java.util.Properties => scala.collection.mutable.Map
+ *}}}
+ *  In all cases, converting from a source type to a target type and back
+ *  again will return the original source object. For example:
+ *  {{{
+ *    import scala.collection.JavaConverters._
+ *
+ *    val source = new scala.collection.mutable.ListBuffer[Int]
+ *    val target: java.util.List[Int] = source.asJava
+ *    val other: scala.collection.mutable.Buffer[Int] = target.asScala
+ *    assert(source eq other)
+ *  }}}
+ *  Alternatively, the conversion methods have descriptive names and can be invoked explicitly.
+ *  {{{
+ *    scala> val vs = java.util.Arrays.asList("hi", "bye")
+ *    vs: java.util.List[String] = [hi, bye]
+ *
+ *    scala> val ss = asScalaIterator(vs.iterator)
+ *    ss: Iterator[String] = <iterator>
+ *
+ *    scala> .toList
+ *    res0: List[String] = List(hi, bye)
+ *
+ *    scala> val ss = asScalaBuffer(vs)
+ *    ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye)
+ *  }}}
+ */
+@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
+object JavaConverters extends AsJavaConverters with AsScalaConverters {
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = asJava(i)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = asJava(i)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = asJava(b)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = asJava(s)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def seqAsJavaList[A](s: Seq[A]): ju.List[A] = asJava(s)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = asJava(s)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def setAsJavaSet[A](s: Set[A]): ju.Set[A] = asJava(s)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = asJava(m)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mapAsJavaMap[K, V](m: Map[K, V]): ju.Map[K, V] = asJava(m)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = asJava(m)
+
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = asScala(i)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = asScala(i)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = asScala(i)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = asScala(i)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = asScala(l)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = asScala(s)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = asScala(m)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = asScala(m)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = asScala(p)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = asScala(p)
+
+  // Deprecated implicit conversions for code that directly imports them
+
+  /**
+   * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`.
+   * @see [[asJavaIterator]]
+   */
+  implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
+    new AsJava(asJavaIterator(i))
+
+  /**
+   * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`.
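+   * For example (an illustrative sketch):
+   * {{{
+   *   import scala.collection.JavaConverters._
+   *   val e: java.util.Enumeration[Int] = Iterator(1, 2, 3).asJavaEnumeration
+   * }}}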
+ * @see [[asJavaEnumeration]] + */ + implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = + new AsJavaEnumeration(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`. + * @see [[asJavaIterable]] + */ + implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = + new AsJava(asJavaIterable(i)) + + /** + * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`. + * @see [[asJavaCollection]] + */ + implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = + new AsJavaCollection(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`. + * @see [[bufferAsJavaList]] + */ + implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = + new AsJava(bufferAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`. + * @see [[mutableSeqAsJavaList]] + */ + implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = + new AsJava(mutableSeqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`. + * @see [[seqAsJavaList]] + */ + implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = + new AsJava(seqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`. + * @see [[mutableSetAsJavaSet]] + */ + implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = + new AsJava(mutableSetAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`. + * @see [[setAsJavaSet]] + */ + implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = + new AsJava(setAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`. + * @see [[mutableMapAsJavaMap]] + */ + implicit def mutableMapAsJavaMapConverter[K, V](m : mutable.Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mutableMapAsJavaMap(m)) + + /** + * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`. + * @see [[asJavaDictionary]] + */ + implicit def asJavaDictionaryConverter[K, V](m : mutable.Map[K, V]): AsJavaDictionary[K, V] = + new AsJavaDictionary(m) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`. + * @see [[mapAsJavaMap]] + */ + implicit def mapAsJavaMapConverter[K, V](m : Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mapAsJavaMap(m)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. + * @see [[mapAsJavaConcurrentMap]]. + */ + implicit def mapAsJavaConcurrentMapConverter[K, V](m: concurrent.Map[K, V]): AsJava[juc.ConcurrentMap[K, V]] = + new AsJava(mapAsJavaConcurrentMap(m)) + + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`. + * @see [[asScalaIterator]] + */ + implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = + new AsScala(asScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`. 
+ * @see [[enumerationAsScalaIterator]] + */ + implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] = + new AsScala(enumerationAsScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`. + * @see [[iterableAsScalaIterable]] + */ + implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = + new AsScala(iterableAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Collection` to an Scala `Iterable`. + * @see [[collectionAsScalaIterable]] + */ + implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = + new AsScala(collectionAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`. + * @see [[asScalaBuffer]] + */ + implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] = + new AsScala(asScalaBuffer(l)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`. + * @see [[asScalaSet]] + */ + implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] = + new AsScala(asScalaSet(s)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`. + * @see [[mapAsScalaMap]] + */ + implicit def mapAsScalaMapConverter[K, V](m : ju.Map[K, V]): AsScala[mutable.Map[K, V]] = + new AsScala(mapAsScalaMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`. + * @see [[mapAsScalaConcurrentMap]] + */ + implicit def mapAsScalaConcurrentMapConverter[K, V](m: juc.ConcurrentMap[K, V]): AsScala[concurrent.Map[K, V]] = + new AsScala(mapAsScalaConcurrentMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`. + * @see [[dictionaryAsScalaMap]] + */ + implicit def dictionaryAsScalaMapConverter[K, V](p: ju.Dictionary[K, V]): AsScala[mutable.Map[K, V]] = + new AsScala(dictionaryAsScalaMap(p)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`. 
+ * @see [[propertiesAsScalaMap]] + */ + implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = + new AsScala(propertiesAsScalaMap(p)) + + + /** Generic class containing the `asJava` converter method */ + class AsJava[A](op: => A) { + /** Converts a Scala collection to the corresponding Java collection */ + def asJava: A = op + } + + /** Generic class containing the `asScala` converter method */ + class AsScala[A](op: => A) { + /** Converts a Java collection to the corresponding Scala collection */ + def asScala: A = op + } + + /** Generic class containing the `asJavaCollection` converter method */ + class AsJavaCollection[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Collection` */ + def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) + } + + /** Generic class containing the `asJavaEnumeration` converter method */ + class AsJavaEnumeration[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Enumeration` */ + def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) + } + + /** Generic class containing the `asJavaDictionary` converter method */ + class AsJavaDictionary[K, V](m : mutable.Map[K, V]) { + /** Converts a Scala `Map` to a Java `Dictionary` */ + def asJavaDictionary: ju.Dictionary[K, V] = JavaConverters.asJavaDictionary(m) + } +} diff --git a/tests/pos-special/stdlib/collection/LazyZipOps.scala b/tests/pos-special/stdlib/collection/LazyZipOps.scala new file mode 100644 index 000000000000..1bb4173d219f --- /dev/null +++ b/tests/pos-special/stdlib/collection/LazyZipOps.scala @@ -0,0 +1,423 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.language.implicitConversions +import language.experimental.captureChecking + +/** Decorator representing lazily zipped pairs. + * + * @define coll pair + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1]^, coll2: Iterable[El2]^) { + + /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator. + * + * @param that the iterable providing the third element of each eventual triple + * @tparam B the type of the third element in each eventual triple + * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or + * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported. 
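+   * An illustrative sketch (collections invented for the example):
+   * {{{
+   *   (List(1, 2) lazyZip List('a', 'b') lazyZip List("x", "y"))
+   *     .map((i, c, s) => s"$i$c$s")   // List("1ax", "2by")
+   * }}}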
+   */
+  def lazyZip[B](that: Iterable[B]^): LazyZip3[El1, El2, B, C1]^{this, that} = new LazyZip3(src, coll1, coll2, that)
+
+  def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = {
+    bf.fromSpecific(src)(new AbstractView[B] {
+      def iterator = new AbstractIterator[B] {
+        private[this] val elems1 = coll1.iterator
+        private[this] val elems2 = coll2.iterator
+        def hasNext = elems1.hasNext && elems2.hasNext
+        def next() = f(elems1.next(), elems2.next())
+      }
+      override def knownSize: Int = zipKnownSize
+      override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty
+    })
+  }
+
+  def flatMap[B, C](f: (El1, El2) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = {
+    bf.fromSpecific(src)(new AbstractView[B] {
+      def iterator = new AbstractIterator[B] {
+        private[this] val elems1 = coll1.iterator
+        private[this] val elems2 = coll2.iterator
+        private[this] var _current: Iterator[B] = Iterator.empty
+        private def current = {
+          while (!_current.hasNext && elems1.hasNext && elems2.hasNext)
+            _current = f(elems1.next(), elems2.next()).iterator
+          _current
+        }
+        def hasNext = current.hasNext
+        def next() = current.next()
+      }
+      override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize
+      override def isEmpty: Boolean = iterator.isEmpty
+    })
+  }
+
+  def filter[C](p: (El1, El2) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2), C]): C = {
+    bf.fromSpecific(src)(new AbstractView[(El1, El2)] {
+      def iterator = new AbstractIterator[(El1, El2)] {
+        private[this] val elems1 = coll1.iterator
+        private[this] val elems2 = coll2.iterator
+        private[this] var _current: (El1, El2) = _
+        private def current = {
+          while ((_current eq null) && elems1.hasNext && elems2.hasNext) {
+            val e1 = elems1.next()
+            val e2 = elems2.next()
+            if (p(e1, e2)) _current = (e1, e2)
+          }
+          _current
+        }
+        def hasNext = current ne null
+        def next() = {
+          val c = current
+          if (c ne null) {
+            _current = null
+            c
+          } else Iterator.empty.next()
+        }
+      }
+      override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize
+      override def isEmpty: Boolean = iterator.isEmpty
+    })
+  }
+
+  def exists(p: (El1, El2) => Boolean): Boolean = {
+    val elems1 = coll1.iterator
+    val elems2 = coll2.iterator
+    var res = false
+
+    while (!res && elems1.hasNext && elems2.hasNext) res = p(elems1.next(), elems2.next())
+
+    res
+  }
+
+  def forall(p: (El1, El2) => Boolean): Boolean = !exists((el1, el2) => !p(el1, el2))
+
+  def foreach[U](f: (El1, El2) => U): Unit = {
+    val elems1 = coll1.iterator
+    val elems2 = coll2.iterator
+
+    while (elems1.hasNext && elems2.hasNext) f(elems1.next(), elems2.next())
+  }
+
+  private def toIterable: View[(El1, El2)] = new AbstractView[(El1, El2)] {
+    def iterator = new AbstractIterator[(El1, El2)] {
+      private[this] val elems1 = coll1.iterator
+      private[this] val elems2 = coll2.iterator
+      def hasNext = elems1.hasNext && elems2.hasNext
+      def next() = (elems1.next(), elems2.next())
+    }
+    override def knownSize: Int = zipKnownSize
+    override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty
+  }
+
+  private def zipKnownSize: Int = {
+    val s1 = coll1.knownSize
+    if (s1 == 0) 0 else {
+      val s2 = coll2.knownSize
+      if (s2 == 0) 0 else s1 min s2
+    }
+  }
+
+  override def toString = s"$coll1.lazyZip($coll2)"
+}
+
+object LazyZip2 {
+  implicit def lazyZip2ToIterable[El1, El2](zipped2: LazyZip2[El1, El2, _]): View[(El1, El2)] = zipped2.toIterable
+}
+
+
+/** Decorator representing lazily zipped triples.
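+ * For instance (an illustrative sketch with invented operands):
+ * {{{
+ *   (List(1, 2) lazyZip List(3, 4) lazyZip List(5, 6))
+ *     .map((a, b, c) => a + b + c)   // List(9, 12)
+ * }}}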
+ * + * @define coll triple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, + coll1: Iterable[El1]^, + coll2: Iterable[El2]^, + coll3: Iterable[El3]^) { + + /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator. + * + * @param that the iterable providing the fourth element of each eventual 4-tuple + * @tparam B the type of the fourth element in each eventual 4-tuple + * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples. + * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]^): LazyZip4[El1, El2, El3, B, C1]^{this, that} = new LazyZip4(src, coll1, coll2, coll3, that) + + def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2, El3)] { + def iterator = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: (El1, El2, El3) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + if (p(e1, e2, e3)) _current = (e1, e2, e3) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + var res = false + + while (!res && elems1.hasNext && 
elems2.hasNext && elems3.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next()) + + res + } + + def forall(p: (El1, El2, El3) => Boolean): Boolean = !exists((el1, el2, el3) => !p(el1, el2, el3)) + + def foreach[U](f: (El1, El2, El3) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) + f(elems1.next(), elems2.next(), elems3.next()) + } + + private def toIterable: View[(El1, El2, El3)] = new AbstractView[(El1, El2, El3)] { + def iterator = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if (s3 == 0) 0 else s1 min s2 min s3 + } + } + } + + override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3)" +} + +object LazyZip3 { + implicit def lazyZip3ToIterable[El1, El2, El3](zipped3: LazyZip3[El1, El2, El3, _]): View[(El1, El2, El3)] = zipped3.toIterable +} + + + +/** Decorator representing lazily zipped 4-tuples. + * + * @define coll tuple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, + coll1: Iterable[El1]^, + coll2: Iterable[El2]^, + coll3: Iterable[El3]^, + coll4: Iterable[El4]^) { + + def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3, El4) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3, El4) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3, El4), C]): C = { + 
bf.fromSpecific(src)(new AbstractView[(El1, El2, El3, El4)] { + def iterator = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: (El1, El2, El3, El4) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + val e4 = elems4.next() + if (p(e1, e2, e3, e4)) _current = (e1, e2, e3, e4) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3, El4) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + + res + } + + def forall(p: (El1, El2, El3, El4) => Boolean): Boolean = !exists((el1, el2, el3, el4) => !p(el1, el2, el3, el4)) + + def foreach[U](f: (El1, El2, El3, El4) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + + private def toIterable: View[(El1, El2, El3, El4)] = new AbstractView[(El1, El2, El3, El4)] { + def iterator = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if (s3 == 0) 0 else { + val s4 = coll4.knownSize + if (s4 == 0) 0 else s1 min s2 min s3 min s4 + } + } + } + } + + override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3).lazyZip($coll4)" +} + +object LazyZip4 { + implicit def lazyZip4ToIterable[El1, El2, El3, El4](zipped4: LazyZip4[El1, El2, El3, El4, _]): View[(El1, El2, El3, El4)] = + zipped4.toIterable +} From caeac209d0d0f92ebdd3f32961e058a20cb458bb Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 15:53:08 +0100 Subject: [PATCH 082/216] Add mutable/TreeSet.scala to stdlib --- tests/pos-special/stdlib/collection/Set.scala | 3 +- .../collection/StrictOptimizedSetOps.scala | 30 +++ .../stdlib/collection/mutable/TreeSet.scala | 219 ++++++++++++++++++ 3 files changed, 251 insertions(+), 1 deletion(-) create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala create mode 100644 
tests/pos-special/stdlib/collection/mutable/TreeSet.scala diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala index 75707f204c02..a9c279b82a49 100644 --- a/tests/pos-special/stdlib/collection/Set.scala +++ b/tests/pos-special/stdlib/collection/Set.scala @@ -25,7 +25,8 @@ trait Set[A] extends Iterable[A] with SetOps[A, Set, Set[A]] with Equals - with IterableFactoryDefaults[A, Set] { + with IterableFactoryDefaults[A, Set] + with Pure { self: Set[A] => def canEqual(that: Any) = true diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala new file mode 100644 index 000000000000..8ed337fff998 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala @@ -0,0 +1,30 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +/** + * Trait that overrides set operations to take advantage of strict builders. + * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSetOps[A, +CC[_], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def concat(that: IterableOnce[A]): C = + strictOptimizedConcat(that, newSpecificBuilder) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala new file mode 100644 index 000000000000..59c68a768351 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala @@ -0,0 +1,219 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{RedBlackTree => RB} +import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable} +import language.experimental.captureChecking + +/** + * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. + * + * @param ordering the implicit ordering used to compare objects of type `A`. + * @tparam A the type of the keys contained in this tree set. + * + * @define Coll mutable.TreeSet + * @define coll mutable tree set + */ +// Original API designed in part by Lucien Pereira +sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedIterableOps[A, Set, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { + + if (ordering eq null) + throw new NullPointerException("ordering must not be null") + + /** + * Creates an empty `TreeSet`. 
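+   * For example (editor's illustrative sketch, not part of the original sources):
+   * {{{
+   *   val set = new mutable.TreeSet[Int]()
+   *   set += 3; set += 1; set += 2
+   *   set.toList // List(1, 2, 3): iteration follows the element ordering
+   * }}}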
+ * @param ord the implicit ordering used to compare objects of type `A`. + * @return an empty `TreeSet`. + */ + def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) + + override def sortedIterableFactory: SortedIterableFactory[TreeSet] = TreeSet + + def iterator: collection.Iterator[A] = RB.keysIterator(tree) + + def iteratorFrom(start: A): collection.Iterator[A] = RB.keysIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[A, Null] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + def addOne(elem: A): this.type = { + RB.insert(tree, elem, null) + this + } + + def subtractOne(elem: A): this.type = { + RB.delete(tree, elem) + this + } + + def clear(): Unit = RB.clear(tree) + + def contains(elem: A): Boolean = RB.contains(tree, elem) + + def unconstrained: collection.Set[A] = this + + def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetProjection(from, until) + + override protected[this] def className: String = "TreeSet" + + override def size: Int = RB.size(tree) + override def knownSize: Int = size + override def isEmpty: Boolean = RB.isEmpty(tree) + + override def head: A = RB.minKey(tree).get + + override def last: A = RB.maxKey(tree).get + + override def minAfter(key: A): Option[A] = RB.minKeyAfter(tree, key) + + override def maxBefore(key: A): Option[A] = RB.maxKeyBefore(tree, key) + + override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) + + + /** + * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa. + * + * Only keys between this projection's key range will ever appear as elements of this set, independently of whether + * the elements are added through the original set or through this view. That means that if one inserts an element in + * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. + * Mutations are always reflected in the original set, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + private[this] final class TreeSetProjection(from: Option[A], until: Option[A]) extends TreeSet[A](tree) { + + /** + * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). + */ + private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { + case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) + case (None, _) => newFrom + case _ => from + } + + /** + * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). 
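+   * For instance (editor's note): with an existing bound `until = Some(10)`, a new bound of
+   * `Some(7)` narrows the projection to `Some(7)`, while `Some(12)` leaves it at `Some(10)`.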
+ */ + private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { + case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) + case (None, _) => newUntil + case _ => until + } + + /** + * Returns true if the argument is inside the view bounds (between `from` and `until`). + */ + private[this] def isInsideViewBounds(key: A): Boolean = { + val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 + val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 + afterFrom && beforeUntil + } + + override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = + new TreeSetProjection(pickLowerBound(from), pickUpperBound(until)) + + override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) + + override def iterator = RB.keysIterator(tree, from, until) + override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) + + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 + override def isEmpty: Boolean = RB.size(tree) == 0 || !iterator.hasNext + + override def head: A = headOption.get + override def headOption: Option[A] = { + val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree) + (elem, until) match { + case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None + case _ => elem + } + } + + override def last: A = lastOption.get + override def lastOption = { + val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree) + (elem, from) match { + case (Some(e), Some(fr)) if ordering.compare(e, fr) < 0 => None + case _ => elem + } + } + + // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized + // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See + // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
+ override def foreach[U](f: A => U): Unit = iterator.foreach(f) + + override def clone(): mutable.TreeSet[A] = super.clone().rangeImpl(from, until) + + } + +} + +/** + * $factoryInfo + * @define Coll `mutable.TreeSet` + * @define coll mutable tree set + */ +@SerialVersionUID(3L) +object TreeSet extends SortedIterableFactory[TreeSet] { + + def empty[A : Ordering]: TreeSet[A] = new TreeSet[A]() + + def from[E](it: IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = + it match { + case ts: TreeSet[E] if ordering == ts.ordering => + new TreeSet[E](ts.tree.treeCopy()) + case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => + new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) + case r: Range if (ordering eq Ordering.Int) || (ordering eq Ordering.Int.reverse) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + new TreeSet[E](RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size)) + case _ => + val t: RB.Tree[E, Null] = RB.Tree.empty + val i = it.iterator + while (i.hasNext) RB.insert(t, i.next(), null) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { + private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty + def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } + def result(): TreeSet[A] = new TreeSet[A](tree) + def clear(): Unit = { tree = RB.Tree.empty } + } +} From 1f4f8755f7c3e3309114173c45e9204b5cfe01bb Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 16:01:31 +0100 Subject: [PATCH 083/216] Add mutable and concurrent Map to stdlib --- tests/pos-special/stdlib/collection/Map.scala | 3 +- .../stdlib/collection/concurrent/Map.scala | 193 +++++++++++++ .../stdlib/collection/mutable/Map.scala | 271 ++++++++++++++++++ 3 files changed, 466 insertions(+), 1 deletion(-) create mode 100644 tests/pos-special/stdlib/collection/concurrent/Map.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Map.scala diff --git a/tests/pos-special/stdlib/collection/Map.scala b/tests/pos-special/stdlib/collection/Map.scala index bcfd8d593eda..6816a1fd174a 100644 --- a/tests/pos-special/stdlib/collection/Map.scala +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -24,7 +24,8 @@ trait Map[K, +V] extends Iterable[(K, V)] with MapOps[K, V, Map, Map[K, V]] with MapFactoryDefaults[K, V, Map, Iterable] - with Equals { + with Equals + with Pure { def mapFactory: scala.collection.MapFactory[Map] = Map diff --git a/tests/pos-special/stdlib/collection/concurrent/Map.scala b/tests/pos-special/stdlib/collection/concurrent/Map.scala new file mode 100644 index 000000000000..d985dad2edc5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/Map.scala @@ -0,0 +1,193 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.concurrent + +import language.experimental.captureChecking +import scala.annotation.tailrec + +/** A template trait for mutable maps that allow concurrent access. 
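+ *
+ * A sketch of the compare-and-swap loop these atomic operations enable (editor's addition, not
+ * part of the original sources; `TrieMap` is the standard implementation of this trait):
+ * {{{
+ *   val m = scala.collection.concurrent.TrieMap.empty[String, Int]
+ *   m.putIfAbsent("hits", 0)                  // atomic: only inserts if absent
+ *   var done = false
+ *   while (!done) {
+ *     val old = m("hits")
+ *     done = m.replace("hits", old, old + 1)  // retried if another thread won the race
+ *   }
+ * }}}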
+ * + * $concurrentmapinfo + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]] + * section on `Concurrent Maps` for more information. + * + * @tparam K the key type of the map + * @tparam V the value type of the map + * + * @define Coll `concurrent.Map` + * @define coll concurrent map + * @define concurrentmapinfo + * This is a base trait for all Scala concurrent map implementations. It + * provides all of the methods a `Map` does, with the difference that all the + * changes are atomic. It also describes methods specific to concurrent maps. + * + * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values. + * + * @define atomicop + * This is an atomic operation. + */ +trait Map[K, V] extends scala.collection.mutable.Map[K, V] { + + /** + * Associates the given key with a given value, unless the key was already + * associated with some other value. + * + * $atomicop + * + * @param k key with which the specified value is to be associated with + * @param v value to be associated with the specified key + * @return `Some(oldvalue)` if there was a value `oldvalue` previously + * associated with the specified key, or `None` if there was no + * mapping for the specified key + */ + def putIfAbsent(k: K, v: V): Option[V] + + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. + * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + def remove(k: K, v: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldvalue value expected to be associated with the specified key + * if replacing is to happen + * @param newvalue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + def replace(k: K, oldvalue: V, newvalue: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped + * to some value. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param v value to be associated with the specified key + * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise + */ + def replace(k: K, v: V): Option[V] + + override def getOrElseUpdate(key: K, op: => V): V = get(key) match { + case Some(v) => v + case None => + val v = op + putIfAbsent(key, v) match { + case Some(ov) => ov + case None => v + } + } + + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support removal based on reference + * equality, and for those implementations, object equality is used instead. 
+ * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v) + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support replacement based on reference + * equality, and for those implementations, object equality is used instead. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldValue value expected to be associated with the specified key + * if replacing is to happen + * @param newValue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * If the map is updated by another concurrent access, the remapping function will be retried until successfully updated. + * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return the new value associated with the specified key + */ + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = updateWithAux(key)(remappingFunction) + + @tailrec + private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + val previousValue = get(key) + val nextValue = remappingFunction(previousValue) + previousValue match { + case Some(prev) => nextValue match { + case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue + case _ => if (removeRefEq(key, prev)) return None + } + case _ => nextValue match { + case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue + case _ => return None + } + } + updateWithAux(key)(remappingFunction) + } + + private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + if (!p(k, v)) removeRefEq(k, v) + } + this + } + + private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + replaceRefEq(k, v, f(k, v)) + } + this + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Map.scala b/tests/pos-special/stdlib/collection/mutable/Map.scala new file mode 100644 index 000000000000..dab64ddb1f58 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Map.scala @@ -0,0 +1,271 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import language.experimental.captureChecking + +/** Base type of mutable Maps */ +trait Map[K, V] + extends Iterable[(K, V)] + with collection.Map[K, V] + with MapOps[K, V, Map, Map[K, V]] + with Growable[(K, V)] + with Shrinkable[K] + with MapFactoryDefaults[K, V, Map, Iterable] { + + override def mapFactory: scala.collection.MapFactory[Map] = Map + + /* + //TODO consider keeping `remove` because it returns the removed entry + @deprecated("Use subtract or -= instead of remove", "2.13.0") + def remove(key: K): Option[V] = { + val old = get(key) + if(old.isDefined) subtract(key) + old + } + */ + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault(d: K -> V): Map[K, V] = new Map.WithDefault[K, V](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue(d: V): Map[K, V] = new Map.WithDefault[K, V](this, x => d) +} + +/** + * @define coll mutable map + * @define Coll `mutable.Map` + */ +trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends IterableOps[(K, V), Iterable, C] + with collection.MapOps[K, V, CC, C] + with Cloneable[C] + with Builder[(K, V), C] + with Growable[(K, V)] + with Shrinkable[K] + with Pure { + + def result(): C = coll + + @deprecated("Use - or remove on an immutable Map", "2.13.0") + final def - (key: K): C = clone() -= key + + @deprecated("Use -- or removeAll on an immutable Map", "2.13.0") + final def - (key1: K, key2: K, keys: K*): C = clone() -= key1 -= key2 --= keys + + /** Adds a new key/value pair to this map and optionally returns previously bound value. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. + * + * @param key the key to update + * @param value the new value + * @return an option value containing the value associated with the key + * before the `put` operation was executed, or `None` if `key` + * was not defined in the map before. + */ + def put(key: K, value: V): Option[V] = { + val r = get(key) + update(key, value) + r + } + + /** Adds a new key/value pair to this map. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. + * + * @param key The key to update + * @param value The new value + */ + def update(key: K, value: V): Unit = { coll += ((key, value)) } + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. 
+ * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return the new value associated with the specified key + */ + def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + val previousValue = this.get(key) + val nextValue = remappingFunction(previousValue) + (previousValue, nextValue) match { + case (None, None) => // do nothing + case (Some(_), None) => this.remove(key) + case (_, Some(v)) => this.update(key,v) + } + nextValue + } + + /** If given key is already in this map, returns associated value. + * + * Otherwise, computes value from given expression `op`, stores with key + * in map and returns that value. + * + * Concurrent map implementations may evaluate the expression `op` + * multiple times, or may evaluate `op` without inserting the result. + * + * @param key the key to test + * @param op the computation yielding the value to associate with `key`, if + * `key` is previously unbound. + * @return the value associated with key (either previously or as a result + * of executing the method). + */ + def getOrElseUpdate(key: K, op: => V): V = + get(key) match { + case Some(v) => v + case None => val d = op; this(key) = d; d + } + + /** Removes a key from this map, returning the value associated previously + * with that key as an option. + * @param key the key to be removed + * @return an option value containing the value associated previously with `key`, + * or `None` if `key` was not defined in the map before. + */ + def remove(key: K): Option[V] = { + val r = get(key) + if (r.isDefined) this -= key + r + } + + def clear(): Unit = { keysIterator foreach -= } + + override def clone(): C = empty ++= this + + @deprecated("Use filterInPlace instead", "2.13.0") + @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p) + + /** Retains only those mappings for which the predicate + * `p` returns `true`. + * + * @param p The test predicate + */ + def filterInPlace(p: (K, V) => Boolean): this.type = { + if (!isEmpty) this match { + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p) + case _ => + val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + if (!p(k, v)) { + this -= k + } + i += 1 + } + } + this + } + + @deprecated("Use mapValuesInPlace instead", "2.13.0") + @inline final def transform(f: (K, V) => V): this.type = mapValuesInPlace(f) + + /** Applies a transformation function to all values contained in this map. + * The transformation function produces new values from existing keys + * associated values. + * + * @param f the transformation to apply + * @return the map itself. 
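+   * For example (editor's illustrative sketch, not part of the original sources):
+   * {{{
+   *   val m = mutable.Map("a" -> 1, "b" -> 2)
+   *   m.mapValuesInPlace((k, v) => v * 10) // m is now Map("a" -> 10, "b" -> 20)
+   * }}}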
+ */ + def mapValuesInPlace(f: (K, V) => V): this.type = { + if (!isEmpty) this match { + case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f) + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f) + case _ => + val array = this.toArray[Any] + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + update(k, f(k, v)) + i += 1 + } + } + this + } + + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) + + override def knownSize: Int = super[IterableOps].knownSize +} + +/** + * $factoryInfo + * @define coll mutable map + * @define Coll `mutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory.Delegate[Map](HashMap) { + + @SerialVersionUID(3L) + class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K -> V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + override def default(key: K): V = defaultValue(key) + + def iterator: scala.collection.Iterator[(K, V)] = underlying.iterator + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = underlying.knownSize + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def clear(): Unit = underlying.clear() + + def get(key: K): Option[V] = underlying.get(key) + + def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): Map[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) + } + +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
*/ +abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V] From 3dc0eae125898e755604ff46e6ac025873e85d66 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 16:11:57 +0100 Subject: [PATCH 084/216] Add HashMaps to stdlib --- tests/pos-special/stdlib/collection/Seq.scala | 6 +- .../collection/StrictOptimizedSeqOps.scala | 5 +- .../stdlib/collection/mutable/HashMap.scala | 655 ++++++++++++++++++ .../collection/mutable/LinkedHashMap.scala | 510 ++++++++++++++ .../collection/mutable/LinkedHashSet.scala | 349 ++++++++++ 5 files changed, 1520 insertions(+), 5 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/mutable/HashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala index 3a30906b467c..365a1db1b849 100644 --- a/tests/pos-special/stdlib/collection/Seq.scala +++ b/tests/pos-special/stdlib/collection/Seq.scala @@ -893,7 +893,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => * part of the result, but any following occurrences will. */ def diff[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) fromSpecific(iterator.filter { x => var include = false occ.updateWith(x) { @@ -918,7 +918,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => * in the result, but any following occurrences will be omitted. */ def intersect[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) fromSpecific(iterator.filter { x => var include = true occ.updateWith(x) { @@ -966,7 +966,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => iterableFactory.from(new View.Updated(this, index, elem)) } - protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { + protected[collection] def occCounts[sealed B](sq: Seq[B]): mutable.Map[B, Int] = { val occ = new mutable.HashMap[B, Int]() for (y <- sq) occ.updateWith(y) { case None => Some(1) diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala index 50ddbca30f9e..14dea1694d09 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -12,6 +12,7 @@ package scala.collection import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** * Trait that overrides operations on sequences in order @@ -79,7 +80,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def diff[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) coll else { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { @@ -97,7 +98,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def intersect[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) empty else { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { diff --git a/tests/pos-special/stdlib/collection/mutable/HashMap.scala b/tests/pos-special/stdlib/collection/mutable/HashMap.scala new file mode 100644 index 000000000000..ab45e7ffc73d --- /dev/null +++ 
b/tests/pos-special/stdlib/collection/mutable/HashMap.scala @@ -0,0 +1,655 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** This class implements mutable maps using a hashtable. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values assigned to keys in this hash map. + * + * @define Coll `mutable.HashMap` + * @define coll mutable hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0") +class HashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double) + extends AbstractMap[K, V] + with MapOps[K, V, HashMap, HashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]] + with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] + with MapFactoryDefaults[K, V, HashMap, Iterable] + with Serializable { + + /* The HashMap class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendent hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + def this() = this(HashMap.defaultInitialCapacity, HashMap.defaultLoadFactor) + + import HashMap.Node + + /** The actual hash table. */ + private[this] var table = new Array[Node[K, V]](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of an original (`any.##`) hash. */ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the + // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement + // algorithm as in java.util.HashMap. + // + // This function is also its own inverse. 
That is, for all ints i, improveHash(improveHash(i)) = i + // this allows us to retrieve the original hash when we need it, for instance when appending to an immutable.HashMap + // and that is why unimproveHash simply forwards to this method + originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(key: K): Boolean = findNode(key) ne null + + @`inline` private[this] def findNode(key: K): Node[K, V] = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findNode(key, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { + sizeHint(xs.knownSize) + + xs match { + case hm: immutable.HashMap[K, V] => + hm.foreachWithHash((k, v, h) => put0(k, v, improveHash(h), getOld = false)) + this + case hm: mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + put0(next.key, next.value, next.hash, getOld = false) + } + this + case lhm: mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val entry = iter.next() + put0(entry.key, entry.value, entry.hash, getOld = false) + } + this + case thatMap: Map[K, V] => + thatMap.foreachEntry { (key: K, value: V) => + put0(key, value, improveHash(key.##), getOld = false) + } + this + case _ => + super.addAll(xs) + } + } + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... 
+ super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundNode: Node[K, V] = null + var previousNode: Node[K, V] = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findNode(prev: Node[K, V], nd: Node[K, V], k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousNode = prev + foundNode = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findNode(nd, nd.next, k, h) + } + + findNode(null, nd, key, hash) + } + + val previousValue = foundNode match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousNode != null) previousNode.next = foundNode.next + else table(indexedHash) = foundNode.next + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundNode.value = newValue + } + nextValue + } + } + + override def subtractAll(xs: IterableOnce[K]^): this.type = { + if (size == 0) { + return this + } + + xs match { + case hs: immutable.HashSet[K] => + hs.foreachWithHashWhile { (k, h) => + remove0(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[K] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + remove0(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: mutable.LinkedHashSet[K] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove0(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } + } + + /** Adds a key-value pair to this map + * + * @param key the key to add + * @param value the value to add + * @param hash the **improved** hashcode of `key` (see computeHash) + * @param getOld if true, then the previous value for `key` will be returned, otherwise, false + */ + private[this] def put0(key: K, value: V, hash: Int, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = new Node[K, V](key, hash, value, null) + case old => + var prev: Node[K, V] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if(getOld) Some(old) else null + } + prev = n + n = n.next + } + if(prev eq null) table(idx) = new Node(key, hash, value, old) + else prev.next = new Node(key, hash, value, prev.next) + } + contentSize += 1 + null + } + + private def remove0(elem: K) : Node[K, V] = remove0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def remove0(elem: K, 
hash: Int) : Node[K, V] = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + private[this] abstract class HashMapIterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + private[this] var node: Node[K, V] = null + private[this] val len = table.length + + protected[this] def extract(nd: Node[K, V]): A + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } + } + + def next(): A = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } + } + + override def iterator: Iterator[(K, V)] = + if(size == 0) Iterator.empty + else new HashMapIterator[(K, V)] { + protected[this] def extract(nd: Node[K, V]) = (nd.key, nd.value) + } + + override def keysIterator: Iterator[K] = + if(size == 0) Iterator.empty + else new HashMapIterator[K] { + protected[this] def extract(nd: Node[K, V]) = nd.key + } + + override def valuesIterator: Iterator[V] = + if(size == 0) Iterator.empty + else new HashMapIterator[V] { + protected[this] def extract(nd: Node[K, V]) = nd.value + } + + + /** Returns an iterator over the nodes stored in this HashMap */ + private[collection] def nodeIterator: Iterator[Node[K, V]] = + if(size == 0) Iterator.empty + else new HashMapIterator[Node[K, V]] { + protected[this] def extract(nd: Node[K, V]) = nd + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. + parUnbox(new convert.impl.AnyTableStepper[(K, V), Node[K, V]](size, table, _.next, node => (node.key, node.value), 0, table.length)). 
+ asInstanceOf[S with EfficientSplit] + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[K, Node[K, V]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.value.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[V, Node[K, V]](size, table, _.next, _.value, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + if (newlen < 0) + throw new RuntimeException(s"new HashMap table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[K, V] = preLow + var lastHigh: Node[K, V] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd.value) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd.value + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. 
but in the common case, we can avoid the Option boxing. + val nd = findNode(key) + if (nd eq null) default else nd.value + } + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... + super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findNode(key, hash) + } + if(nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + } + } + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + override def remove(key: K): Option[V] = remove0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreachEntry(f) + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new mutable.HashMap.DeserializationFactory[K, V](table.length, loadFactor), this) + + override def filterInPlace(p: (K, V) => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key, head.value)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + if (p(next.key, next.value)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + bucket += 1 + } + } + this + } + + // TODO: rename to `mapValuesInPlace` and override the base version (not binary compatible) + private[mutable] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val len = table.length + var i = 0 + while (i < len) { + var n = table(i) + while (n ne null) { + n.value = f(n.key, n.value) + n = n.next + } + i += 1 + } + this + } + + override def mapFactory: MapFactory[HashMap] = HashMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "HashMap" + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new HashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[K, V]): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } +} + +/** + * $factoryInfo + * @define Coll `mutable.HashMap` + * @define coll mutable hash map + */ 
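+// Illustrative use of the companion factory below (editor's sketch, not part of the original patch):
+//   val m = HashMap.from(List("a" -> 1, "b" -> 2))
+//   m.getOrElseUpdate("c", 3) // inserts "c" -> 3 and returns 3
+//   m.remove("a")             // Some(1)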
+@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + def empty[sealed K, sealed V]: HashMap[K, V] = new HashMap[K, V] + + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): HashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashMap[K, V](cap, defaultLoadFactor).addAll(it) + } + + def newBuilder[sealed K, sealed V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[sealed K, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = + new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) + def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor) + } + + private[collection] final class Node[K, V](_key: K, _hash: Int, private[this] var _value: V, private[this] var _next: Node[K, V]) { + def key: K = _key + def hash: Int = _hash + def value: V = _value + def value_= (v: V): Unit = _value = v + def next: Node[K, V] = _next + def next_= (n: Node[K, V]): Unit = _next = n + + @tailrec + def findNode(k: K, h: Int): Node[K, V] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: ((K, V)) => U): Unit = { + f((_key, _value)) + if(_next ne null) _next.foreach(f) + } + + @tailrec + def foreachEntry[U](f: (K, V) => U): Unit = { + f(_key, _value) + if(_next ne null) _next.foreachEntry(f) + } + + override def toString = s"Node($key, $value, $hash) -> $next" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala new file mode 100644 index 000000000000..a253e8738b26 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala @@ -0,0 +1,510 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + + +/** This class implements mutable maps using a hashtable. + * The iterator and all traversal methods of this class visit elements in the order they were inserted. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values assigned to keys in this hash map. 
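+ *
+ * Iteration follows insertion order, not key order (editor's illustration, not part of the
+ * original sources):
+ * {{{
+ *   val m = LinkedHashMap("b" -> 2)
+ *   m += ("a" -> 1)
+ *   m.keys.toList // List("b", "a")
+ * }}}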
+ * + * @define Coll `LinkedHashMap` + * @define coll linked hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11") +class LinkedHashMap[sealed K, sealed V] + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]] + with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with MapFactoryDefaults[K, V, LinkedHashMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap + + // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper + // would not return the elements in insertion order + + private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] + + private[collection] def _firstEntry: Entry = firstEntry + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null + + /* Uses the same implementation as mutable.HashMap. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize)) + + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def last: (K, V) = + if (size > 0) (lastEntry.key, lastEntry.value) + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") + + override def lastOption: Option[(K, V)] = + if (size > 0) Some((lastEntry.key, lastEntry.value)) + else None + + override def head: (K, V) = + if (size > 0) (firstEntry.key, firstEntry.value) + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") + + override def headOption: Option[(K, V)] = + if (size > 0) Some((firstEntry.key, firstEntry.value)) + else None + + override def size = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + + def get(key: K): Option[V] = { + val e = findEntry(key) + if (e == null) None + else Some(e.value) + } + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashMap.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } + + override def contains(key: K): Boolean = { + if (getClass eq classOf[LinkedHashMap[_, _]]) + findEntry(key) != null + else + super.contains(key) // A subclass might override `get`, use the default implementation `contains`. + } + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + override def remove(key: K): Option[V] = removeEntry0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. but in the common case, we can avoid the Option boxing. 
+ val nd = findEntry(key) + if (nd eq null) default else nd.value + } + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findEntry(key, hash) + } + if (nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if (contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + } + } + + private[this] def removeEntry0(elem: K): Entry = removeEntry0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def removeEntry0(elem: K, hash: Int): Entry = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + /** Computes the improved hash of an original (`any.##`) hash. 
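+   *
+   * Mixing the high bits into the low half (`h ^ (h >>> 16)`) matters because
+   * the bucket index keeps only the low bits (`hash & (table.length - 1)`);
+   * without it, hashes that differ only in their high bits would all collide.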
*/ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findEntry(key: K): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) + } + } + + def addOne(kv: (K, V)): this.type = { + put(kv._1, kv._2) + this + } + + def subtractOne(key: K): this.type = { + remove(key) + this + } + + private[this] abstract class LinkedHashMapIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } + else Iterator.empty.next() + } + + def iterator: Iterator[(K, V)] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[(K, V)] { + def extract(nd: Entry): (K, V) = (nd.key, nd.value) + } + + protected class LinkedKeySet extends KeySet { + override def iterableFactory: IterableFactory[collection.Set] = LinkedHashSet + } + + override def keySet: collection.Set[K] = new LinkedKeySet + + override def keysIterator: Iterator[K] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[K] { + def extract(nd: Entry): K = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[Entry] { + def extract(nd: Entry): Entry = nd + } + + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... 
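+      // ... so fall back to the generic implementation here. As an illustration
+      // of the semantics: `m.updateWith(k)(_.map(_ + 1))` bumps an existing value
+      // and leaves a missing key absent, while `m.updateWith(k)(_ => None)`
+      // removes the key if present.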
+ super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundEntry: Entry = null + var previousEntry: Entry = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findEntry(prev: Entry, nd: Entry, k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousEntry = prev + foundEntry = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findEntry(nd, nd.next, k, h) + } + + findEntry(null, nd, key, hash) + } + + val previousValue = foundEntry match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousEntry != null) previousEntry.next = foundEntry.next + else table(indexedHash) = foundEntry.next + deleteEntry(foundEntry) + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundEntry.value = newValue + } + nextValue + } + } + + override def valuesIterator: Iterator[V] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[V] { + def extract(nd: Entry): V = nd.value + } + + + override def foreach[U](f: ((K, V)) => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f((cur.key, cur.value)) + cur = cur.later + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f(cur.key, cur.value) + cur = cur.later + } + } + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt + + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. 
+ * */ + private[this] def createNewEntry(key: K, hash: Int, value: V): Entry = { + val e = new Entry(key, hash, value) + if (firstEntry eq null) firstEntry = e + else { + lastEntry.later = e + e.earlier = lastEntry + } + lastEntry = e + e + } + + /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null + } + + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = createNewEntry(key, hash, value) + case old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if (getOld) Some(old) else null + } + prev = n + n = n.next + } + val nnode = createNewEntry(key, hash, value) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + null + } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + val preHigh = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
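+      // The capacity is a power of two, so each old bucket i splits in two:
+      // entries whose (hash & oldlen) == 0 stay at index i, the others move
+      // to index i + oldlen; relative order within each half is preserved.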
+ while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new LinkedHashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashMap" +} + +/** $factoryInfo + * @define Coll `LinkedHashMap` + * @define coll linked hash map + */ +@SerialVersionUID(3L) +object LinkedHashMap extends MapFactory[LinkedHashMap] { + + def empty[sealed K, sealed V] = new LinkedHashMap[K, V] + + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^) = { + val newlhm = empty[K, V] + newlhm.sizeHint(it.knownSize) + newlhm.addAll(it) + newlhm + } + + def newBuilder[sealed K, sealed V] = new GrowableBuilder(empty[K, V]) + + /** Class for the linked hash map entry, used internally. + */ + private[mutable] final class LinkedEntry[sealed K, sealed V](val key: K, val hash: Int, var value: V) { + var earlier: LinkedEntry[K, V] = null + var later: LinkedEntry[K, V] = null + var next: LinkedEntry[K, V] = null + + @tailrec + final def findEntry(k: K, h: Int): LinkedEntry[K, V] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) + } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 +} diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala new file mode 100644 index 000000000000..a895034a852c --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala @@ -0,0 +1,349 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** This class implements mutable sets using a hashtable. + * The iterator and all traversal methods of this class visit elements in the order they were inserted. + * + * @tparam A the type of the elements contained in this set. 
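+ *
+ * @example Duplicates are ignored and insertion order is kept (illustrative):
+ * {{{
+ *   val s = scala.collection.mutable.LinkedHashSet(3, 1, 3, 2)
+ *   s.toList // List(3, 1, 2)
+ * }}}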
+ * + * @define Coll `LinkedHashSet` + * @define coll linked hash set + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecatedInheritance("LinkedHashSet will be made final", "2.13.11") +class LinkedHashSet[sealed A] + extends AbstractSet[A] + with SetOps[A, LinkedHashSet, LinkedHashSet[A]] + with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] + with IterableFactoryDefaults[A, LinkedHashSet] + with DefaultSerializable { + + override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet + + // stepper is not overridden to use XTableStepper because that stepper would not return the + // elements in insertion order + + /*private*/ type Entry = LinkedHashSet.Entry[A] + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null + + /* Uses the same implementation as mutable.HashSet. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashSet.defaultinitialSize)) + + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def last: A = + if (size > 0) lastEntry.key + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") + + override def lastOption: Option[A] = + if (size > 0) Some(lastEntry.key) + else None + + override def head: A = + if (size > 0) firstEntry.key + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") + + override def headOption: Option[A] = + if (size > 0) Some(firstEntry.key) + else None + + override def size: Int = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + + def contains(elem: A): Boolean = findEntry(elem) ne null + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashSet.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } + + override def add(elem: A): Boolean = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(elem) + put0(elem, hash, index(hash)) + } + + def addOne(elem: A): this.type = { + add(elem) + this + } + + def subtractOne(elem: A): this.type = { + remove(elem) + this + } + + override def remove(elem: A): Boolean = remove0(elem, computeHash(elem)) + + private[this] abstract class LinkedHashSetIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } + else Iterator.empty.next() + } + + def iterator: Iterator[A] = new LinkedHashSetIterator[A] { + override def extract(nd: Entry): A = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = new LinkedHashSetIterator[Entry] { + override def extract(nd: Entry): Entry = nd + } + + override def foreach[U](f: A => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f(cur.key) + cur = cur.later + } + } + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def tableSizeFor(capacity: Int) = + 
(Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashSet.defaultLoadFactor).toInt + + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findEntry(key: A): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) + } + } + + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. + * */ + private[this] def createNewEntry(key: A, hash: Int): Entry = { + val e = new Entry(key, hash) + if (firstEntry eq null) firstEntry = e + else { + lastEntry.later = e + e.earlier = lastEntry + } + lastEntry = e + e + } + + /** Delete the entry from the LinkedHashSet, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null + } + + private[this] def put0(elem: A, hash: Int, idx: Int): Boolean = { + table(idx) match { + case null => + table(idx) = createNewEntry(elem, hash) + case old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + val nnode = createNewEntry(elem, hash) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + true + } + + private[this] def remove0(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[A], 0) + val preHigh = new Entry(null.asInstanceOf[A], 0) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
+ while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + val setHashIterator = + if (isEmpty) this.iterator + else { + new LinkedHashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = unimproveHash(nd.hash) + this + } + } + } + MurmurHash3.unorderedHash(setHashIterator, MurmurHash3.setSeed) + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashSet" +} + +/** $factoryInfo + * @define Coll `LinkedHashSet` + * @define coll linked hash set + */ +@SerialVersionUID(3L) +object LinkedHashSet extends IterableFactory[LinkedHashSet] { + + override def empty[sealed A]: LinkedHashSet[A] = new LinkedHashSet[A] + + def from[sealed E](it: collection.IterableOnce[E]^) = { + val newlhs = empty[E] + newlhs.sizeHint(it.knownSize) + newlhs.addAll(it) + newlhs + } + + def newBuilder[sealed A] = new GrowableBuilder(empty[A]) + + /** Class for the linked hash set entry, used internally. + */ + private[mutable] final class Entry[sealed A](val key: A, val hash: Int) { + var earlier: Entry[A] = null + var later: Entry[A] = null + var next: Entry[A] = null + + @tailrec + final def findEntry(k: A, h: Int): Entry[A] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) + } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 +} + From 20f3a97dd68b0e7bd528a6bb764f0b6b421464c8 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 16:30:26 +0100 Subject: [PATCH 085/216] Add LongMap + friends to stdlib --- .../collection/mutable/ImmutableBuilder.scala | 32 + .../stdlib/collection/mutable/ListMap.scala | 83 +++ .../stdlib/collection/mutable/LongMap.scala | 674 ++++++++++++++++++ 3 files changed, 789 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ListMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/LongMap.scala diff --git a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala new file mode 100644 index 000000000000..1af98162e9f3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala @@ -0,0 +1,32 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable +import language.experimental.captureChecking + + +/** + * Reusable builder for immutable collections + */ +abstract class ImmutableBuilder[-A, C <: IterableOnce[_]](empty: C) + extends ReusableBuilder[A, C] { + + protected var elems: C = empty + + def clear(): Unit = { elems = empty } + + def result(): C = elems + + override def knownSize: Int = elems.knownSize +} diff --git a/tests/pos-special/stdlib/collection/mutable/ListMap.scala b/tests/pos-special/stdlib/collection/mutable/ListMap.scala new file mode 100644 index 000000000000..8ddbc264e47b --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ListMap.scala @@ -0,0 +1,83 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.List +import language.experimental.captureChecking + +/** A simple mutable map backed by a list, so it preserves insertion order. + * + * @tparam K the type of the keys contained in this list map. + * @tparam V the type of the values assigned to keys in this list map. + * + * @define Coll `mutable.ListMap` + * @define coll mutable list map + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +class ListMap[sealed K, sealed V] + extends AbstractMap[K, V] + with MapOps[K, V, ListMap, ListMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]] + with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] + with MapFactoryDefaults[K, V, ListMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[ListMap] = ListMap + + private[this] var elems: List[(K, V)] = List() + private[this] var siz: Int = 0 + + def get(key: K): Option[V] = elems find (_._1 == key) map (_._2) + def iterator: Iterator[(K, V)] = elems.iterator + + final override def addOne(kv: (K, V)) = { + val (e, key0) = remove(kv._1, elems, List()) + elems = (key0, kv._2) :: e + siz += 1; this + } + + final override def subtractOne(key: K) = { elems = remove(key, elems, List())._1; this } + + @tailrec + private def remove(key: K, elems: List[(K, V)], acc: List[(K, V)]): (List[(K, V)], K) = { + if (elems.isEmpty) (acc, key) + else if (elems.head._1 == key) { siz -= 1; (acc ::: elems.tail, elems.head._1) } + else remove(key, elems.tail, elems.head :: acc) + } + + final override def clear(): Unit = { elems = List(); siz = 0 } + + final override def size: Int = siz + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + override protected[this] def stringPrefix = "ListMap" +} + +/** $factoryInfo + * @define Coll `mutable.ListMap` + * @define coll mutable list map + */ +@SerialVersionUID(3L) +@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +object ListMap extends MapFactory[ListMap] { + def empty[sealed K, sealed V]: ListMap[K, V] = new ListMap[K, V] + def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): ListMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[sealed K, sealed V]: Builder[(K, V), 
ListMap[K,V]] = new GrowableBuilder(empty[K, V]) +} diff --git a/tests/pos-special/stdlib/collection/mutable/LongMap.scala b/tests/pos-special/stdlib/collection/mutable/LongMap.scala new file mode 100644 index 000000000000..2c757160ec77 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LongMap.scala @@ -0,0 +1,674 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions +import language.experimental.captureChecking + +/** This class implements mutable maps with `Long` keys based on a hash table with open addressing. + * + * Basic map operations on single entries, including `contains` and `get`, + * are typically substantially faster with `LongMap` than [[HashMap]]. Methods + * that act on the whole map, including `foreach` and `map` are not in + * general expected to be faster than with a generic map, save for those + * that take particular advantage of the internal structure of the map: + * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. + * + * Maps with open addressing may become less efficient at lookup after + * repeated addition/removal of elements. Although `LongMap` makes a + * decent attempt to remain efficient regardless, calling `repack` + * on a map that will no longer have elements removed but will be + * used heavily may save both time and storage space. + * + * This map is not intended to contain more than 2^29 entries (approximately + * 500 million). The maximum capacity is 2^30, but performance will degrade + * rapidly as 2^30 is approached. + * + */ +final class LongMap[sealed V] private[collection] (defaultEntry: Long -> V, initialBufferSize: Int, initBlank: Boolean) + extends AbstractMap[Long, V] + with MapOps[Long, V, Map, LongMap[V]] + with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]] + with Serializable { + import LongMap._ + + def this() = this(LongMap.exceptionDefault, 16, true) + + // TODO: override clear() with an optimization more tailored for efficiency. + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]^): LongMap[V] = { + //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? + val b = newSpecificBuilder + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V]) + + /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */ + def this(defaultEntry: Long -> V) = this(defaultEntry, 16, true) + + /** Creates a new `LongMap` with an initial buffer of specified size. + * + * A LongMap can typically contain half as many elements as its buffer size + * before it requires resizing. + */ + def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) + + /** Creates a new `LongMap` with specified default values and initial buffer size. 
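+   *
+   * For illustration, `new LongMap[String](_.toString, 64)` answers misses with
+   * the key rendered as a string and starts from a 64-slot buffer.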
*/ + def this(defaultEntry: Long -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + + private[this] var mask = 0 + private[this] var extraKeys: Int = 0 + private[this] var zeroValue: AnyRef = null + private[this] var minValue: AnyRef = null + private[this] var _size = 0 + private[this] var _vacant = 0 + private[this] var _keys: Array[Long] = null + private[this] var _values: Array[AnyRef] = null + + if (initBlank) defaultInitialize(initialBufferSize) + + private[this] def defaultInitialize(n: Int) = { + mask = + if (n<0) 0x7 + else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 + _keys = new Array[Long](mask+1) + _values = new Array[AnyRef](mask+1) + } + + private[collection] def initializeTo( + m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] + ): Unit = { + mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz + } + + override def size: Int = _size + (extraKeys+1)/2 + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + override def empty: LongMap[V] = new LongMap() + + private def imbalanced: Boolean = + (_size + _vacant) > 0.5*mask || _vacant > _size + + private def toIndex(k: Long): Int = { + // Part of the MurmurHash3 32 bit finalizer + val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt + val x = (h ^ (h >>> 16)) * 0x85EBCA6B + (x ^ (x >>> 13)) & mask + } + + private def seekEmpty(k: Long): Int = { + var e = toIndex(k) + var x = 0 + while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + e + } + + private def seekEntry(k: Long): Int = { + var e = toIndex(k) + var x = 0 + var q = 0L + while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + e | MissingBit + } + + private def seekEntryOrOpen(k: Long): Int = { + var e = toIndex(k) + var x = 0 + var q = 0L + while ({ q = _keys(e); if (q==k) return e; q+q != 0}) { + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + if (q == 0) return e | MissingBit + val o = e | MissVacant + while ({ q = _keys(e); if (q==k) return e; q != 0}) { + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + o + } + + override def contains(key: Long): Boolean = { + if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0 + else seekEntry(key) >= 0 + } + + override def get(key: Long): Option[V] = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) None + else if (key == 0) Some(zeroValue.asInstanceOf[V]) + else Some(minValue.asInstanceOf[V]) + } + else { + val i = seekEntry(key) + if (i < 0) None else Some(_values(i).asInstanceOf[V]) + } + } + + override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) default + else if (key == 0) zeroValue.asInstanceOf[V1] + else minValue.asInstanceOf[V1] + } + else { + val i = seekEntry(key) + if (i < 0) default else _values(i).asInstanceOf[V1] + } + } + + override def getOrElseUpdate(key: Long, defaultValue: => V): V = { + if (key == -key) { + val kbits = (key>>>63).toInt + 1 + if ((kbits & extraKeys) == 0) { + val value = defaultValue + extraKeys |= kbits + if (key == 0) zeroValue = value.asInstanceOf[AnyRef] + else minValue = value.asInstanceOf[AnyRef] + value + } + else if (key == 0) zeroValue.asInstanceOf[V] + else minValue.asInstanceOf[V] + } + else { + var i = seekEntryOrOpen(key) + if (i < 0) { + // It is possible that the default value computation was side-effecting + // Our hash table may have resized or 
even contain what we want now
+        // (but if it does, we'll replace it)
+        val value = {
+          val ok = _keys
+          val ans = defaultValue
+          if (ok ne _keys) {
+            i = seekEntryOrOpen(key)
+            if (i >= 0) _size -= 1
+          }
+          ans
+        }
+        _size += 1
+        val j = i & IndexMask
+        _keys(j) = key
+        _values(j) = value.asInstanceOf[AnyRef]
+        if ((i & VacantBit) != 0) _vacant -= 1
+        else if (imbalanced) repack()
+        value
+      }
+      else _values(i).asInstanceOf[V]
+    }
+  }
+
+  /** Retrieves the value associated with a key, or the default for that type if none exists
+   * (null for AnyRef, 0 for floats and integers).
+   *
+   * Note: this is the fastest way to retrieve a value that may or
+   * may not exist, if the default null/zero is acceptable. For key/value
+   * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast.
+   */
+  def getOrNull(key: Long): V = {
+    if (key == -key) {
+      if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V]
+      else if (key == 0) zeroValue.asInstanceOf[V]
+      else minValue.asInstanceOf[V]
+    }
+    else {
+      val i = seekEntry(key)
+      if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V]
+    }
+  }
+
+  /** Retrieves the value associated with a key.
+   * If the key does not exist in the map, the `defaultEntry` for that key
+   * will be returned instead.
+   */
+  override def apply(key: Long): V = {
+    if (key == -key) {
+      if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key)
+      else if (key == 0) zeroValue.asInstanceOf[V]
+      else minValue.asInstanceOf[V]
+    }
+    else {
+      val i = seekEntry(key)
+      if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
+    }
+  }
+
+  /** The user-supplied default value for the key. Throws an exception
+   * if no other default behavior was specified.
+   */
+  override def default(key: Long) = defaultEntry(key)
+
+  private def repack(newMask: Int): Unit = {
+    val ok = _keys
+    val ov = _values
+    mask = newMask
+    _keys = new Array[Long](mask+1)
+    _values = new Array[AnyRef](mask+1)
+    _vacant = 0
+    var i = 0
+    while (i < ok.length) {
+      val k = ok(i)
+      if (k != -k) {
+        val j = seekEmpty(k)
+        _keys(j) = k
+        _values(j) = ov(i)
+      }
+      i += 1
+    }
+  }
+
+  /** Repacks the contents of this `LongMap` for maximum efficiency of lookup.
+   *
+   * For maps that undergo a complex creation process with both addition and
+   * removal of keys, and then are used heavily with no further removal of
+   * elements, calling `repack` after the end of the creation can result in
+   * improved performance. Repacking takes time proportional to the number
+   * of entries in the map.
+   */
+  def repack(): Unit = {
+    var m = mask
+    if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask
+    while (m > 8 && 8*_size < m) m = m >>> 1
+    repack(m)
+  }
+
+  override def put(key: Long, value: V): Option[V] = {
+    if (key == -key) {
+      if (key == 0) {
+        val ans = if ((extraKeys&1) == 1) Some(zeroValue.asInstanceOf[V]) else None
+        zeroValue = value.asInstanceOf[AnyRef]
+        extraKeys |= 1
+        ans
+      }
+      else {
+        // `extraKeys & 2` is either 0 or 2, so compare against 2; comparing
+        // against 1 would always be false and drop the previous value.
+        val ans = if ((extraKeys&2) == 2) Some(minValue.asInstanceOf[V]) else None
+        minValue = value.asInstanceOf[AnyRef]
+        extraKeys |= 2
+        ans
+      }
+    }
+    else {
+      val i = seekEntryOrOpen(key)
+      if (i < 0) {
+        val j = i & IndexMask
+        _keys(j) = key
+        _values(j) = value.asInstanceOf[AnyRef]
+        _size += 1
+        if ((i & VacantBit) != 0) _vacant -= 1
+        else if (imbalanced) repack()
+        None
+      }
+      else {
+        val ans = Some(_values(i).asInstanceOf[V])
+        _keys(i) = key
+        _values(i) = value.asInstanceOf[AnyRef]
+        ans
+      }
+    }
+  }
+
+  /** Updates the map to include a new key-value pair.
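+   * Unlike `put`, the previous value is not returned, so no `Option` is allocated.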
+ * + * This is the fastest way to add an entry to a `LongMap`. + */ + override def update(key: Long, value: V): Unit = { + if (key == -key) { + if (key == 0) { + zeroValue = value.asInstanceOf[AnyRef] + extraKeys |= 1 + } + else { + minValue = value.asInstanceOf[AnyRef] + extraKeys |= 2 + } + } + else { + val i = seekEntryOrOpen(key) + if (i < 0) { + val j = i & IndexMask + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + } + else { + _keys(i) = key + _values(i) = value.asInstanceOf[AnyRef] + } + } + } + + /** Adds a new key/value pair to this map and returns the map. */ + @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") + def +=(key: Long, value: V): this.type = { update(key, value); this } + + /** Adds a new key/value pair to this map and returns the map. */ + @inline final def addOne(key: Long, value: V): this.type = { update(key, value); this } + + @inline override final def addOne(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } + + def subtractOne(key: Long): this.type = { + if (key == -key) { + if (key == 0L) { + extraKeys &= 0x2 + zeroValue = null + } + else { + extraKeys &= 0x1 + minValue = null + } + } + else { + val i = seekEntry(key) + if (i >= 0) { + _size -= 1 + _vacant += 1 + _keys(i) = Long.MinValue + _values(i) = null + } + } + this + } + + def iterator: Iterator[(Long, V)] = new AbstractIterator[(Long, V)] { + private[this] val kz = _keys + private[this] val vz = _values + + private[this] var nextPair: (Long, V) = + if (extraKeys==0) null + else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V]) + else (Long.MinValue, minValue.asInstanceOf[V]) + + private[this] var anotherPair: (Long, V) = + if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V]) + else null + + private[this] var index = 0 + + def hasNext: Boolean = nextPair != null || (index < kz.length && { + var q = kz(index) + while (q == -q) { + index += 1 + if (index >= kz.length) return false + q = kz(index) + } + nextPair = (kz(index), vz(index).asInstanceOf[V]) + index += 1 + true + }) + def next() = { + if (nextPair == null && !hasNext) throw new NoSuchElementException("next") + val ans = nextPair + if (anotherPair != null) { + nextPair = anotherPair + anotherPair = null + } + else nextPair = null + ans + } + } + + // TODO PERF override these for efficiency. See immutable.LongMap for how to organize the code. 
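+  // 0L and Long.MinValue are the only longs for which k == -k; both are held
+  // outside the hash array (in zeroValue/minValue, flagged by bits 0 and 1 of
+  // extraKeys), which is why the loops below skip slots where k == -k.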
+ override def keysIterator: Iterator[Long] = super.keysIterator + override def valuesIterator: Iterator[V] = super.valuesIterator + + override def foreach[U](f: ((Long,V)) => U): Unit = { + if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) + if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f((k, _values(i).asInstanceOf[V])) + } + i += 1 + } + } + + override def foreachEntry[U](f: (Long,V) => U): Unit = { + if ((extraKeys & 1) == 1) f(0L, zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(Long.MinValue, minValue.asInstanceOf[V]) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(k, _values(i).asInstanceOf[V]) + } + i += 1 + } + } + + override def clone(): LongMap[V] = { + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = java.util.Arrays.copyOf(_values, _values.length) + val lm = new LongMap[V](defaultEntry, 1, false) + lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) + lm + } + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + lm += kv + lm + } + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [sealed V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { + val m = this + elem1 + elem2 + if(elems.isEmpty) m else m.concat(elems) + } + + override def concat[sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + xs.iterator.foreach(kv => lm += kv) + lm + } + + override def ++ [sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(xs) + + @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = + clone().asInstanceOf[LongMap[V1]].addOne(key, value) + + /** Applies a function to all keys of this map. */ + def foreachKey[A](f: Long => A): Unit = { + if ((extraKeys & 1) == 1) f(0L) + if ((extraKeys & 2) == 2) f(Long.MinValue) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(k) + } + i += 1 + } + } + + /** Applies a function to all values of this map. */ + def foreachValue[A](f: V => A): Unit = { + if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(_values(i).asInstanceOf[V]) + } + i += 1 + } + } + + /** Creates a new `LongMap` with different values. + * Unlike `mapValues`, this method generates a new + * collection immediately. 
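+   *
+   * For example (illustrative): `LongMap(1L -> 1, 2L -> 2).mapValuesNow(_ * 10)`
+   * produces a map sending 1 to 10 and 2 to 20.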
+ */ + def mapValuesNow[sealed V1](f: V => V1): LongMap[V1] = { + val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null + val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null + val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = new Array[AnyRef](_values.length) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz) + lm + } + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + def transformValuesInPlace(f: V => V): this.type = { + if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] + if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + this + } + + def map[sealed V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + + def flatMap[sealed V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + def collect[sealed V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this) + + override protected[this] def className = "LongMap" +} + +object LongMap { + private final val IndexMask = 0x3FFFFFFF + private final val MissingBit = 0x80000000 + private final val VacantBit = 0x40000000 + private final val MissVacant = 0xC0000000 + + private val exceptionDefault: Long -> Nothing = (k: Long) => throw new NoSuchElementException(k.toString) + + /** A builder for instances of `LongMap`. + * + * This builder can be reused to create multiple instances. + */ + final class LongMapBuilder[sealed V] extends ReusableBuilder[(Long, V), LongMap[V]] { + private[collection] var elems: LongMap[V] = new LongMap[V] + override def addOne(entry: (Long, V)): this.type = { + elems += entry + this + } + def clear(): Unit = elems = new LongMap[V] + def result(): LongMap[V] = elems + override def knownSize: Int = elems.knownSize + } + + /** Creates a new `LongMap` with zero or more key/value pairs. */ + def apply[sealed V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) + + private def buildFromIterableOnce[sealed V](elems: IterableOnce[(Long, V)]^): LongMap[V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 + val lm = new LongMap[V](sz * 2) + elems.iterator.foreach{ case (k,v) => lm(k) = v } + if (lm.size < (sz>>3)) lm.repack() + lm + } + + /** Creates a new empty `LongMap`. 
*/ + def empty[sealed V]: LongMap[V] = new LongMap[V] + + /** Creates a new empty `LongMap` with the supplied default */ + def withDefault[sealed V](default: Long -> V): LongMap[V] = new LongMap[V](default) + + /** Creates a new `LongMap` from an existing source collection. A source collection + * which is already a `LongMap` gets cloned. + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new `LongMap` with the elements of `source` + */ + def from[sealed V](source: IterableOnce[(Long, V)]^): LongMap[V] = source match { + case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]] + case _ => buildFromIterableOnce(source) + } + + def newBuilder[sealed V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] + + /** Creates a new `LongMap` from arrays of keys and values. + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. + */ + def fromZip[sealed V](keys: Array[Long], values: Array[V]): LongMap[V] = { + val sz = math.min(keys.length, values.length) + val lm = new LongMap[V](sz * 2) + var i = 0 + while (i < sz) { lm(keys(i)) = values(i); i += 1 } + if (lm.size < (sz>>3)) lm.repack() + lm + } + + /** Creates a new `LongMap` from keys and values. + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. + */ + def fromZip[sealed V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { + val sz = math.min(keys.size, values.size) + val lm = new LongMap[V](sz * 2) + val ki = keys.iterator + val vi = values.iterator + while (ki.hasNext && vi.hasNext) lm(ki.next()) = vi.next() + if (lm.size < (sz >> 3)) lm.repack() + lm + } + + implicit def toFactory[sealed V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] + private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) + def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[sealed V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) +} From ae605d671dc659762e84d98417dc8f46275759fa Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 16:34:55 +0100 Subject: [PATCH 086/216] Add WeakHashMap and MultiMap to stdlib --- .../stdlib/collection/mutable/MultiMap.scala | 116 ++++++++++++++++++ .../collection/mutable/WeakHashMap.scala | 56 +++++++++ 2 files changed, 172 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/MultiMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala new file mode 100644 index 000000000000..0b250a5548ef --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala @@ -0,0 +1,116 @@ +/* + * 
Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import language.experimental.captureChecking + +/** A trait for mutable maps with multiple values assigned to a key. + * + * This class is typically used as a mixin. It turns maps which map `K` + * to `Set[V]` objects into multimaps that map `K` to `V` objects. + * + * @example {{{ + * // first import all necessary types from package `collection.mutable` + * import collection.mutable.{ HashMap, MultiMap, Set } + * + * // to create a `MultiMap` the easiest way is to mixin it into a normal + * // `Map` instance + * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String] + * + * // to add key-value pairs to a multimap it is important to use + * // the method `addBinding` because standard methods like `+` will + * // overwrite the complete key-value pair instead of adding the + * // value to the existing key + * mm.addBinding(1, "a") + * mm.addBinding(2, "b") + * mm.addBinding(1, "c") + * + * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))` + * + * // to check if the multimap contains a value there is method + * // `entryExists`, which allows to traverse the including set + * mm.entryExists(1, _ == "a") == true + * mm.entryExists(1, _ == "b") == false + * mm.entryExists(2, _ == "b") == true + * + * // to remove a previous added value there is the method `removeBinding` + * mm.removeBinding(1, "a") + * mm.entryExists(1, _ == "a") == false + * }}} + * + * @define coll multimap + * @define Coll `MultiMap` + */ +@deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0") +trait MultiMap[K, V] extends Map[K, Set[V]] { + /** Creates a new set. + * + * Classes that use this trait as a mixin can override this method + * to have the desired implementation of sets assigned to new keys. + * By default this is `HashSet`. + * + * @return An empty set of values of type `V`. + */ + protected def makeSet: Set[V] = new HashSet[V] + + /** Assigns the specified `value` to a specified `key`. If the key + * already has a binding to equal to `value`, nothing is changed; + * otherwise a new binding is added for that `key`. + * + * @param key The key to which to bind the new value. + * @param value The value to bind to the key. + * @return A reference to this multimap. + */ + def addBinding(key: K, value: V): this.type = { + get(key) match { + case None => + val set = makeSet + set += value + this(key) = set + case Some(set) => + set += value + } + this + } + + /** Removes the binding of `value` to `key` if it exists, otherwise this + * operation doesn't have any effect. + * + * If this was the last value assigned to the specified key, the + * set assigned to that key will be removed as well. + * + * @param key The key of the binding. + * @param value The value to remove. + * @return A reference to this multimap. + */ + def removeBinding(key: K, value: V): this.type = { + get(key) match { + case None => + case Some(set) => + set -= value + if (set.isEmpty) this -= key + } + this + } + + /** Checks if there exists a binding to `key` such that it satisfies the predicate `p`. + * + * @param key The key for which the predicate is checked. + * @param p The predicate which a value assigned to the key must satisfy. 
+ * @return A boolean if such a binding exists + */ + def entryExists(key: K, p: V => Boolean): Boolean = get(key) match { + case None => false + case Some(set) => set exists p + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala new file mode 100644 index 000000000000..a9498b7fc69b --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala @@ -0,0 +1,56 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike} +import language.experimental.captureChecking + +/** A hash map with references to entries which are weakly reachable. Entries are + * removed from this map when the key is no longer (strongly) referenced. This class wraps + * `java.util.WeakHashMap`. + * + * @tparam K type of keys contained in this map + * @tparam V type of values associated with the keys + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]] + * section on `Weak Hash Maps` for more information. + * + * @define Coll `WeakHashMap` + * @define coll weak hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +class WeakHashMap[sealed K, sealed V] extends JMapWrapper[K, V](new java.util.WeakHashMap) + with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]] + with MapFactoryDefaults[K, V, WeakHashMap, Iterable] { + override def empty = new WeakHashMap[K, V] + override def mapFactory: MapFactory[WeakHashMap] = WeakHashMap + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "WeakHashMap" +} + +/** $factoryInfo + * @define Coll `WeakHashMap` + * @define coll weak hash map + */ +@SerialVersionUID(3L) +object WeakHashMap extends MapFactory[WeakHashMap] { + def empty[sealed K, sealed V]: WeakHashMap[K,V] = new WeakHashMap[K, V] + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): WeakHashMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[sealed K, sealed V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) +} + From b0d3bbc8e1c9e512dbc4fe1a489fd3333fff16e2 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 16:40:26 +0100 Subject: [PATCH 087/216] Add UnrolledBuffer to stdlib --- .../collection/mutable/UnrolledBuffer.scala | 443 ++++++++++++++++++ .../stdlib/collection/mutable/package.scala | 42 ++ 2 files changed, 485 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/package.scala diff --git a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala new file mode 100644 index 000000000000..2015b76a31b8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala @@ -0,0 +1,443 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import scala.reflect.ClassTag +import scala.collection.immutable.Nil +import language.experimental.captureChecking + +/** A buffer that stores elements in an unrolled linked list. + * + * Unrolled linked lists store elements in linked fixed size + * arrays. + * + * Unrolled buffers retain locality and low memory overhead + * properties of array buffers, but offer much more efficient + * element addition, since they never reallocate and copy the + * internal array. + * + * However, they provide `O(n/m)` complexity random access, + * where `n` is the number of elements, and `m` the size of + * internal array chunks. + * + * Ideal to use when: + * - elements are added to the buffer and then all of the + * elements are traversed sequentially + * - two unrolled buffers need to be concatenated (see `concat`) + * + * Better than singly linked lists for random access, but + * should still be avoided for such a purpose. + * + * @define coll unrolled buffer + * @define Coll `UnrolledBuffer` + * + */ +@SerialVersionUID(3L) +sealed class UnrolledBuffer[sealed T](implicit val tag: ClassTag[T]) + extends AbstractBuffer[T] + with Buffer[T] + with Seq[T] + with SeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with StrictOptimizedSeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with EvidenceIterableFactoryDefaults[T, UnrolledBuffer, ClassTag] + with Builder[T, UnrolledBuffer[T]] + with DefaultSerializable { + + import UnrolledBuffer.Unrolled + + @transient private var headptr = newUnrolled + @transient private var lastptr = headptr + @transient private var sz = 0 + + private[collection] def headPtr = headptr + private[collection] def headPtr_=(head: Unrolled[T]) = headptr = head + private[collection] def lastPtr = lastptr + private[collection] def lastPtr_=(last: Unrolled[T]) = lastptr = last + private[collection] def size_=(s: Int) = sz = s + + protected def evidenceIterableFactory: UnrolledBuffer.type = UnrolledBuffer + protected def iterableEvidence: ClassTag[T] = tag + + override def iterableFactory: SeqFactory[UnrolledBuffer] = UnrolledBuffer.untagged + + protected def newUnrolled = new Unrolled[T](this) + + // The below would allow more flexible behavior without requiring inheritance + // that is risky because all the important internals are private. + // private var myLengthPolicy: Int => Int = x => x + // + // /** Specifies how the array lengths should vary. + // * + // * By default, `UnrolledBuffer` uses arrays of a fixed size. A length + // * policy can be given that changes this scheme to, for instance, an + // * exponential growth. + // * + // * @param nextLength computes the length of the next array from the length of the latest one + // */ + // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength } + private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz) + + def classTagCompanion = UnrolledBuffer + + /** Concatenates the target unrolled buffer to this unrolled buffer. + * + * The specified buffer `that` is cleared after this operation. This is + * an O(1) operation. 
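+   * For example (an illustrative sketch; `UnrolledBuffer(...)` is the
+   * companion factory's `apply` method):
+   * {{{
+   * val a = UnrolledBuffer(1, 2)
+   * val b = UnrolledBuffer(3)
+   * a concat b  // a now contains 1, 2, 3; b has been cleared
+   * }}}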
+ * + * @param that the unrolled buffer whose elements are added to this buffer + */ + def concat(that: UnrolledBuffer[T]) = { + // bind the two together + if (!lastptr.bind(that.headptr)) lastptr = that.lastPtr + + // update size + sz += that.sz + + // `that` is no longer usable, so clear it + // here we rely on the fact that `clear` allocates + // new nodes instead of modifying the previous ones + that.clear() + + // return a reference to this + this + } + + def addOne(elem: T) = { + lastptr = lastptr.append(elem) + sz += 1 + this + } + + def clear(): Unit = { + headptr = newUnrolled + lastptr = headptr + sz = 0 + } + + def iterator: Iterator[T] = new AbstractIterator[T] { + var pos: Int = -1 + var node: Unrolled[T] = headptr + scan() + + private def scan(): Unit = { + pos += 1 + while (pos >= node.size) { + pos = 0 + node = node.next + if (node eq null) return + } + } + def hasNext = node ne null + def next() = if (hasNext) { + val r = node.array(pos) + scan() + r + } else Iterator.empty.next() + } + + // this should be faster than the iterator + override def foreach[U](f: T => U) = headptr.foreach(f) + + def result() = this + + def length = sz + + override def knownSize: Int = sz + + def apply(idx: Int) = + if (idx >= 0 && idx < sz) headptr(idx) + else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + def update(idx: Int, newelem: T) = + if (idx >= 0 && idx < sz) headptr(idx) = newelem + else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + def mapInPlace(f: T => T): this.type = { + headptr.mapInPlace(f) + this + } + + def remove(idx: Int) = + if (idx >= 0 && idx < sz) { + sz -= 1 + headptr.remove(idx, this) + } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + @tailrec final def remove(idx: Int, count: Int): Unit = + if (count > 0) { + remove(idx) + remove(idx, count-1) + } + + def prepend(elem: T) = { + headptr = headptr prepend elem + sz += 1 + this + } + + def insert(idx: Int, elem: T): Unit = + insertAll(idx, elem :: Nil) + + def insertAll(idx: Int, elems: IterableOnce[T]^): Unit = + if (idx >= 0 && idx <= sz) { + sz += headptr.insertAll(idx, elems, this) + } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + override def subtractOne(elem: T): this.type = { + if (headptr.subtractOne(elem, this)) { + sz -= 1 + } + this + } + + def patchInPlace(from: Int, patch: collection.IterableOnce[T]^, replaced: Int): this.type = { + remove(from, replaced) + insertAll(from, patch) + this + } + + private def writeObject(out: java.io.ObjectOutputStream): Unit = { + out.defaultWriteObject + out writeInt sz + for (elem <- this) out writeObject elem + } + + private def readObject(in: java.io.ObjectInputStream): Unit = { + in.defaultReadObject + + val num = in.readInt + + headPtr = newUnrolled + lastPtr = headPtr + sz = 0 + var i = 0 + while (i < num) { + this += in.readObject.asInstanceOf[T] + i += 1 + } + } + + override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this + + override protected[this] def className = "UnrolledBuffer" +} + + +@SerialVersionUID(3L) +object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] { self => + + val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self) + + def empty[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + def from[sealed A : ClassTag](source: scala.collection.IterableOnce[A]^): UnrolledBuffer[A] = 
newBuilder[A].addAll(source) + + def newBuilder[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + final val waterline: Int = 50 + + final def waterlineDenom: Int = 100 + + @deprecated("Use waterlineDenom instead.", "2.13.0") + final val waterlineDelim: Int = waterlineDenom + + private[collection] val unrolledlength = 32 + + /** Unrolled buffer node. + */ + class Unrolled[sealed T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { + private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) + private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) + + private def nextlength = if (buff eq null) unrolledlength else buff.calcNextLength(array.length) + + // adds and returns itself or the new unrolled if full + @tailrec final def append(elem: T): Unrolled[T] = if (size < array.length) { + array(size) = elem + size += 1 + this + } else { + next = new Unrolled[T](0, new Array[T](nextlength), null, buff) + next append elem + } + def foreach[U](f: T => U): Unit = { + var unrolled = this + var i = 0 + while (unrolled ne null) { + val chunkarr = unrolled.array + val chunksz = unrolled.size + while (i < chunksz) { + val elem = chunkarr(i) + f(elem) + i += 1 + } + i = 0 + unrolled = unrolled.next + } + } + def mapInPlace(f: T => T): Unit = { + var unrolled = this + var i = 0 + while (unrolled ne null) { + val chunkarr = unrolled.array + val chunksz = unrolled.size + while (i < chunksz) { + val elem = chunkarr(i) + chunkarr(i) = f(elem) + i += 1 + } + i = 0 + unrolled = unrolled.next + } + } + @tailrec final def apply(idx: Int): T = + if (idx < size) array(idx) else next.apply(idx - size) + @tailrec final def update(idx: Int, newelem: T): Unit = + if (idx < size) array(idx) = newelem else next.update(idx - size, newelem) + @tailrec final def locate(idx: Int): Unrolled[T] = + if (idx < size) this else next.locate(idx - size) + def prepend(elem: T) = if (size < array.length) { + // shift the elements of the array right + // then insert the element + shiftright() + array(0) = elem + size += 1 + this + } else { + // allocate a new node and store element + // then make it point to this + val newhead = new Unrolled[T](buff) + newhead append elem + newhead.next = this + newhead + } + // shifts right assuming enough space + private def shiftright(): Unit = { + var i = size - 1 + while (i >= 0) { + array(i + 1) = array(i) + i -= 1 + } + } + // returns pointer to new last if changed + @tailrec final def remove(idx: Int, buffer: UnrolledBuffer[T]): T = + if (idx < size) { + // remove the element + // then try to merge with the next bucket + val r = array(idx) + shiftleft(idx) + size -= 1 + if (tryMergeWithNext()) buffer.lastPtr = this + r + } else next.remove(idx - size, buffer) + + @tailrec final def subtractOne(elem: T, buffer: UnrolledBuffer[T]): Boolean = { + var i = 0 + while (i < size) { + if(array(i) == elem) { + remove(i, buffer) + return true + } + i += 1 + } + if(next ne null) next.subtractOne(elem, buffer) else false + } + + // shifts left elements after `leftb` (overwrites `leftb`) + private def shiftleft(leftb: Int): Unit = { + var i = leftb + while (i < (size - 1)) { + array(i) = array(i + 1) + i += 1 + } + nullout(i, i + 1) + } + protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDenom)) { + // copy the next array, then discard the next node + Array.copy(next.array, 0, array, 
size, next.size) + size = size + next.size + next = next.next + if (next eq null) true else false // checks if last node was thrown out + } else false + + @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T]^, buffer: UnrolledBuffer[T]): Int = { + if (idx < size) { + // divide this node at the appropriate position and insert all into head + // update new next + val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) + Array.copy(array, idx, newnextnode.array, 0, size - idx) + newnextnode.size = size - idx + newnextnode.next = next + + // update this + nullout(idx, size) + size = idx + next = null + + // insert everything from iterable to this + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + curr.next = newnextnode + + // try to merge the last node of this with the newnextnode and fix tail pointer if needed + if (curr.tryMergeWithNext()) buffer.lastPtr = curr + else if (newnextnode.next eq null) buffer.lastPtr = newnextnode + appended + } + else if (idx == size || (next eq null)) { + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + appended + } + else next.insertAll(idx - size, t, buffer) + } + + private def nullout(from: Int, until: Int): Unit = { + var idx = from + while (idx < until) { + array(idx) = null.asInstanceOf[T] // TODO find a way to assign a default here!! + idx += 1 + } + } + + // assumes this is the last node + // `thathead` and `thatlast` are head and last node + // of the other unrolled list, respectively + def bind(thathead: Unrolled[T]) = { + assert(next eq null) + next = thathead + tryMergeWithNext() + } + + override def toString: String = + array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "") + } +} + +// This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: +// Todo -- revisit whether inheritance is the best way to achieve this functionality +private[collection] class DoublingUnrolledBuffer[sealed T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { + override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz + override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/package.scala b/tests/pos-special/stdlib/collection/mutable/package.scala new file mode 100644 index 000000000000..d658ca5bc65a --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/package.scala @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +import language.experimental.captureChecking + + +package object mutable { + @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") + type WrappedArray[X] = ArraySeq[X] + @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") + val WrappedArray = ArraySeq + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") + type ArrayStack[X] = Stack[X] + @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") + val ArrayStack = Stack + + @deprecated("mutable.LinearSeq has been removed; use LinearSeq with mutable.Seq instead", "2.13.0") + type LinearSeq[X] = Seq[X] with scala.collection.LinearSeq[X] + + @deprecated("GrowingBuilder has been renamed to GrowableBuilder", "2.13.0") + type GrowingBuilder[Elem, To <: Growable[Elem]] = GrowableBuilder[Elem, To] + + @deprecated("IndexedOptimizedSeq has been renamed to IndexedSeq", "2.13.0") + type IndexedOptimizedSeq[A] = IndexedSeq[A] + + @deprecated("IndexedOptimizedBuffer has been renamed to IndexedBuffer", "2.13.0") + type IndexedOptimizedBuffer[A] = IndexedBuffer[A] +} From 09e08685b83bbfc2f4e88b1681ee4028c7b93ea2 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 16:50:11 +0100 Subject: [PATCH 088/216] Add mutable TreeMap and RedBlackTree to stdlib --- .../collection/mutable/RedBlackTree.scala | 653 ++++++++++++++++++ .../stdlib/collection/mutable/TreeMap.scala | 258 +++++++ .../stdlib/collection/mutable/TreeSet.scala | 8 +- 3 files changed, 915 insertions(+), 4 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/TreeMap.scala diff --git a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala new file mode 100644 index 000000000000..1f320f832cdf --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala @@ -0,0 +1,653 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.annotation.tailrec +import collection.{AbstractIterator, Iterator} +import java.lang.String +import language.experimental.captureChecking + +/** + * An object containing the red-black tree implementation used by mutable `TreeMaps`. + * + * The trees implemented in this object are *not* thread safe. + */ +private[collection] object RedBlackTree { + + // ---- class structure ---- + + // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node. + // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size. + // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) + // on the size of the range. 
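+  //
+  // A rough usage sketch (illustrative only; `Tree.empty`, `insert`, `get` and
+  // `delete` are the real members defined below, but this snippet is not part
+  // of the compiled source):
+  //
+  //   val t = Tree.empty[String, Int]
+  //   insert(t, "a", 1)   // t.size == 1
+  //   get(t, "a")         // Some(1)
+  //   delete(t, "a")      // t.size == 0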
+ + final class Tree[sealed A, sealed B](var root: Node[A, B], var size: Int) { + def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size) + } + + final class Node[sealed A, sealed B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { + override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" + } + + object Tree { + def empty[sealed A, sealed B]: Tree[A, B] = new Tree(null, 0) + } + + object Node { + + @`inline` def apply[sealed A, sealed B](key: A, value: B, red: Boolean, + left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = + new Node(key, value, red, left, right, parent) + + @`inline` def leaf[sealed A, sealed B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = + new Node(key, value, red, null, null, parent) + + def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) + } + + // ---- getters ---- + + def isRed(node: Node[_, _]) = (node ne null) && node.red + def isBlack(node: Node[_, _]) = (node eq null) || !node.red + + // ---- size ---- + + def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right) + def size(tree: Tree[_, _]): Int = tree.size + def isEmpty(tree: Tree[_, _]) = tree.root eq null + def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 } + + // ---- search ---- + + def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match { + case null => None + case node => Some(node.value) + } + + @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = + if (node eq null) null + else { + val cmp = ord.compare(key, node.key) + if (cmp < 0) getNode(node.left, key) + else if (cmp > 0) getNode(node.right, key) + else node + } + + def contains[A: Ordering](tree: Tree[A, _], key: A): Boolean = getNode(tree.root, key) ne null + + def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match { + case null => None + case node => Some((node.key, node.value)) + } + + def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match { + case null => None + case node => Some(node.key) + } + + private def minNode[A, B](node: Node[A, B]): Node[A, B] = + if (node eq null) null else minNodeNonNull(node) + + @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = + if (node.left eq null) node else minNodeNonNull(node.left) + + def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match { + case null => None + case node => Some((node.key, node.value)) + } + + def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match { + case null => None + case node => Some(node.key) + } + + private def maxNode[A, B](node: Node[A, B]): Node[A, B] = + if (node eq null) null else maxNodeNonNull(node) + + @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = + if (node.right eq null) node else maxNodeNonNull(node.right) + + /** + * Returns the first (lowest) map entry with a key equal or greater than `key`. Returns `None` if there is no such + * node. 
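+   * For example (illustrative), in a tree holding the keys 1 and 3, `minAfter(tree, 2)`
+   * returns the entry for key 3, while `minAfter(tree, 4)` returns `None`.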
+ */ + def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = + minNodeAfter(tree.root, key) match { + case null => None + case node => Some((node.key, node.value)) + } + + def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = + minNodeAfter(tree.root, key) match { + case null => None + case node => Some(node.key) + } + + private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { + if (node eq null) null + else { + var y: Node[A, B] = null + var x = node + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + if (cmp <= 0) y else successor(y) + } + } + + /** + * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node. + */ + def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = + maxNodeBefore(tree.root, key) match { + case null => None + case node => Some((node.key, node.value)) + } + + def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = + maxNodeBefore(tree.root, key) match { + case null => None + case node => Some(node.key) + } + + private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { + if (node eq null) null + else { + var y: Node[A, B] = null + var x = node + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + if (cmp > 0) y else predecessor(y) + } + } + + // ---- insertion ---- + + def insert[sealed A, sealed B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { + var y: Node[A, B] = null + var x = tree.root + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + + if (cmp == 0) y.value = value + else { + val z = Node.leaf(key, value, red = true, y) + + if (y eq null) tree.root = z + else if (cmp < 0) y.left = z + else y.right = z + + fixAfterInsert(tree, z) + tree.size += 1 + } + } + + private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = { + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + rotateLeft(tree, z) + } + z.parent.red = false + z.parent.parent.red = true + rotateRight(tree, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + rotateRight(tree, z) + } + z.parent.red = false + z.parent.parent.red = true + rotateLeft(tree, z.parent.parent) + } + } + } + tree.root.red = false + } + + // ---- deletion ---- + + def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = { + val z = getNode(tree.root, key) + if (z ne null) { + var y = z + var yIsRed = y.red + var x: Node[A, B] = null + var xParent: Node[A, B] = null + + if (z.left eq null) { + x = z.right + transplant(tree, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + transplant(tree, z, z.left) + xParent = z.parent + } + else { + y = minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + 
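+        // `y` is the in-order successor of `z` (the minimum of z's right
+        // subtree), so it has no left child; `x` is its only possible child
+        // and may be null, which is why `xParent` is tracked explicitly.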
+ if (y.parent eq z) xParent = y + else { + xParent = y.parent + transplant(tree, y, y.right) + y.right = z.right + y.right.parent = y + } + transplant(tree, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) fixAfterDelete(tree, x, xParent) + tree.size -= 1 + } + } + + private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = { + var x = node + var xParent = parent + while ((x ne tree.root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + rotateLeft(tree, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + rotateRight(tree, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + rotateLeft(tree, xParent) + x = tree.root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + rotateRight(tree, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + rotateLeft(tree, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + rotateRight(tree, xParent) + x = tree.root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + } + + // ---- helpers ---- + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + /** + * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is, + * therefore, the first node), this method returns `null`. + */ + private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = { + if (node.left ne null) maxNodeNonNull(node.left) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.left)) { + x = y + y = y.parent + } + y + } + } + + private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { + // assert(x.right ne null) + val y = x.right + x.right = y.left + + if (y.left ne null) y.left.parent = x + y.parent = x.parent + + if (x.parent eq null) tree.root = y + else if (x eq x.parent.left) x.parent.left = y + else x.parent.right = y + + y.left = x + x.parent = y + } + + private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { + // assert(x.left ne null) + val y = x.left + x.left = y.right + + if (y.right ne null) y.right.parent = x + y.parent = x.parent + + if (x.parent eq null) tree.root = y + else if (x eq x.parent.right) x.parent.right = y + else x.parent.left = y + + y.right = x + x.parent = y + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. 
+ */ + private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = { + if (to.parent eq null) tree.root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + + if (from ne null) from.parent = to.parent + } + + // ---- tree traversal ---- + + def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f) + + private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = + if (node ne null) foreachNodeNonNull(node, f) + + private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = { + if (node.left ne null) foreachNodeNonNull(node.left, f) + f((node.key, node.value)) + if (node.right ne null) foreachNodeNonNull(node.right, f) + } + + def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { + def g(node: Node[A, _]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) + } + + def foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { + def g(node: Node[A, B]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key, node.value) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) + } + + def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f) + + private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = + if (node ne null) transformNodeNonNull(node, f) + + private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = { + if (node.left ne null) transformNodeNonNull(node.left, f) + node.value = f(node.key, node.value) + if (node.right ne null) transformNodeNonNull(node.right, f) + } + + def iterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = + new EntriesIterator(tree, start, end) + + def keysIterator[sealed A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = + new KeysIterator(tree, start, end) + + def valuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = + new ValuesIterator(tree, start, end) + + private[this] abstract class TreeIterator[sealed A, sealed B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) + (implicit ord: Ordering[A]) extends AbstractIterator[R] { + + protected def nextResult(node: Node[A, B]): R + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): R = nextNode match { + case null => throw new NoSuchElementException("next on empty iterator") + case node => + nextNode = successor(node) + setNullIfAfterEnd() + nextResult(node) + } + + private[this] var nextNode: Node[A, B] = start match { + case None => minNode(tree.root) + case Some(from) => minNodeAfter(tree.root, from) + } + + private[this] def setNullIfAfterEnd(): Unit = + if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0) + nextNode = null + + setNullIfAfterEnd() + } + + private[this] final class EntriesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, (A, B)](tree, start, end) { + + def nextResult(node: Node[A, B]) = (node.key, node.value) + } + + private[this] final class KeysIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, A](tree, 
start, end) { + + def nextResult(node: Node[A, B]) = node.key + } + + private[this] final class ValuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, B](tree, start, end) { + + def nextResult(node: Node[A, B]) = node.value + } + + // ---- debugging ---- + + /** + * Checks if the tree is in a valid state. That happens if: + * - It is a valid binary search tree; + * - All red-black properties are satisfied; + * - All non-null nodes have their `parent` reference correct; + * - The size variable in `tree` corresponds to the actual size of the tree. + */ + def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean = + isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size + + /** + * Returns true if all non-null nodes have their `parent` reference correct. + */ + private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = { + + def hasProperParentRefs(node: Node[A, B]): Boolean = { + if (node eq null) true + else { + if ((node.left ne null) && (node.left.parent ne node) || + (node.right ne null) && (node.right.parent ne node)) false + else hasProperParentRefs(node.left) && hasProperParentRefs(node.right) + } + } + + if(tree.root eq null) true + else (tree.root.parent eq null) && hasProperParentRefs(tree.root) + } + + /** + * Returns true if this node follows the properties of a binary search tree. + */ + private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = { + if (node eq null) true + else { + if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) || + (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false + else isValidBST(node.left) && isValidBST(node.right) + } + } + + /** + * Returns true if the tree has all the red-black tree properties: if the root node is black, if all children of red + * nodes are black and if the path from any node to any of its null children has the same number of black nodes. 
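+   * For instance (illustrative), a tree consisting of a black root with two red
+   * children satisfies all of these properties; its black height, as computed by
+   * `blackHeight` below, is 2.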
+   */
+  private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = {
+
+    def noRedAfterRed(node: Node[A, B]): Boolean = {
+      if (node eq null) true
+      else if (node.red && (isRed(node.left) || isRed(node.right))) false
+      else noRedAfterRed(node.left) && noRedAfterRed(node.right)
+    }
+
+    def blackHeight(node: Node[A, B]): Int = {
+      if (node eq null) 1
+      else {
+        val lh = blackHeight(node.left)
+        val rh = blackHeight(node.right)
+
+        if (lh == -1 || lh != rh) -1
+        else if (isRed(node)) lh
+        else lh + 1
+      }
+    }
+
+    isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0
+  }
+
+  // building
+
+  /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */
+  def fromOrderedKeys[sealed A](xs: Iterator[A], size: Int): Tree[A, Null] = {
+    val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+    def f(level: Int, size: Int): Node[A, Null] = size match {
+      case 0 => null
+      case 1 => new Node(xs.next(), null, level == maxUsedDepth && level != 1, null, null, null)
+      case n =>
+        val leftSize = (size-1)/2
+        val left = f(level+1, leftSize)
+        val x = xs.next()
+        val right = f(level+1, size-1-leftSize)
+        val n = new Node(x, null, false, left, right, null)
+        if(left ne null) left.parent = n
+        right.parent = n
+        n
+    }
+    new Tree(f(1, size), size)
+  }
+
+  /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */
+  def fromOrderedEntries[sealed A, sealed B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = {
+    val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+    def f(level: Int, size: Int): Node[A, B] = size match {
+      case 0 => null
+      case 1 =>
+        val (k, v) = xs.next()
+        new Node(k, v, level == maxUsedDepth && level != 1, null, null, null)
+      case n =>
+        val leftSize = (size-1)/2
+        val left = f(level+1, leftSize)
+        val (k, v) = xs.next()
+        val right = f(level+1, size-1-leftSize)
+        val n = new Node(k, v, false, left, right, null)
+        if(left ne null) left.parent = n
+        right.parent = n
+        n
+    }
+    new Tree(f(1, size), size)
+  }
+
+  def copyTree[sealed A, sealed B](n: Node[A, B]): Node[A, B] =
+    if(n eq null) null else {
+      val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null)
+      if(c.left != null) c.left.parent = c
+      if(c.right != null) c.right.parent = c
+      c
+    }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala
new file mode 100644
index 000000000000..f714a9ed46c2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala
@@ -0,0 +1,258 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable.{RedBlackTree => RB}
+import language.experimental.captureChecking
+
+/**
+ * A mutable sorted map implemented using a mutable red-black tree as underlying data structure.
+ *
+ * @param ordering the implicit ordering used to compare objects of type `K`.
+ * @tparam K the type of the keys contained in this tree map.
+ * @tparam V the type of the values associated with the keys.
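+ *
+ * @example A small illustrative sketch (not part of the original source):
+ * {{{
+ * val m = TreeMap("b" -> 2, "a" -> 1)
+ * m.head                        // ("a", 1): entries are kept in key order
+ * m("c") = 3                    // in-place insertion
+ * m.rangeImpl(Some("b"), None)  // live projection of the keys >= "b"
+ * }}}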
+ * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +sealed class TreeMap[sealed K, sealed V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, TreeMap[K, V]] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { + + override def sortedMapFactory = TreeMap + + /** + * Creates an empty `TreeMap`. + * @param ord the implicit ordering used to compare objects of type `K`. + * @return an empty `TreeMap`. + */ + def this()(implicit ord: Ordering[K]) = this(RB.Tree.empty)(ord) + + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree) + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, None) + } + + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, None) + } + + def keysIteratorFrom(start: K): Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, Some(start)) + } + + def iteratorFrom(start: K): Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree, Some(start)) + } + + override def valuesIteratorFrom(start: K): Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, Some(start)) + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Node[K, V]]( + size, tree.root, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree.root, _.left, _.right, _.value)) + } + s.asInstanceOf[S with EfficientSplit] + } + + def addOne(elem: (K, V)): this.type = { RB.insert(tree, elem._1, elem._2); this } + + def subtractOne(elem: K): this.type = { RB.delete(tree, elem); this } + + override def clear(): Unit = RB.clear(tree) + + def get(key: K): Option[V] = RB.get(tree, key) + 
+
+  /**
+   * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and
+   * vice versa.
+   *
+   * Only entries with keys within this projection's key range will ever appear as elements of this map, independently
+   * of whether the entries are added through the original map or through this view. That means that if one inserts a
+   * key-value pair into a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the
+   * newly added entry. Mutations are always reflected in the original map, though.
+   *
+   * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+   *             bound.
+   * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+   *              bound.
+   */
+  def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = new TreeMapProjection(from, until)
+
+  override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f)
+  override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f)
+
+  override def size: Int = RB.size(tree)
+  override def knownSize: Int = size
+  override def isEmpty: Boolean = RB.isEmpty(tree)
+
+  override def contains(key: K): Boolean = RB.contains(tree, key)
+
+  override def head: (K, V) = RB.min(tree).get
+
+  override def last: (K, V) = RB.max(tree).get
+
+  override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key)
+
+  override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key)
+
+  override protected[this] def className: String = "TreeMap"
+
+
+  /**
+   * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa.
+   *
+   * Only entries with keys within this projection's key range will ever appear as elements of this map, independently
+   * of whether the entries are added through the original map or through this view. That means that if one inserts a
+   * key-value pair into a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the
+   * newly added entry. Mutations are always reflected in the original map, though.
+   *
+   * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+   *             bound.
+   * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+   *              bound.
+   */
+  private[this] final class TreeMapProjection(from: Option[K], until: Option[K]) extends TreeMap[K, V](tree) {
+
+    /**
+     * Given a possible new lower bound, chooses and returns the most constraining one (the maximum).
+     */
+    private[this] def pickLowerBound(newFrom: Option[K]): Option[K] = (from, newFrom) match {
+      case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr))
+      case (None, _) => newFrom
+      case _ => from
+    }
+
+    /**
+     * Given a possible new upper bound, chooses and returns the most constraining one (the minimum).
+     */
+    private[this] def pickUpperBound(newUntil: Option[K]): Option[K] = (until, newUntil) match {
+      case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt))
+      case (None, _) => newUntil
+      case _ => until
+    }
+
+    /**
+     * Returns true if the argument is inside the view bounds (between `from` and `until`).
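+     * For instance (illustrative): with `from = Some(2)` and `until = Some(5)`, the
+     * keys 2, 3 and 4 are inside the bounds while 1 and 5 are not, since `from` is
+     * inclusive and `until` is exclusive.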
+ */ + private[this] def isInsideViewBounds(key: K): Boolean = { + val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 + val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 + afterFrom && beforeUntil + } + + override def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = + new TreeMapProjection(pickLowerBound(from), pickUpperBound(until)) + + override def get(key: K) = if (isInsideViewBounds(key)) RB.get(tree, key) else None + + override def iterator = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, from, until) + override def keysIterator: Iterator[K] = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, from, until) + override def valuesIterator: Iterator[V] = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, from, until) + override def keysIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, pickLowerBound(Some(start)), until) + override def iteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, pickLowerBound(Some(start)), until) + override def valuesIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, pickLowerBound(Some(start)), until) + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 + override def isEmpty = RB.size(tree) == 0 || !iterator.hasNext + override def contains(key: K) = isInsideViewBounds(key) && RB.contains(tree, key) + + override def head = headOption.get + override def headOption = { + val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree) + (entry, until) match { + case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None + case _ => entry + } + } + + override def last = lastOption.get + override def lastOption = { + val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree) + (entry, from) match { + case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None + case _ => entry + } + } + + // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized + // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See + // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
+ override def foreach[U](f: ((K, V)) => U): Unit = iterator.foreach(f) + + override def clone() = super.clone().rangeImpl(from, until) + } + +} + +/** + * $factoryInfo + * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def from[sealed K : Ordering, sealed V](it: IterableOnce[(K, V)]^): TreeMap[K, V] = + Growable.from(empty[K, V], it) + + def empty[sealed K : Ordering, sealed V]: TreeMap[K, V] = new TreeMap[K, V]() + + def newBuilder[sealed K: Ordering, sealed V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala index 59c68a768351..9ba439bea041 100644 --- a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala @@ -29,7 +29,7 @@ import language.experimental.captureChecking * @define coll mutable tree set */ // Original API designed in part by Lucien Pereira -sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) +sealed class TreeSet[sealed A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) extends AbstractSet[A] with SortedSet[A] with SortedSetOps[A, TreeSet, TreeSet[A]] @@ -192,9 +192,9 @@ sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit va @SerialVersionUID(3L) object TreeSet extends SortedIterableFactory[TreeSet] { - def empty[A : Ordering]: TreeSet[A] = new TreeSet[A]() + def empty[sealed A : Ordering]: TreeSet[A] = new TreeSet[A]() - def from[E](it: IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = + def from[sealed E](it: IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = it match { case ts: TreeSet[E] if ordering == ts.ordering => new TreeSet[E](ts.tree.treeCopy()) @@ -210,7 +210,7 @@ object TreeSet extends SortedIterableFactory[TreeSet] { new TreeSet[E](t) } - def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { + def newBuilder[sealed A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } def result(): TreeSet[A] = new TreeSet[A](tree) From 0888c408a1298c3db462a0099e03836cf073c466 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 17:57:00 +0100 Subject: [PATCH 089/216] Add mutable SortedMap and SeqMap to stdlib --- .../stdlib/collection/mutable/SeqMap.scala | 39 +++++++ .../stdlib/collection/mutable/SortedMap.scala | 104 ++++++++++++++++++ 2 files changed, 143 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/SeqMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/SortedMap.scala diff --git a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala new file mode 100644 index 000000000000..5740490223b2 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable +import language.experimental.captureChecking + +/** + * A generic trait for ordered mutable maps. Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Note that when checking for equality [[SeqMap]] does not take into account + * ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * + * @define coll mutable Seq map + * @define Coll `mutable.SeqMap` + */ + +trait SeqMap[K, V] extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + +object SeqMap extends MapFactory.Delegate[SeqMap](LinkedHashMap) diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala new file mode 100644 index 000000000000..ff0e95c747a5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala @@ -0,0 +1,104 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults} +import language.experimental.captureChecking + +/** + * Base type for mutable sorted map collections + */ +trait SortedMap[K, V] + extends collection.SortedMap[K, V] + with Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this + + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + /** The same sorted map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault(d: K -> V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
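+   *
+   * For example (an illustrative sketch):
+   * {{{
+   * val m = TreeMap(1 -> "a").withDefaultValue("?")
+   * m(2)      // "?": the default is used by `apply`
+   * m.get(2)  // None: `get` ignores the default
+   * }}}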
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue(d: V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, _ => d) +} + +trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends collection.SortedMapOps[K, V, CC, C] + with MapOps[K, V, Map, C] { + + def unsorted: Map[K, V] + + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { + + @SerialVersionUID(3L) + final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K -> V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] + with Serializable { + + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory + + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + implicit def ordering: Ordering[K] = underlying.ordering + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + override def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + override def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): SortedMap[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) + } +} From a76129f308e7561231a24df21747e9826e5198f3 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 18:11:45 +0100 Subject: [PATCH 090/216] Add AnyRefMap to stdlib --- .../stdlib/collection/mutable/AnyRefMap.scala | 603 ++++++++++++++++++ 1 file changed, 603 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala diff --git a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala new file mode 100644 index 000000000000..a6413649e219 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala @@ -0,0 +1,603 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions +import language.experimental.captureChecking + + +/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. + * + * Basic map operations on single entries, including `contains` and `get`, + * are typically significantly faster with `AnyRefMap` than [[HashMap]]. + * Note that numbers and characters are not handled specially in AnyRefMap; + * only plain `equals` and `hashCode` are used in comparisons. + * + * Methods that traverse or regenerate the map, including `foreach` and `map`, + * are not in general faster than with `HashMap`. The methods `foreachKey`, + * `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster + * than alternative ways to achieve the same functionality. + * + * Maps with open addressing may become less efficient at lookup after + * repeated addition/removal of elements. Although `AnyRefMap` makes a + * decent attempt to remain efficient regardless, calling `repack` + * on a map that will no longer have elements removed but will be + * used heavily may save both time and storage space. + * + * This map is not intended to contain more than 2^29^ entries (approximately + * 500 million). The maximum capacity is 2^30^, but performance will degrade + * rapidly as 2^30^ is approached. + * + */ +class AnyRefMap[K <: AnyRef, sealed V] private[collection] (defaultEntry: K -> V, initialBufferSize: Int, initBlank: Boolean) + extends AbstractMap[K, V] + with MapOps[K, V, Map, AnyRefMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]] + with Serializable { + + import AnyRefMap._ + def this() = this(AnyRefMap.exceptionDefault, 16, true) + + /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ + def this(defaultEntry: K -> V) = this(defaultEntry, 16, true) + + /** Creates a new `AnyRefMap` with an initial buffer of specified size. + * + * An `AnyRefMap` can typically contain half as many elements as its buffer size + * before it requires resizing. + */ + def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) + + /** Creates a new `AnyRefMap` with specified default values and initial buffer size. 
*/ + def this(defaultEntry: K -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + + private[this] var mask = 0 + private[this] var _size = 0 + private[this] var _vacant = 0 + private[this] var _hashes: Array[Int] = null + private[this] var _keys: Array[AnyRef] = null + private[this] var _values: Array[AnyRef] = null + + if (initBlank) defaultInitialize(initialBufferSize) + + private[this] def defaultInitialize(n: Int): Unit = { + mask = + if (n<0) 0x7 + else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 + _hashes = new Array[Int](mask+1) + _keys = new Array[AnyRef](mask+1) + _values = new Array[AnyRef](mask+1) + } + + private[collection] def initializeTo( + m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef] + ): Unit = { + mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz + } + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): AnyRefMap[K,V] = { + var sz = coll.knownSize + if(sz < 0) sz = 4 + val arm = new AnyRefMap[K, V](sz * 2) + coll.iterator.foreach{ case (k,v) => arm(k) = v } + if (arm.size < (sz>>3)) arm.repack() + arm + } + override protected def newSpecificBuilder: Builder[(K, V), AnyRefMap[K,V]] = new AnyRefMapBuilder + + override def size: Int = _size + override def knownSize: Int = size + override def isEmpty: Boolean = _size == 0 + override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry) + + private def imbalanced: Boolean = + (_size + _vacant) > 0.5*mask || _vacant > _size + + private def hashOf(key: K): Int = { + // Note: this method must not return 0 or Int.MinValue, as these indicate no element + if (key eq null) 0x41081989 + else { + val h = key.hashCode + // Part of the MurmurHash3 32 bit finalizer + val i = (h ^ (h >>> 16)) * 0x85EBCA6B + val j = (i ^ (i >>> 13)) & 0x7FFFFFFF + if (j==0) 0x41081989 else j + } + } + + private def seekEntry(h: Int, k: AnyRef): Int = { + var e = h & mask + var x = 0 + var g = 0 + val hashes = _hashes + val keys = _keys + while ({ g = hashes(e); g != 0}) { + if (g == h && { val q = keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + e | MissingBit + } + + @`inline` private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { + var e = h & mask + var x = 0 + var g = 0 + var o = -1 + while ({ g = _hashes(e); g != 0}) { + if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e + else if (o == -1 && g+g == 0) o = e + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + if (o >= 0) o | MissVacant else e | MissingBit + } + + override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0 + + override def get(key: K): Option[V] = { + val i = seekEntry(hashOf(key), key) + if (i < 0) None else Some(_values(i).asInstanceOf[V]) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val i = seekEntry(hashOf(key), key) + if (i < 0) default else _values(i).asInstanceOf[V] + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val h = hashOf(key) + var i = seekEntryOrOpen(h, key) + if (i < 0) { + // It is possible that the default value computation was side-effecting + // Our hash table may have resized or even contain what we want now + // (but if it does, we'll replace it) + val value = { + val oh = _hashes + val ans = defaultValue + if (oh ne _hashes) { + i = seekEntryOrOpen(h, key) + if (i >= 0) _size -= 1 + } + ans + } + _size += 1 + val j = i & IndexMask + _hashes(j) 
= h + _keys(j) = key.asInstanceOf[AnyRef] + _values(j) = value.asInstanceOf[AnyRef] + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + value + } + else _values(i).asInstanceOf[V] + } + + /** Retrieves the value associated with a key, or the default for that type if none exists + * (null for AnyRef, 0 for floats and integers). + * + * Note: this is the fastest way to retrieve a value that may or + * may not exist, if the default null/zero is acceptable. For key/value + * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. + */ + def getOrNull(key: K): V = { + val i = seekEntry(hashOf(key), key) + (if (i < 0) null else _values(i)).asInstanceOf[V] + } + + /** Retrieves the value associated with a key. + * If the key does not exist in the map, the `defaultEntry` for that key + * will be returned instead; an exception will be thrown if no + * `defaultEntry` was supplied. + */ + override def apply(key: K): V = { + val i = seekEntry(hashOf(key), key) + if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] + } + + /** Defers to defaultEntry to find a default value for the key. Throws an + * exception if no other default behavior was specified. + */ + override def default(key: K): V = defaultEntry(key) + + private def repack(newMask: Int): Unit = { + val oh = _hashes + val ok = _keys + val ov = _values + mask = newMask + _hashes = new Array[Int](mask+1) + _keys = new Array[AnyRef](mask+1) + _values = new Array[AnyRef](mask+1) + _vacant = 0 + var i = 0 + while (i < oh.length) { + val h = oh(i) + if (h+h != 0) { + var e = h & mask + var x = 0 + while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + _hashes(e) = h + _keys(e) = ok(i) + _values(e) = ov(i) + } + i += 1 + } + } + + /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup. + * + * For maps that undergo a complex creation process with both addition and + * removal of keys, and then are used heavily with no further removal of + * elements, calling `repack` after the end of the creation can result in + * improved performance. Repacking takes time proportional to the number + * of entries in the map. + */ + def repack(): Unit = { + var m = mask + if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask + while (m > 8 && 8*_size < m) m = m >>> 1 + repack(m) + } + + override def put(key: K, value: V): Option[V] = { + val h = hashOf(key) + val i = seekEntryOrOpen(h, key) + if (i < 0) { + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + None + } + else { + val ans = Some(_values(i).asInstanceOf[V]) + _hashes(i) = h + _values(i) = value.asInstanceOf[AnyRef] + ans + } + } + + /** Updates the map to include a new key-value pair. + * + * This is the fastest way to add an entry to an `AnyRefMap`. + */ + override def update(key: K, value: V): Unit = { + val h = hashOf(key) + val i = seekEntryOrOpen(h, key) + if (i < 0) { + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + } + else { + _hashes(i) = h + _values(i) = value.asInstanceOf[AnyRef] + } + } + + /** Adds a new key/value pair to this map and returns the map. 
*/
+  @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3")
+  def +=(key: K, value: V): this.type = { update(key, value); this }
+
+  /** Adds a new key/value pair to this map and returns the map. */
+  @inline final def addOne(key: K, value: V): this.type = { update(key, value); this }
+
+  @inline override final def addOne(kv: (K, V)): this.type = { update(kv._1, kv._2); this }
+
+  def subtractOne(key: K): this.type = {
+    val i = seekEntry(hashOf(key), key)
+    if (i >= 0) {
+      _size -= 1
+      _vacant += 1
+      _hashes(i) = Int.MinValue
+      _keys(i) = null
+      _values(i) = null
+    }
+    this
+  }
+
+  def iterator: Iterator[(K, V)] = new AnyRefMapIterator[(K, V)] {
+    protected def nextResult(k: K, v: V) = (k, v)
+  }
+  override def keysIterator: Iterator[K] = new AnyRefMapIterator[K] {
+    protected def nextResult(k: K, v: V) = k
+  }
+  override def valuesIterator: Iterator[V] = new AnyRefMapIterator[V] {
+    protected def nextResult(k: K, v: V) = v
+  }
+
+  private abstract class AnyRefMapIterator[A] extends AbstractIterator[A] {
+    private[this] val hz = _hashes
+    private[this] val kz = _keys
+    private[this] val vz = _values
+
+    private[this] var index = 0
+
+    def hasNext: Boolean = index < hz.length && {
+      var h = hz(index)
+      while (h+h == 0) {
+        index += 1
+        if (index >= hz.length) return false
+        h = hz(index)
+      }
+      true
+    }
+
+    def next(): A = {
+      if (hasNext) {
+        val ans = nextResult(kz(index).asInstanceOf[K], vz(index).asInstanceOf[V])
+        index += 1
+        ans
+      }
+      else throw new NoSuchElementException("next")
+    }
+
+    protected def nextResult(k: K, v: V): A
+  }
+
+
+  override def foreach[U](f: ((K,V)) => U): Unit = {
+    var i = 0
+    var e = _size
+    while (e > 0) {
+      while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1
+      if (i < _hashes.length) {
+        f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]))
+        i += 1
+        e -= 1
+      }
+      else return
+    }
+  }
+
+  override def foreachEntry[U](f: (K,V) => U): Unit = {
+    var i = 0
+    var e = _size
+    while (e > 0) {
+      while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1
+      if (i < _hashes.length) {
+        f(_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V])
+        i += 1
+        e -= 1
+      }
+      else return
+    }
+  }
+
+  override def clone(): AnyRefMap[K, V] = {
+    val hz = java.util.Arrays.copyOf(_hashes, _hashes.length)
+    val kz = java.util.Arrays.copyOf(_keys, _keys.length)
+    val vz = java.util.Arrays.copyOf(_values, _values.length)
+    val arm = new AnyRefMap[K, V](defaultEntry, 1, false)
+    arm.initializeTo(mask, _size, _vacant, hz, kz, vz)
+    arm
+  }
+
+  @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0")
+  override def + [sealed V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv))
+
+  @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+  override def + [sealed V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = {
+    val m = this + elem1 + elem2
+    if(elems.isEmpty) m else m.concat(elems)
+  }
+
+  override def concat[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = {
+    val arm = clone().asInstanceOf[AnyRefMap[K, V2]]
+    xs.iterator.foreach(kv => arm += kv)
+    arm
+  }
+
+  override def ++[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = concat(xs)
+
+  @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0")
+  override def updated[sealed V1 >: V](key: K, value: V1): AnyRefMap[K, V1] =
+    clone().asInstanceOf[AnyRefMap[K,
V1]].addOne(key, value) + + private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + f(elems(i).asInstanceOf[A]) + } + i += 1 + } + } + + /** Applies a function to all keys of this map. */ + def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys, f) + + /** Applies a function to all values of this map. */ + def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values, f) + + /** Creates a new `AnyRefMap` with different values. + * Unlike `mapValues`, this method generates a new + * collection immediately. + */ + def mapValuesNow[sealed V1](f: V => V1): AnyRefMap[K, V1] = { + val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) + val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = new Array[AnyRef](_values.length) + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + arm.initializeTo(mask, _size, _vacant, hz, kz, vz) + arm + } + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + def transformValuesInPlace(f: V => V): this.type = { + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + this + } + + // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) + def map[K2 <: AnyRef, sealed V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.Map(this, f)) + def flatMap[K2 <: AnyRef, sealed V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.FlatMap(this, f)) + def collect[K2 <: AnyRef, sealed V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) + + override def clear(): Unit = { + import java.util.Arrays.fill + fill(_keys, null) + fill(_values, null) + fill(_hashes, 0) + _size = 0 + _vacant = 0 + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(AnyRefMap.toFactory[K, V](AnyRefMap), this) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "AnyRefMap" +} + +object AnyRefMap { + private final val IndexMask = 0x3FFFFFFF + private final val MissingBit = 0x80000000 + private final val VacantBit = 0x40000000 + private final val MissVacant = 0xC0000000 + + private class ExceptionDefault extends (Any -> Nothing) with Serializable { + def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) + } + private val exceptionDefault = new ExceptionDefault + + /** A builder for instances of `AnyRefMap`. + * + * This builder can be reused to create multiple instances. 
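+   *
+   *  A usage sketch (names are illustrative):
+   *  {{{
+   *  val b = AnyRefMap.newBuilder[String, Int]
+   *  b.addOne("a" -> 1)
+   *  val m = b.result()   // call b.clear() before building the next map
+   *  }}}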
+ */ + final class AnyRefMapBuilder[K <: AnyRef, sealed V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { + private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] + def addOne(entry: (K, V)): this.type = { + elems += entry + this + } + def clear(): Unit = elems = new AnyRefMap[K, V] + def result(): AnyRefMap[K, V] = elems + override def knownSize: Int = elems.knownSize + } + + /** Creates a new `AnyRefMap` with zero or more key/value pairs. */ + def apply[K <: AnyRef, sealed V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) + + def newBuilder[K <: AnyRef, sealed V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] + + private def buildFromIterableOnce[K <: AnyRef, sealed V](elems: IterableOnce[(K, V)]^): AnyRefMap[K, V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 + val arm = new AnyRefMap[K, V](sz * 2) + elems.iterator.foreach{ case (k,v) => arm(k) = v } + if (arm.size < (sz>>3)) arm.repack() + arm + } + + /** Creates a new empty `AnyRefMap`. */ + def empty[K <: AnyRef, sealed V]: AnyRefMap[K, V] = new AnyRefMap[K, V] + + /** Creates a new empty `AnyRefMap` with the supplied default */ + def withDefault[K <: AnyRef, sealed V](default: K -> V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) + + /** Creates a new `AnyRefMap` from an existing source collection. A source collection + * which is already an `AnyRefMap` gets cloned. + * + * @param source Source collection + * @tparam K the type of the keys + * @tparam V the type of the values + * @return a new `AnyRefMap` with the elements of `source` + */ + def from[K <: AnyRef, sealed V](source: IterableOnce[(K, V)]^): AnyRefMap[K, V] = source match { + case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]] + case _ => buildFromIterableOnce(source) + } + + /** Creates a new `AnyRefMap` from arrays of keys and values. + * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. + */ + def fromZip[K <: AnyRef, sealed V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { + val sz = math.min(keys.length, values.length) + val arm = new AnyRefMap[K, V](sz * 2) + var i = 0 + while (i < sz) { arm(keys(i)) = values(i); i += 1 } + if (arm.size < (sz>>3)) arm.repack() + arm + } + + /** Creates a new `AnyRefMap` from keys and values. + * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. 
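+   *
+   *  For example (element values are illustrative):
+   *  {{{
+   *  AnyRefMap.fromZip(List("a", "b"), List(1, 2))   // AnyRefMap(a -> 1, b -> 2)
+   *  }}}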
+ */ + def fromZip[K <: AnyRef, sealed V](keys: Iterable[K]^, values: Iterable[V]^): AnyRefMap[K, V] = { + val sz = math.min(keys.size, values.size) + val arm = new AnyRefMap[K, V](sz * 2) + val ki = keys.iterator + val vi = values.iterator + while (ki.hasNext && vi.hasNext) arm(ki.next()) = vi.next() + if (arm.size < (sz >> 3)) arm.repack() + arm + } + + implicit def toFactory[K <: AnyRef, sealed V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]^): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) + def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def toBuildFrom[K <: AnyRef, sealed V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]^) = AnyRefMap.from(it) + def newBuilder(from: Any) = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def iterableFactory[K <: AnyRef, sealed V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) + implicit def buildFromAnyRefMap[K <: AnyRef, sealed V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) +} From 73f9474beaf9621b958c8c9bada8cbd5806e5d3a Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 18:22:21 +0100 Subject: [PATCH 091/216] Add CollisionProofHashMap to stdlib --- .../stdlib/collection/mutable/Cloneable.scala | 22 + .../mutable/CollisionProofHashMap.scala | 889 ++++++++++++++++++ 2 files changed, 911 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/Cloneable.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala diff --git a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala new file mode 100644 index 000000000000..39149e98cbf0 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala @@ -0,0 +1,22 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable +import language.experimental.captureChecking + +/** A trait for cloneable collections. + * + * @tparam C Type of the collection, covariant and with reference types as upperbound. + */ +trait Cloneable[+C <: AnyRef] extends scala.Cloneable { + override def clone(): C = super.clone().asInstanceOf[C] +} diff --git a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala new file mode 100644 index 000000000000..2b27efb6eac1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala @@ -0,0 +1,889 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.{unchecked => uc} +import scala.annotation.{implicitNotFound, tailrec, unused} +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializationProxy +import scala.runtime.Statics +import language.experimental.captureChecking + +/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good + * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality + * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. Universal equality + * of numeric types is not supported (similar to `AnyRefMap`). + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class CollisionProofHashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) + extends AbstractMap[K, V] + with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- + with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //-- + + private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap + + def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering) + + import CollisionProofHashMap.Node + private[this] type RBNode = CollisionProofHashMap.RBNode[K, V] + private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] + + /** The actual hash table. */ + private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). 
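+   *  With the default initial capacity of 16 and load factor of 0.75 this
+   *  starts at 12, so the table doubles before the 12th entry is inserted.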
*/ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + @`inline` private[this] final def computeHash(o: K): Int = { + val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode + h ^ (h >>> 16) + } + + @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) + + override protected def fromSpecific(coll: (IterableOnce[(K, V)]^) @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] + + override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + override def contains(key: K): Boolean = findNode(key) ne null + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + }) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + } + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val nd = findNode(key) + if (nd eq null) default else nd match { + case nd: LLNode @uc => nd.value + case n => n.asInstanceOf[RBNode].value + } + } + + @`inline` private[this] def findNode(elem: K): Node = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case n: LLNode @uc => n.getNode(elem, hash) + case n => n.asInstanceOf[RBNode].getNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) { + if(size == 0) reallocTable(target) + else growTable(target) + } + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + val res = table(idx) match { + case n: RBNode @uc => + insert(n, idx, key, hash, value) + case _old => + val old: LLNode = _old.asInstanceOf[LLNode] + if(old eq null) { + table(idx) = new LLNode(key, hash, value, null) + } else { + var remaining = CollisionProofHashMap.treeifyThreshold + var prev: LLNode = null + var n = old + while((n ne null) && n.hash <= hash && remaining > 0) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return (if(getOld) Some(old) else null) + } + prev = n + n = n.next + remaining -= 1 + } + if(remaining == 0) { + treeify(old, idx) + return put0(key, value, getOld, hash, idx) + } + if(prev eq null) table(idx) = new LLNode(key, hash, value, old) + else prev.next = new LLNode(key, hash, value, prev.next) + } + true + } + if(res) contentSize += 1 + if(res) Some(null.asInstanceOf[V]) else null //TODO + } + + private[this] def treeify(old: LLNode, idx: Int): Unit = { + table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, 
old.value, red = false, null) + var n: LLNode = old.next + while(n ne null) { + val root = table(idx).asInstanceOf[RBNode] + insertIntoExisting(root, idx, n.key, n.hash, n.value, root) + n = n.next + } + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { + val k = xs.knownSize + if(k > 0) sizeHint(contentSize + k) + super.addAll(xs) + } + + // returns the old value or Statics.pfMarker if not found + private[this] def remove0(elem: K) : Any = { + val hash = computeHash(elem) + val idx = index(hash) + table(idx) match { + case null => Statics.pfMarker + case t: RBNode @uc => + val v = delete(t, idx, elem, hash) + if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 + v + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd.value + case nd: LLNode @uc => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next.value + } + prev = next + next = next.next + } + Statics.pfMarker + } + } + + private[this] abstract class MapIterator[R] extends AbstractIterator[R] { + protected[this] def extract(node: LLNode): R + protected[this] def extract(node: RBNode): R + + private[this] var i = 0 + private[this] var node: Node = null + private[this] val len = table.length + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + n match { + case null => + case n: RBNode @uc => + node = CollisionProofHashMap.minNodeNonNull(n) + return true + case n: LLNode @uc => + node = n + return true + } + } + false + } + } + + def next(): R = + if(!hasNext) Iterator.empty.next() + else node match { + case n: RBNode @uc => + val r = extract(n) + node = CollisionProofHashMap.successor(n ) + r + case n: LLNode @uc => + val r = extract(n) + node = n.next + r + } + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapIterator[K] { + protected[this] def extract(node: LLNode) = node.key + protected[this] def extract(node: RBNode) = node.key + } + } + + override def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapIterator[(K, V)] { + protected[this] def extract(node: LLNode) = (node.key, node.value) + protected[this] def extract(node: RBNode) = (node.key, node.value) + } + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + table = java.util.Arrays.copyOf(table, newlen) + threshold = newThreshold(table.length) + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) splitBucket(old, i, i + oldlen, oldlen) + i += 1 + } + oldlen *= 2 + } + } + + @`inline` private[this] def reallocTable(newlen: Int) = { + table = new Array(newlen) + threshold = newThreshold(table.length) + } + + @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) + } + + private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + //preLow.next = null + //preHigh.next = null + var lastLow: LLNode = 
preLow + var lastHigh: LLNode = preHigh + var n = list + while(n ne null) { + val next = n.next + if((n.hash & mask) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(list ne preLow.next) table(lowBucket) = preLow.next + if(preHigh.next ne null) { + table(highBucket) = preHigh.next + lastHigh.next = null + } + } + + private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + var lowCount, highCount = 0 + tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) + if(highCount != 0) { + if(lowCount == 0) { + table(lowBucket) = null + table(highBucket) = tree + } else { + table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), lowCount) + table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def remove(key: K): Option[V] = { + val v = remove0(key) + if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) + } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) + } + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) + } + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) + + override protected[this] def className = "CollisionProofHashMap" + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val hash = computeHash(key) + val idx = index(hash) + table(idx) match { + case null => () + case n: LLNode @uc => + val nd = n.getNode(key, hash) + if(nd != null) return nd.value + case n => + val nd = n.asInstanceOf[RBNode].getNode(key, hash) + if(nd != null) return nd.value + } + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + + ///////////////////// Overrides code from SortedMapOps + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. 
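+   *
+   *  A sketch (the mapping function is illustrative; an `Ordering` for the
+   *  new key type must be in scope):
+   *  {{{
+   *  val m = CollisionProofHashMap("a" -> 1, "b" -> 2)
+   *  m.map { case (k, v) => (k.toUpperCase, v * 10) }   // keys "A", "B"
+   *  }}}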
+ */ + def map[K2, V2](f: ((K, V)) => (K2, V2)) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: + + @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red + @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red + + @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { + val i = hash - node.hash + if(i != 0) i else ordering.compare(key, node.key) + } + + @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ordering.compare(key, node.key) + } + + // ---- insertion ---- + + @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { + val cmp = compare(key, hash, x) + if(cmp == 0) { + x.value = value + false + } else { + val next = if(cmp < 0) x.left else x.right + if(next eq null) { + val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) + if (cmp < 0) x.left = z else x.right = z + table(bucket) = fixAfterInsert(_root, z) + return true + } + else insertIntoExisting(_root, bucket, key, hash, value, next) + } + } + + private[this] final 
def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { + if(tree eq null) { + table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) + true + } else insertIntoExisting(tree, bucket, key, hash, value, tree) + } + + private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { + var root = _root + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + root = rotateLeft(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateRight(root, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + root = rotateRight(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateLeft(root, z.parent.parent) + } + } + } + root.red = false + root + } + + // ---- deletion ---- + + // returns the old value or Statics.pfMarker if not found + private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { + var root = _root + val z = root.getNode(key, hash: Int) + if (z ne null) { + val oldValue = z.value + var y = z + var yIsRed = y.red + var x: RBNode = null + var xParent: RBNode = null + + if (z.left eq null) { + x = z.right + root = transplant(root, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + root = transplant(root, z, z.left) + xParent = z.parent + } + else { + y = CollisionProofHashMap.minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent eq z) xParent = y + else { + xParent = y.parent + root = transplant(root, y, y.right) + y.right = z.right + y.right.parent = y + } + root = transplant(root, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) root = fixAfterDelete(root, x, xParent) + if(root ne _root) table(bucket) = root + oldValue + } else Statics.pfMarker + } + + private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { + var root = _root + var x = node + var xParent = parent + while ((x ne root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateLeft(root, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + root = rotateRight(root, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + root = rotateLeft(root, xParent) + x = root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateRight(root, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + root = rotateLeft(root, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + root = rotateRight(root, xParent) + x = root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + root + } + + // ---- helpers ---- + + @`inline` private[this] def rotateLeft(_root: RBNode, x: 
RBNode): RBNode = { + var root = _root + val y = x.right + x.right = y.left + + val xp = x.parent + if (y.left ne null) y.left.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.left) xp.left = y + else xp.right = y + + y.left = x + x.parent = y + root + } + + @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.left + x.left = y.right + + val xp = x.parent + if (y.right ne null) y.right.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.right) xp.right = y + else xp.left = y + + y.right = x + x.parent = y + root + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. + */ + private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { + var root = _root + if (to.parent eq null) root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + if (from ne null) from.parent = to.parent + root + } + + // building + + def fromNodes(xs: Iterator[Node], size: Int): RBNode = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): RBNode = size match { + case 0 => null + case 1 => + val nn = xs.next() + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val nn = xs.next() + val right = f(level+1, size-1-leftSize) + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + val n = new RBNode(key, hash, value, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + f(1, size) + } +} + +/** + * $factoryInfo + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + */ +@SerialVersionUID(3L) +object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." 
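+  // Note: the `${K2}`/`${V2}` placeholders in `ordMsg` are interpolated by the
+  // `@implicitNotFound` annotations on `map`, `flatMap` and `collect` above,
+  // which each require an `Ordering` for the result's key type.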
+ + def from[sealed K : Ordering, sealed V](it: scala.collection.IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it + } + + def empty[sealed K : Ordering, sealed V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + def newBuilder[sealed K : Ordering, sealed V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[sealed K : Ordering, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) + } + + @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { + val i = hash - node.hash + if(i != 0) i else ord.compare(key, node.key) + } + + @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ord.compare(key, node.key) + } + + private final val treeifyThreshold = 8 + + // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. + // Keeping calls monomorphic where possible and dispatching manually where needed is faster. 
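+  // In practice a bucket is always homogeneous: it holds either a linked list
+  // of `LLNode`s (short collision chains) or a red-black tree of `RBNode`s once
+  // `treeifyThreshold` colliding entries accumulate, never a mix of the two.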
+ sealed abstract class Node + + /////////////////////////// Red-Black Tree Node + + final class RBNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { + val cmp = compare(k, h, this) + if (cmp < 0) { + if(left ne null) left.getNode(k, h) else null + } else if (cmp > 0) { + if(right ne null) right.getNode(k, h) else null + } else this + } + + def foreach[U](f: ((K, V)) => U): Unit = { + if(left ne null) left.foreach(f) + f((key, value)) + if(right ne null) right.foreach(f) + } + + def foreachEntry[U](f: (K, V) => U): Unit = { + if(left ne null) left.foreachEntry(f) + f(key, value) + if(right ne null) right.foreachEntry(f) + } + + def foreachNode[U](f: RBNode[K, V] => U): Unit = { + if(left ne null) left.foreachNode(f) + f(this) + if(right ne null) right.foreachNode(f) + } + } + + @`inline` private def leaf[sealed A, sealed B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + new RBNode(key, hash, value, red, null, null, parent) + + @tailrec private def minNodeNonNull[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = + if (node.left eq null) node else minNodeNonNull(node.left) + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private def successor[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + private final class RBNodesIterator[sealed A, sealed B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): RBNode[A, B] = nextNode match { + case null => Iterator.empty.next() + case node => + nextNode = successor(node) + node + } + } + + /////////////////////////// Linked List Node + + private final class LLNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + override def toString = s"LLNode($key, $value, $hash) -> $next" + + private[this] def eq(a: Any, b: Any): Boolean = + if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { + if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this + else if((next eq null) || (hash > h)) null + else next.getNode(k, h) + } + + @tailrec def foreach[U](f: ((K, V)) => U): Unit = { + f((key, value)) + if(next ne null) next.foreach(f) + } + + @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { + f(key, value) + if(next ne null) next.foreachEntry(f) + } + + @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { + f(this) + if(next ne null) next.foreachNode(f) + } + } +} From ffbaf1f23f93663013a5c34d79a61c0f282518ae Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 18:31:56 +0100 Subject: [PATCH 092/216] Avoid infinite recursions when checking 
F-bounded types --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 9d56c93bfaf5..ae9a6eaff83e 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -14,7 +14,7 @@ import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPa import typer.Checking.{checkBounds, checkAppliedTypesIn} import typer.ErrorReporting.{Addenda, err} import typer.ProtoTypes.{AnySelectionProto, LhsProto} -import util.{SimpleIdentitySet, EqHashMap, SrcPos, Property} +import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.SymUtils.* import transform.{Recheck, PreRecheck} import Recheck.* @@ -147,6 +147,8 @@ object CheckCaptures: private def disallowRootCapabilitiesIn(tp: Type, carrier: Symbol, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = val check = new TypeTraverser: + private val seen = new EqHashSet[TypeRef] + extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = val encl = carrier.maybeOwner.enclosingMethodOrClass if encl.isClass then tparam.isParametricIn(encl) @@ -160,19 +162,21 @@ object CheckCaptures: def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => - capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") - t.info match - case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => - if hi.isAny then - report.error( - em"""$what cannot $have $tp since - |that type refers to the type variable $t, which is not sealed. - |$addendum""", - pos) - else - traverse(hi) - case _ => - traverseChildren(t) + if !seen.contains(t) then + capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") + seen += t + t.info match + case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => + if hi.isAny then + report.error( + em"""$what cannot $have $tp since + |that type refers to the type variable $t, which is not sealed. + |$addendum""", + pos) + else + traverse(hi) + case _ => + traverseChildren(t) case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot => () case t => From 7304ba60d1eed5ac53e0da741916d5c11bbaae58 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 18:32:10 +0100 Subject: [PATCH 093/216] Add HashTable.scala to stdlib --- .../stdlib/collection/mutable/HashTable.scala | 418 ++++++++++++++++++ 1 file changed, 418 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/HashTable.scala diff --git a/tests/pos-special/stdlib/collection/mutable/HashTable.scala b/tests/pos-special/stdlib/collection/mutable/HashTable.scala new file mode 100644 index 000000000000..a3534e322cf3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashTable.scala @@ -0,0 +1,418 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection.mutable + +import collection.{AbstractIterator, Iterator} + +import java.lang.Integer.{numberOfLeadingZeros, rotateRight} +import scala.util.hashing.byteswap32 + +import java.lang.Integer +import language.experimental.captureChecking + +/** This class can be used to construct data structures that are based + * on hashtables. Class `HashTable[A]` implements a hashtable + * that maps keys of type `A` to values of the fully abstract + * member type `Entry`. Classes that make use of `HashTable` + * have to provide an implementation for `Entry`. + * + * There are mainly two parameters that affect the performance of a hashtable: + * the initial size and the load factor. The size + * refers to the number of buckets in the hashtable, and the load + * factor is a measure of how full the hashtable is allowed to get before + * its size is automatically doubled. Both parameters may be changed by + * overriding the corresponding values in class `HashTable`. + * + * @tparam A type of the elements contained in this hash table. + */ +// Not used in the standard library, but used in scala-parallel-collections +private[collection] trait HashTable[sealed A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { + // Replacing Entry type parameter by abstract type member here allows to not expose to public + // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. + // However, I'm afraid it's too late now for such breaking change. + import HashTable._ + + protected var _loadFactor = defaultLoadFactor + + /** The actual hash table. + */ + protected[collection] var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity) + + /** The number of mappings contained in this hash table. + */ + protected[collection] var tableSize: Int = 0 + + final def size: Int = tableSize + + /** The next size value at which to resize (capacity * load factor). + */ + protected[collection] var threshold: Int = initialThreshold(_loadFactor) + + /** The array keeping track of the number of elements in 32 element blocks. + */ + protected var sizemap: Array[Int] = null + + protected var seedvalue: Int = tableSizeSeed + + protected def tableSizeSeed = Integer.bitCount(table.length - 1) + + /** The initial size of the hash table. + */ + protected def initialSize: Int = 16 + + /** The initial threshold. + */ + private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity) + + private def initialCapacity = capacity(initialSize) + + private def lastPopulatedIndex = { + var idx = table.length - 1 + while (table(idx) == null && idx > 0) + idx -= 1 + + idx + } + + /** + * Initializes the collection from the input stream. `readEntry` will be called for each + * entry to be read from the input stream. + */ + private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry): Unit = { + _loadFactor = in.readInt() + assert(_loadFactor > 0) + + val size = in.readInt() + tableSize = 0 + assert(size >= 0) + + seedvalue = in.readInt() + + val smDefined = in.readBoolean() + + table = new Array(capacity(sizeForThreshold(_loadFactor, size))) + threshold = newThreshold(_loadFactor, table.length) + + if (smDefined) sizeMapInit(table.length) else sizemap = null + + var index = 0 + while (index < size) { + addEntry(readEntry) + index += 1 + } + } + + /** + * Serializes the collection to the output stream by saving the load factor, collection + * size and collection entries. 
`writeEntry` is responsible for writing an entry to the stream.
+   *
+   * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To
+   * deserialize, `init` should be used.
+   */
+  private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit): Unit = {
+    out.writeInt(_loadFactor)
+    out.writeInt(tableSize)
+    out.writeInt(seedvalue)
+    out.writeBoolean(isSizeMapDefined)
+
+    foreachEntry(writeEntry)
+  }
+
+  /** Find entry with given key in table, null if not found.
+   */
+  final def findEntry(key: A): Entry =
+    findEntry0(key, index(elemHashCode(key)))
+
+  protected[collection] final def findEntry0(key: A, h: Int): Entry = {
+    var e = table(h).asInstanceOf[Entry]
+    while (e != null && !elemEquals(e.key, key)) e = e.next
+    e
+  }
+
+  /** Add entry to table
+   *  pre: no entry with same key exists
+   */
+  protected[collection] final def addEntry(e: Entry): Unit = {
+    addEntry0(e, index(elemHashCode(e.key)))
+  }
+
+  protected[collection] final def addEntry0(e: Entry, h: Int): Unit = {
+    e.next = table(h).asInstanceOf[Entry]
+    table(h) = e
+    tableSize = tableSize + 1
+    nnSizeMapAdd(h)
+    if (tableSize > threshold)
+      resize(2 * table.length)
+  }
+
+  /** Find entry with given key in table, or add new one if not found.
+   *  May be somewhat faster than the `findEntry`/`addEntry` pair as it
+   *  computes the entry's hash index only once.
+   *  Returns entry found in table or null.
+   *  New entries are created by calling the `createNewEntry` method.
+   */
+  def findOrAddEntry(key: A, value: B): Entry = {
+    val h = index(elemHashCode(key))
+    val e = findEntry0(key, h)
+    if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null }
+  }
+
+  /** Creates new entry to be immediately inserted into the hashtable.
+   *  This method is guaranteed to be called only once, and only in case the
+   *  entry will be added. In other words, an implementation may be side-effecting.
+   */
+  def createNewEntry(key: A, value: B): Entry
+
+  /** Remove entry from table if present.
+   */
+  final def removeEntry(key: A): Entry = {
+    removeEntry0(key, index(elemHashCode(key)))
+  }
+  /** Remove entry from table if present.
+   */
+  private[collection] final def removeEntry0(key: A, h: Int): Entry = {
+    var e = table(h).asInstanceOf[Entry]
+    if (e != null) {
+      if (elemEquals(e.key, key)) {
+        table(h) = e.next
+        tableSize = tableSize - 1
+        nnSizeMapRemove(h)
+        e.next = null
+        return e
+      } else {
+        var e1 = e.next
+        while (e1 != null && !elemEquals(e1.key, key)) {
+          e = e1
+          e1 = e1.next
+        }
+        if (e1 != null) {
+          e.next = e1.next
+          tableSize = tableSize - 1
+          nnSizeMapRemove(h)
+          e1.next = null
+          return e1
+        }
+      }
+    }
+    null
+  }
+
+  /** An iterator returning all entries.
+   */
+  def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] {
+    val iterTable = table
+    var idx = lastPopulatedIndex
+    var es = iterTable(idx)
+
+    def hasNext = es != null
+    def next() = {
+      val res = es
+      es = es.next
+      while (es == null && idx > 0) {
+        idx = idx - 1
+        es = iterTable(idx)
+      }
+      res.asInstanceOf[Entry]
+    }
+  }
+
+  /** Avoid iterator for a 2x faster traversal. */
+  def foreachEntry[U](f: Entry => U): Unit = {
+    val iterTable = table
+    var idx = lastPopulatedIndex
+    var es = iterTable(idx)
+
+    while (es != null) {
+      val next = es.next // Cache next in case f removes es.
+ f(es.asInstanceOf[Entry]) + es = next + + while (es == null && idx > 0) { + idx -= 1 + es = iterTable(idx) + } + } + } + + /** Remove all entries from table + */ + def clearTable(): Unit = { + var i = table.length - 1 + while (i >= 0) { table(i) = null; i = i - 1 } + tableSize = 0 + nnSizeMapReset(0) + } + + private def resize(newSize: Int): Unit = { + val oldTable = table + table = new Array(newSize) + nnSizeMapReset(table.length) + var i = oldTable.length - 1 + while (i >= 0) { + var e = oldTable(i) + while (e != null) { + val h = index(elemHashCode(e.key)) + val e1 = e.next + e.next = table(h).asInstanceOf[Entry] + table(h) = e + e = e1 + nnSizeMapAdd(h) + } + i = i - 1 + } + threshold = newThreshold(_loadFactor, newSize) + } + + /* Size map handling code */ + + /* + * The following three sizeMap* functions (Add, Remove, Reset) + * are used to update the size map of the hash table. + * + * The size map logically divides the hash table into `sizeMapBucketSize` element buckets + * by keeping an integer entry for each such bucket. Each integer entry simply denotes + * the number of elements in the corresponding bucket. + * Best understood through an example, see: + * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) + * sizemap = [ 2 | 3 ] (2 entries) + * where sizeMapBucketSize == 4. + * + * By default the size map is not initialized, so these methods don't do anything, thus, + * their impact on hash table performance is negligible. However, if the hash table + * is converted into a parallel hash table, the size map is initialized, as it will be needed + * there. + */ + protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) { + sizemap(h >> sizeMapBucketBitSize) += 1 + } + + protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) { + sizemap(h >> sizeMapBucketBitSize) -= 1 + } + + protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { + val nsize = calcSizeMapSize(tableLength) + if (sizemap.length != nsize) sizemap = new Array[Int](nsize) + else java.util.Arrays.fill(sizemap, 0) + } + + private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize + + protected final def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 + + // discards the previous sizemap and only allocates a new one + protected def sizeMapInit(tableLength: Int): Unit = { + sizemap = new Array[Int](calcSizeMapSize(tableLength)) + } + + // discards the previous sizemap and populates the new one + protected final def sizeMapInitAndRebuild() = { + sizeMapInit(table.length) + + // go through the buckets, count elements + var tableidx = 0 + var bucketidx = 0 + val tbl = table + var tableuntil = 0 + if (tbl.length < sizeMapBucketSize) tableuntil = tbl.length else tableuntil = sizeMapBucketSize + val totalbuckets = totalSizeMapBuckets + while (bucketidx < totalbuckets) { + var currbucketsize = 0 + while (tableidx < tableuntil) { + var e = tbl(tableidx) + while (e ne null) { + currbucketsize += 1 + e = e.next + } + tableidx += 1 + } + sizemap(bucketidx) = currbucketsize + tableuntil += sizeMapBucketSize + bucketidx += 1 + } + } + + private[collection] def printSizeMap() = { + println(sizemap.to(collection.immutable.List)) + } + + protected final def sizeMapDisable() = sizemap = null + + protected final def isSizeMapDefined = sizemap ne null + + // override to automatically initialize the size map + protected def alwaysInitSizeMap = false + + /* End of size map handling code */ + + protected 
def elemEquals(key1: A, key2: A): Boolean = (key1 == key2)
+
+  /**
+   * Note: we take the most significant bits of the hashcode, not the lower ones;
+   * this is of crucial importance when populating the table in parallel.
+   */
+  protected[collection] final def index(hcode: Int): Int = {
+    val ones = table.length - 1
+    val exponent = Integer.numberOfLeadingZeros(ones)
+    (improve(hcode, seedvalue) >>> exponent) & ones
+  }
+}
+
+private[collection] object HashTable {
+  /** The load factor for the hash table (in 0.001 steps).
+   */
+  private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
+  private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible
+
+  private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
+
+  private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt
+
+  private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize)
+
+  trait HashUtils[KeyType] {
+    protected final def sizeMapBucketBitSize = 5
+    // so that:
+    protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize
+
+    protected[collection] def elemHashCode(key: KeyType) = key.##
+
+    /**
+     * Defer to a high-quality hash in [[scala.util.hashing]].
+     * The goal is to distribute across bins as well as possible even if a hash code has low entropy at some bits.
+     *
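+     * The current version byteswaps the hash via `byteswap32` and then rotates it right by
+     * `seed` bits (see `seedvalue`), so tables of different sizes mix the same bits differently.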

+ * OLD VERSION - quick, but bad for sequence 0-10000 - little entropy in higher bits - since 2003 + * {{{ + * var h: Int = hcode + ~(hcode << 9) + * h = h ^ (h >>> 14) + * h = h + (h << 4) + * h ^ (h >>> 10) + * }}} + * the rest of the computation is due to SI-5293 + */ + protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed) + } + + /** + * Returns a power of two >= `target`. + */ + private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) +} + +/** Class used internally. + */ +private[collection] trait HashEntry[A, sealed E <: HashEntry[A, E]] { + val key: A + var next: E = _ +} From f3ed83b748a849b371659b84c884a739acbead81 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 18:38:51 +0100 Subject: [PATCH 094/216] Add HashSet to stdlib --- .../stdlib/collection/Iterator.scala | 2 +- .../collection/StrictOptimizedSeqOps.scala | 2 +- .../stdlib/collection/mutable/HashSet.scala | 457 ++++++++++++++++++ .../stdlib/collection/mutable/MultiMap.scala | 2 +- 4 files changed, 460 insertions(+), 3 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/mutable/HashSet.scala diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index 172cd7c2a282..cf722235008c 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -562,7 +562,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ def distinctBy[B](f: A -> B): Iterator[A]^{this} = new AbstractIterator[A] { - private[this] val traversedValues = mutable.HashSet.empty[B] + private[this] val traversedValues = mutable.HashSet.empty[B @uncheckedCaptures] private[this] var nextElementDefined: Boolean = false private[this] var nextElement: A = _ diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala index 14dea1694d09..7a5c58bf2abf 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -25,7 +25,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def distinctBy[B](f: A -> B): C = { val builder = newSpecificBuilder - val seen = mutable.HashSet.empty[B] + val seen = mutable.HashSet.empty[B @uncheckedCaptures] val it = this.iterator while (it.hasNext) { val next = it.next() diff --git a/tests/pos-special/stdlib/collection/mutable/HashSet.scala b/tests/pos-special/stdlib/collection/mutable/HashSet.scala new file mode 100644 index 000000000000..e8c055ff15ef --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashSet.scala @@ -0,0 +1,457 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** This class implements mutable sets using a hashtable. 
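+ *  Elements that hash to the same bucket are chained in singly-linked `Node`s,
+ *  kept in ascending order of their improved hash (see the invariants on `table` below).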
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]]
+ * section on `Hash Tables` for more information.
+ *
+ * @define Coll `mutable.HashSet`
+ * @define coll mutable hash set
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+final class HashSet[sealed A](initialCapacity: Int, loadFactor: Double)
+  extends AbstractSet[A]
+    with SetOps[A, HashSet, HashSet[A]]
+    with StrictOptimizedIterableOps[A, HashSet, HashSet[A]]
+    with IterableFactoryDefaults[A, HashSet]
+    with Serializable {
+
+  def this() = this(HashSet.defaultInitialCapacity, HashSet.defaultLoadFactor)
+
+  import HashSet.Node
+
+  /* The HashSet class holds the following invariants:
+   * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i.
+   * - Every bucket is sorted in ascending hash order
+   * - The sum of the lengths of all buckets is equal to contentSize.
+   */
+  /** The actual hash table. */
+  private[this] var table = new Array[Node[A]](tableSizeFor(initialCapacity))
+
+  /** The next size value at which to resize (capacity * load factor). */
+  private[this] var threshold: Int = newThreshold(table.length)
+
+  private[this] var contentSize = 0
+
+  override def size: Int = contentSize
+
+  /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash. */
+  @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash)
+
+  /** Computes the improved hash of an original (`any.##`) hash. */
+  private[this] def improveHash(originalHash: Int): Int = {
+    // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the
+    // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement
+    // algorithm as in java.util.HashMap.
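+    // For example, a hash like 0xABCD0000 (all entropy in the high bits) becomes
+    // 0xABCD0000 ^ 0x0000ABCD = 0xABCDABCD, so the low bits used by `index` vary as well.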
+ originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this element */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(elem: A): Boolean = findNode(elem) ne null + + @`inline` private[this] def findNode(elem: A): Node[A] = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case nd => nd.findNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + + override def add(elem: A) : Boolean = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + addElem(elem, computeHash(elem)) + } + + override def addAll(xs: IterableOnce[A]^): this.type = { + sizeHint(xs.knownSize) + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHash((k, h) => addElem(k, improveHash(h))) + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case lhs: mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case _ => super.addAll(xs) + } + } + + override def subtractAll(xs: IterableOnce[A]^): this.type = { + if (size == 0) { + return this + } + + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHashWhile { (k, h) => + remove(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } + } + + /** Adds an element to this set + * @param elem element to add + * @param hash the **improved** hash of `elem` (see computeHash) + */ + private[this] def addElem(elem: A, hash: Int) : Boolean = { + val idx = index(hash) + table(idx) match { + case null => + table(idx) = new Node(elem, hash, null) + case old => + var prev: Node[A] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + if(prev eq null) + table(idx) = new Node(elem, hash, old) + else + prev.next = new Node(elem, hash, prev.next) + } + contentSize += 1 + true + } + + private[this] def remove(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } + + override def remove(elem: A) : Boolean = remove(elem, computeHash(elem)) + + private[this] abstract class HashSetIterator[B] extends AbstractIterator[B] { + private[this] var i = 0 + private[this] var node: Node[A] = null + private[this] val len = table.length + + protected[this] def extract(nd: Node[A]): B + + def hasNext: Boolean = { + if(node ne 
null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } + } + + def next(): B = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } + } + + override def iterator: Iterator[A] = new HashSetIterator[A] { + override protected[this] def extract(nd: Node[A]): A = nd.key + } + + /** Returns an iterator over the nodes stored in this HashSet */ + private[collection] def nodeIterator: Iterator[Node[A]] = new HashSetIterator[Node[A]] { + override protected[this] def extract(nd: Node[A]): Node[A] = nd + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[A]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[A, Node[A]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[A] = new Node(null.asInstanceOf[A], 0, null) + val preHigh: Node[A] = new Node(null.asInstanceOf[A], 0, null) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
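+      // Because the table length is a power of two and index() masks with (length - 1),
+      // each node in bucket i belongs either in bucket i (bit `oldlen` of its hash is 0)
+      // or in bucket i + oldlen (bit is 1); the relative order within a bucket is kept.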
+ while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[A] = preLow + var lastHigh: Node[A] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def filterInPlace(p: A => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + if (p(next.key)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + bucket += 1 + } + } + this + } + + /* + private[mutable] def checkTable(): Unit = { + var i = 0 + var count = 0 + var prev: Node[A] = null + while(i < table.length) { + var n = table(i) + prev = null + while(n != null) { + count += 1 + assert(index(n.hash) == i) + if(prev ne null) assert(prev.hash <= n.hash) + prev = n + n = n.next + } + i += 1 + } + assert(contentSize == count) + } + */ + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def iterableFactory: IterableFactory[HashSet] = HashSet + + @`inline` def addOne(elem: A): this.type = { add(elem); this } + + @`inline` def subtractOne(elem: A): this.type = { remove(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: A => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new HashSet.DeserializationFactory[A](table.length, loadFactor), this) + + override protected[this] def className = "HashSet" + + override def hashCode: Int = { + val setIterator = this.iterator + val hashIterator: Iterator[Any] = + if (setIterator.isEmpty) setIterator + else new HashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[A]): Any = { + hash = unimproveHash(nd.hash) + this + } + } + MurmurHash3.unorderedHash(hashIterator, MurmurHash3.setSeed) + } +} + +/** + * $factoryInfo + * @define Coll `mutable.HashSet` + * @define coll mutable hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + def from[sealed B](it: scala.collection.IterableOnce[B]^): HashSet[B] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashSet[B](cap, defaultLoadFactor) ++= it + } + + def empty[sealed A]: HashSet[A] = new HashSet[A] + + def newBuilder[sealed A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[sealed A](initialCapacity: Int, loadFactor: Double): 
Builder[A, HashSet[A]] = + new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[sealed A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]^): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it + def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor) + } + + private[collection] final class Node[K](_key: K, _hash: Int, private[this] var _next: Node[K]) { + def key: K = _key + def hash: Int = _hash + def next: Node[K] = _next + def next_= (n: Node[K]): Unit = _next = n + + @tailrec + def findNode(k: K, h: Int): Node[K] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: K => U): Unit = { + f(_key) + if(_next ne null) _next.foreach(f) + } + + override def toString = s"Node($key, $hash) -> $next" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala index 0b250a5548ef..281631c92298 100644 --- a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala @@ -52,7 +52,7 @@ import language.experimental.captureChecking * @define Coll `MultiMap` */ @deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0") -trait MultiMap[K, V] extends Map[K, Set[V]] { +trait MultiMap[K, sealed V] extends Map[K, Set[V]] { /** Creates a new set. * * Classes that use this trait as a mixin can override this method From 7d2f133e56af2d59df684ea467df53b7ca6ed8f5 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 18:42:26 +0100 Subject: [PATCH 095/216] Add OpenHashMap to stdlib --- .../collection/mutable/OpenHashMap.scala | 307 ++++++++++++++++++ 1 file changed, 307 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala diff --git a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala new file mode 100644 index 000000000000..f1deb25b6a8a --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala @@ -0,0 +1,307 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package mutable + +import java.lang.Integer.numberOfLeadingZeros +import java.util.ConcurrentModificationException +import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking + +/** + * @define Coll `OpenHashMap` + * @define coll open hash map + */ +@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") +@SerialVersionUID(3L) +object OpenHashMap extends MapFactory[OpenHashMap] { + + def empty[sealed K, sealed V] = new OpenHashMap[K, V] + def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): OpenHashMap[K,V] = empty ++= it + + def newBuilder[sealed K, sealed V]: Builder[(K, V), OpenHashMap[K,V]] = + new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty) + + /** A hash table entry. + * + * The entry is occupied if and only if its `value` is a `Some`; + * deleted if and only if its `value` is `None`. + * If its `key` is not the default value of type `Key`, the entry is occupied. + * If the entry is occupied, `hash` contains the hash value of `key`. + */ + final private class OpenEntry[sealed Key, sealed Value](var key: Key, + var hash: Int, + var value: Option[Value]) + + private[mutable] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) +} + +/** A mutable hash map based on an open addressing method. The precise scheme is + * undefined, but it should make a reasonable effort to ensure that an insert + * with consecutive hash codes is not unnecessarily penalised. In particular, + * mappings of consecutive integer keys should work without significant + * performance loss. + * + * @tparam Key type of the keys in this map. + * @tparam Value type of the values in this map. + * @param initialSize the initial size of the internal hash table. + * + * @define Coll `OpenHashMap` + * @define coll open hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") +class OpenHashMap[sealed Key, sealed Value](initialSize : Int) + extends AbstractMap[Key, Value] + with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]] + with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]] + with MapFactoryDefaults[Key, Value, OpenHashMap, Iterable] + with DefaultSerializable { + + import OpenHashMap.OpenEntry + private type Entry = OpenEntry[Key, Value] + + /** A default constructor creates a hashmap with initial size `8`. + */ + def this() = this(8) + + override def mapFactory: MapFactory[OpenHashMap] = OpenHashMap + + private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize) + + private[this] var mask = actualInitialSize - 1 + + /** The hash table. + * + * The table's entries are initialized to `null`, indication of an empty slot. + * A slot is either deleted or occupied if and only if the entry is non-`null`. + */ + private[this] var table = new Array[Entry](actualInitialSize) + + private[this] var _size = 0 + private[this] var deleted = 0 + + // Used for tracking inserts so that iterators can determine if concurrent modification has occurred. + private[this] var modCount = 0 + + override def size = _size + override def knownSize: Int = size + private[this] def size_=(s : Int): Unit = _size = s + override def isEmpty: Boolean = _size == 0 + /** Returns a mangled hash code of the provided key. 
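+   *  This mixing step (essentially the one used by older versions of `java.util.HashMap`)
+   *  spreads entropy into the low-order bits that `findIndex` uses for probing.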
*/ + protected def hashOf(key: Key) = { + var h = key.## + h ^= ((h >>> 20) ^ (h >>> 12)) + h ^ (h >>> 7) ^ (h >>> 4) + } + + /** Increase the size of the table. + * Copy only the occupied slots, effectively eliminating the deleted slots. + */ + private[this] def growTable() = { + val oldSize = mask + 1 + val newSize = 4 * oldSize + val oldTable = table + table = new Array[Entry](newSize) + mask = newSize - 1 + oldTable.foreach( entry => + if (entry != null && entry.value != None) + table(findIndex(entry.key, entry.hash)) = entry ) + deleted = 0 + } + + /** Return the index of the first slot in the hash table (in probe order) + * that is, in order of preference, either occupied by the given key, deleted, or empty. + * + * @param hash hash value for `key` + */ + private[this] def findIndex(key: Key, hash: Int): Int = { + var index = hash & mask + var j = 0 + + // Index of the first slot containing a deleted entry, or -1 if none found yet + var firstDeletedIndex = -1 + + var entry = table(index) + while (entry != null) { + if (entry.hash == hash && entry.key == key && entry.value != None) + return index + + if (firstDeletedIndex == -1 && entry.value == None) + firstDeletedIndex = index + + j += 1 + index = (index + j) & mask + entry = table(index) + } + + if (firstDeletedIndex == -1) index else firstDeletedIndex + } + + // TODO refactor `put` to extract `findOrAddEntry` and implement this in terms of that to avoid Some boxing. + override def update(key: Key, value: Value): Unit = put(key, value) + + @deprecatedOverriding("addOne should not be overridden in order to maintain consistency with put.", "2.11.0") + def addOne (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this } + + @deprecatedOverriding("subtractOne should not be overridden in order to maintain consistency with remove.", "2.11.0") + def subtractOne (key: Key): this.type = { remove(key); this } + + override def put(key: Key, value: Value): Option[Value] = + put(key, hashOf(key), value) + + private def put(key: Key, hash: Int, value: Value): Option[Value] = { + if (2 * (size + deleted) > mask) growTable() + val index = findIndex(key, hash) + val entry = table(index) + if (entry == null) { + table(index) = new OpenEntry(key, hash, Some(value)) + modCount += 1 + size += 1 + None + } else { + val res = entry.value + if (entry.value == None) { + entry.key = key + entry.hash = hash + size += 1 + deleted -= 1 + modCount += 1 + } + entry.value = Some(value) + res + } + } + + /** Delete the hash table slot contained in the given entry. */ + @`inline` + private[this] def deleteSlot(entry: Entry) = { + entry.key = null.asInstanceOf[Key] + entry.hash = 0 + entry.value = None + + size -= 1 + deleted += 1 + } + + override def remove(key : Key): Option[Value] = { + val entry = table(findIndex(key, hashOf(key))) + if (entry != null && entry.value != None) { + val res = entry.value + deleteSlot(entry) + res + } else None + } + + def get(key : Key) : Option[Value] = { + val hash = hashOf(key) + var index = hash & mask + var entry = table(index) + var j = 0 + while(entry != null){ + if (entry.hash == hash && + entry.key == key){ + return entry.value + } + + j += 1 + index = (index + j) & mask + entry = table(index) + } + None + } + + /** An iterator over the elements of this map. Use of this iterator follows + * the same contract for concurrent modification as the foreach method. 
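+   * (Removing mappings and updating values already present are safe during iteration;
+   * adding new mappings may throw a `ConcurrentModificationException`.)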
+ * + * @return the iterator + */ + def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] { + override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get) + } + + override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] { + override protected def nextResult(node: Entry): Key = node.key + } + override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] { + override protected def nextResult(node: Entry): Value = node.value.get + } + + private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] { + private[this] var index = 0 + private[this] val initialModCount = modCount + + private[this] def advance(): Unit = { + if (initialModCount != modCount) throw new ConcurrentModificationException + while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1 + } + + def hasNext = {advance(); index <= mask } + + def next() = { + advance() + val result = table(index) + index += 1 + nextResult(result) + } + protected def nextResult(node: Entry): A + } + + override def clone() = { + val it = new OpenHashMap[Key, Value] + foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get)) + it + } + + /** Loop over the key, value mappings of this map. + * + * The behaviour of modifying the map during an iteration is as follows: + * - Deleting a mapping is always permitted. + * - Changing the value of mapping which is already present is permitted. + * - Anything else is not permitted. It will usually, but not always, throw an exception. + * + * @tparam U The return type of the specified function `f`, return result of which is ignored. + * @param f The function to apply to each key, value mapping. + */ + override def foreach[U](f : ((Key, Value)) => U): Unit = { + val startModCount = modCount + foreachUndeletedEntry(entry => { + if (modCount != startModCount) throw new ConcurrentModificationException + f((entry.key, entry.value.get))} + ) + } + override def foreachEntry[U](f : (Key, Value) => U): Unit = { + val startModCount = modCount + foreachUndeletedEntry(entry => { + if (modCount != startModCount) throw new ConcurrentModificationException + f(entry.key, entry.value.get)} + ) + } + + private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = { + table.foreach(entry => if (entry != null && entry.value != None) f(entry)) + } + + override def mapValuesInPlace(f : (Key, Value) => Value): this.type = { + foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))) + this + } + + override def filterInPlace(f : (Key, Value) => Boolean): this.type = { + foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry)) + this + } + + override protected[this] def stringPrefix = "OpenHashMap" +} From 9021cfdba6b2331deba17b59f650e9ccb40864f2 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 19:33:46 +0100 Subject: [PATCH 096/216] Add MapView to stdlib --- tests/pos-special/stdlib/collection/Map.scala | 12 +- .../stdlib/collection/MapView.scala | 196 ++++++++++++++++++ 2 files changed, 203 insertions(+), 5 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/MapView.scala diff --git a/tests/pos-special/stdlib/collection/Map.scala b/tests/pos-special/stdlib/collection/Map.scala index 6816a1fd174a..8ab25a3c13e0 100644 --- a/tests/pos-special/stdlib/collection/Map.scala +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -18,6 +18,7 @@ import scala.collection.generic.DefaultSerializable import 
scala.collection.mutable.StringBuilder import scala.util.hashing.MurmurHash3 import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure /** Base Map type */ trait Map[K, +V] @@ -103,8 +104,9 @@ trait Map[K, +V] trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] extends IterableOps[(K, V), Iterable, C] with PartialFunction[K, V] { + this: MapOps[K, V, CC, C]^ => - override def view: MapView[K, V] = new MapView.Id(this) + override def view: MapView[K, V]^{this} = new MapView.Id(this) /** Returns a [[Stepper]] for the keys of this map. See method [[stepper]]. */ def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = { @@ -253,7 +255,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * the predicate `p`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") - def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) + def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. @@ -261,7 +263,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.mapValues(f). A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") - def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) + def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) /** Defines the default value computation for the map, * returned when a key is not found @@ -354,7 +356,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] @deprecated("Consider requiring an immutable Map.", "2.13.0") @`inline` def -- (keys: IterableOnce[K]^): C = { lazy val keysSet = keys.iterator.to(immutable.Set) - fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))) + fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))).unsafeAssumePure } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") @@ -375,7 +377,7 @@ object MapOps { */ @SerialVersionUID(3L) class WithFilter[K, +V, +IterableCC[_], +CC[_, _] <: IterableOps[_, AnyConstr, _]]( - self: MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _], + self: (MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _])^, p: ((K, V)) => Boolean ) extends IterableOps.WithFilter[(K, V), IterableCC](self, p) with Serializable { diff --git a/tests/pos-special/stdlib/collection/MapView.scala b/tests/pos-special/stdlib/collection/MapView.scala new file mode 100644 index 000000000000..ac9e88466052 --- /dev/null +++ b/tests/pos-special/stdlib/collection/MapView.scala @@ -0,0 +1,196 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.annotation.nowarn +import scala.collection.MapView.SomeMapOps +import scala.collection.mutable.Builder +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + +trait MapView[K, +V] + extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]] + with View[(K, V)] { + this: MapView[K, V]^ => + + override def view: MapView[K, V]^{this} = this + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all keys of this map. + * + * @return the keys of this map as a view. + */ + override def keys: Iterable[K]^{this} = new MapView.Keys(this) + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all values of this map. + * + * @return the values of this map as a view. + */ + override def values: Iterable[V]^{this} = new MapView.Values(this) + + /** Filters this map by retaining only keys satisfying a predicate. + * @param p the predicate used to test keys + * @return an immutable map consisting only of those key value pairs of this map where the key satisfies + * the predicate `p`. The resulting map wraps the original map without copying any elements. + */ + override def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) + + /** Transforms this map by applying a function to every retrieved value. + * @param f the function used to transform values of this map. + * @return a map view which maps every key of this map + * to `f(this(key))`. The resulting map wraps the original map without copying any elements. + */ + override def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) + + override def filter(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, false, pred) + + override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, true, pred) + + override def partition(p: ((K, V)) => Boolean): (MapView[K, V]^{this, p}, MapView[K, V]^{this, p}) = (filter(p), filterNot(p)) + + override def tapEach[U](f: ((K, V)) => U): MapView[K, V]^{this, f} = new MapView.TapEach(this, f) + + def mapFactory: MapViewFactory = MapView + + override def empty: MapView[K, V] = mapFactory.empty + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l]^{this, p} = new MapOps.WithFilter(this, p) + + override def toString: String = super[View].toString + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "MapView" +} + +object MapView extends MapViewFactory { + + /** An `IterableOps` whose collection type and collection type constructor are unknown */ + type SomeIterableConstr[X, Y] = IterableOps[_, AnyConstr, _] + /** A `MapOps` whose collection type and collection type constructor are (mostly) unknown */ + type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _] + + @SerialVersionUID(3L) + object EmptyMapView extends AbstractMapView[Any, Nothing] { + // !!! 
cc problem: crash when we replace the line with + // private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { + override def get(key: Any): Option[Nothing] = None + override def iterator: Iterator[Nothing] = Iterator.empty[Nothing] + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + override def filterKeys(p: Any => Boolean): MapView[Any, Nothing] = this + override def mapValues[W](f: Nothing => W): MapView[Any, Nothing] = this + override def filter(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + override def filterNot(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + override def partition(p: ((Any, Nothing)) => Boolean): (MapView[Any, Nothing], MapView[Any, Nothing]) = (this, this) + } + + @SerialVersionUID(3L) + class Id[K, +V](underlying: SomeMapOps[K, V]^) extends AbstractMapView[K, V] { + def get(key: K): Option[V] = underlying.get(key) + def iterator: Iterator[(K, V)]^{this} = underlying.iterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Keys[K](underlying: SomeMapOps[K, _]^) extends AbstractView[K] { + def iterator: Iterator[K]^{this} = underlying.keysIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Values[+V](underlying: SomeMapOps[_, V]^) extends AbstractView[V] { + def iterator: Iterator[V]^{this} = underlying.valuesIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class MapValues[K, +V, +W](underlying: SomeMapOps[K, V]^, f: V => W) extends AbstractMapView[K, W] { + def iterator: Iterator[(K, W)]^{this} = underlying.iterator.map(kv => (kv._1, f(kv._2))) + def get(key: K): Option[W] = underlying.get(key).map(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class FilterKeys[K, +V](underlying: SomeMapOps[K, V]^, p: K => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filter { case (k, _) => p(k) } + def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class Filter[K, +V](underlying: SomeMapOps[K, V]^, isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filterImpl(p, isFlipped) + def get(key: K): Option[V] = underlying.get(key) match { + case s @ Some(v) if p((key, v)) != isFlipped => s + case _ => None + } + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class TapEach[K, +V, +U](underlying: SomeMapOps[K, V]^, f: ((K, V)) => U) extends AbstractMapView[K, V] { + override def get(key: K): Option[V] = { + underlying.get(key) match { + case s @ Some(v) => + f((key, v)) + s + case None => None + } + } + override def iterator: Iterator[(K, V)]^{this} = underlying.iterator.tapEach(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = 
underlying.isEmpty + } + + override def newBuilder[sealed X, sealed Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) + + override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]] + + override def from[K, V](it: IterableOnce[(K, V)]^): View[(K, V)] = + View.from(it).unsafeAssumePure + // unsafeAssumePure needed here since MapViewFactory inherits from MapFactory, + // and the latter assumes maps are strict, so from's result captures nothing. + + override def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} = it match { + case mv: MapView[K, V] => mv + case other => new MapView.Id(other) + } + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) +} + +trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y)]})#l] { + + def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] + + def empty[X, Y]: MapView[X, Y] + + def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) +} + +/** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. */ +@SerialVersionUID(3L) +abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V]: + this: AbstractMapView[K, V]^ => + From cb66516b99c8fd2742c378415af932bdf07ce08b Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 21:34:19 +0100 Subject: [PATCH 097/216] Add StrictOptimizedMapOps to stdlib --- .../stdlib/collection/DefaultMap.scala | 21 +++++++ .../stdlib/collection/Searching.scala | 58 +++++++++++++++++++ .../collection/{mutable => }/SeqMap.scala | 22 +++---- .../collection/StrictOptimizedMapOps.scala | 50 ++++++++++++++++ 4 files changed, 141 insertions(+), 10 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/DefaultMap.scala create mode 100644 tests/pos-special/stdlib/collection/Searching.scala rename tests/pos-special/stdlib/collection/{mutable => }/SeqMap.scala (63%) create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala diff --git a/tests/pos-special/stdlib/collection/DefaultMap.scala b/tests/pos-special/stdlib/collection/DefaultMap.scala new file mode 100644 index 000000000000..baa9eceadae5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/DefaultMap.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +import language.experimental.captureChecking + +/** A default map which builds a default `immutable.Map` implementation for all + * transformations. + */ +@deprecated("DefaultMap is no longer necessary; extend Map directly", "2.13.0") +trait DefaultMap[K, +V] extends Map[K, V] diff --git a/tests/pos-special/stdlib/collection/Searching.scala b/tests/pos-special/stdlib/collection/Searching.scala new file mode 100644 index 000000000000..f5139422e24c --- /dev/null +++ b/tests/pos-special/stdlib/collection/Searching.scala @@ -0,0 +1,58 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.language.implicitConversions +import scala.collection.generic.IsSeq +import language.experimental.captureChecking + +object Searching { + + /** The result of performing a search on a sorted sequence + * + * Example usage: + * + * {{{ + * val list = List(1, 3, 4, 5) // list must be sorted before searching + * list.search(4) // Found(2) + * list.search(2) // InsertionPoint(1) + * }}} + * + * */ + sealed abstract class SearchResult { + /** The index corresponding to the element searched for in the sequence, if it was found, + * or the index where the element would be inserted in the sequence, if it was not in the sequence */ + def insertionPoint: Int + } + + /** The result of performing a search on a sorted sequence, where the element was found. + * + * @param foundIndex the index corresponding to the element searched for in the sequence + */ + case class Found(foundIndex: Int) extends SearchResult { + override def insertionPoint: Int = foundIndex + } + + /** The result of performing a search on a sorted sequence, where the element was not found + * + * @param insertionPoint the index where the element would be inserted in the sequence + */ + case class InsertionPoint(insertionPoint: Int) extends SearchResult + + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + class SearchImpl[Repr, A](private val coll: SeqOps[A, AnyConstr, _]) extends AnyVal + + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + implicit def search[Repr, A](coll: Repr)(implicit fr: IsSeq[Repr]): SearchImpl[Repr, fr.A] = + new SearchImpl(fr.conversion(coll)) +} diff --git a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala b/tests/pos-special/stdlib/collection/SeqMap.scala similarity index 63% rename from tests/pos-special/stdlib/collection/mutable/SeqMap.scala rename to tests/pos-special/stdlib/collection/SeqMap.scala index 5740490223b2..a7f2c629b61d 100644 --- a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala +++ b/tests/pos-special/stdlib/collection/SeqMap.scala @@ -10,13 +10,13 @@ * additional information regarding copyright ownership. */ -package scala -package collection -package mutable +package scala.collection import language.experimental.captureChecking +import scala.annotation.nowarn + /** - * A generic trait for ordered mutable maps. Concrete classes have to provide + * A generic trait for ordered maps. Concrete classes have to provide * functionality for the abstract methods in `SeqMap`. * * Note that when checking for equality [[SeqMap]] does not take into account @@ -24,16 +24,18 @@ import language.experimental.captureChecking * * @tparam K the type of the keys contained in this linked map. * @tparam V the type of the values associated with the keys in this linked map. 
- * - * @define coll mutable Seq map - * @define Coll `mutable.SeqMap` + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` */ -trait SeqMap[K, V] extends Map[K, V] - with collection.SeqMap[K, V] +trait SeqMap[K, +V] extends Map[K, V] with MapOps[K, V, SeqMap, SeqMap[K, V]] with MapFactoryDefaults[K, V, SeqMap, Iterable] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SeqMap" + override def mapFactory: MapFactory[SeqMap] = SeqMap } -object SeqMap extends MapFactory.Delegate[SeqMap](LinkedHashMap) +object SeqMap extends MapFactory.Delegate[immutable.SeqMap](immutable.SeqMap) + diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala new file mode 100644 index 000000000000..a9c5e0af43b3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala @@ -0,0 +1,50 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +/** + * Trait that overrides map operations to take advantage of strict builders. + * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] + extends MapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] + with Pure { + + override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + strictOptimizedMap(mapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = + strictOptimizedFlatMap(mapFactory.newBuilder, f) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = + strictOptimizedConcat(suffix, mapFactory.newBuilder) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = + strictOptimizedCollect(mapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val b = mapFactory.newBuilder[K, V1] + b ++= this + b += elem1 + b += elem2 + if (elems.nonEmpty) b ++= elems + b.result() + } +} From be20a7c58064e63b0402b728ba8b2452cd1809cc Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 21:41:25 +0100 Subject: [PATCH 098/216] Add collection/SortedMap to stdlib --- .../stdlib/collection/SortedMap.scala | 222 ++++++++++++++++++ .../stdlib/collection/mutable/SortedMap.scala | 2 +- 2 files changed, 223 insertions(+), 1 deletion(-) create mode 100644 tests/pos-special/stdlib/collection/SortedMap.scala diff --git a/tests/pos-special/stdlib/collection/SortedMap.scala b/tests/pos-special/stdlib/collection/SortedMap.scala new file mode 100644 index 000000000000..7b9381ebb078 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedMap.scala @@ -0,0 +1,222 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.{implicitNotFound, nowarn} +import language.experimental.captureChecking + +/** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ +trait SortedMap[K, +V] + extends Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{ + + def unsorted: Map[K, V] = this + + def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedMap" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => + (sm canEqual this) && + (this.size == sm.size) && { + val i1 = this.iterator + val i2 = sm.iterator + var allEqual = true + while (allEqual && i1.hasNext) { + val kv1 = i1.next() + val kv2 = i2.next() + allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 + } + allEqual + } + case _ => super.equals(that) + } +} + +trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] + with SortedOps[K, C] + with Pure { + + /** The companion object of this sorted map, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedMapFactory: SortedMapFactory[CC] + + /** Similar to `mapFromIterable`, but returns a SortedMap collection type. + * Note that the return type is now `CC[K2, V2]`. + */ + @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it) + + def unsorted: Map[K, V] + + /** + * Creates an iterator over all the key/value pairs + * contained in this map having a key greater than or + * equal to `start` according to the ordering of + * this map. x.iteratorFrom(y) is equivalent + * to but often more efficient than x.from(y).iterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def iteratorFrom(start: K): Iterator[(K, V)] + + /** + * Creates an iterator over all the keys(or elements) contained in this + * collection greater than or equal to `start` + * according to the ordering of this collection. x.keysIteratorFrom(y) + * is equivalent to but often more efficient than + * x.from(y).keysIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def keysIteratorFrom(start: K): Iterator[K] + + /** + * Creates an iterator over all the values contained in this + * map that are associated with a key greater than or equal to `start` + * according to the ordering of this map. x.valuesIteratorFrom(y) is + * equivalent to but often more efficient than + * x.from(y).valuesIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2) + + def firstKey: K = head._1 + def lastKey: K = last._1 + + /** Find the element with smallest key larger than or equal to a given key. + * @param key The given key. 
+ * @return `None` if there is no such node. + */ + def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption + + /** Find the element with largest key less than a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption + + def rangeTo(to: K): C = { + val i = keySet.rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + override def keySet: SortedSet[K] = new KeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet { + def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that)) + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = SortedMapOps.this.rangeImpl(from, until) + new map.KeySortedSet + } + } + + /** A generic trait that is reused by sorted keyset implementations */ + protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] => + implicit def ordering: Ordering[K] = SortedMapOps.this.ordering + def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start) + } + + // And finally, we add new overloads taking an ordering + /** Builds a new sorted map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new sorted map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
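+   * For example (with a hypothetical `m: SortedMap[Int, String]`):
+   * {{{
+   * m.collect { case (k, v) if k % 2 == 0 => (k / 2, v) }   // keep even keys, halved
+   * }}}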
+ */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + })(ordering) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) +} + +object SortedMapOps { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." + + /** Specializes `MapWithFilter` for sorted Map collections + * + * @define coll sorted map collection + */ + class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( + self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], + p: ((K, V)) => Boolean + ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { + + def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + self.sortedMapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = + self.sortedMapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC]^{this, q} = + new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala index ff0e95c747a5..8017177f5720 100644 --- a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala @@ -92,7 +92,7 @@ object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): SortedMap[K, V2] = + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): SortedMap[K, V2] = underlying.concat(suffix).withDefault(defaultValue) override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = From 766b02051881addf71301c46bc99356c2c25432b Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 21:46:24 +0100 Subject: [PATCH 099/216] Add more Ops classes to stdlib --- .../stdlib/collection/SortedOps.scala | 91 +++++++++++++++++++ .../StrictOptimizedSortedMapOps.scala | 47 ++++++++++ .../StrictOptimizedSortedSetOps.scala | 42 +++++++++ 3 files changed, 180 insertions(+) create mode 100644 
tests/pos-special/stdlib/collection/SortedOps.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala diff --git a/tests/pos-special/stdlib/collection/SortedOps.scala b/tests/pos-special/stdlib/collection/SortedOps.scala new file mode 100644 index 000000000000..16751d86d9d5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedOps.scala @@ -0,0 +1,91 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import language.experimental.captureChecking + +/** Base trait for sorted collections */ +trait SortedOps[A, +C] { + + def ordering: Ordering[A] + + /** Returns the first key of the collection. */ + def firstKey: A + + /** Returns the last key of the collection. */ + def lastKey: A + + /** Comparison function that orders keys. */ + @deprecated("Use ordering.compare instead", "2.13.0") + @deprecatedOverriding("Use ordering.compare instead", "2.13.0") + @inline def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) + + /** Creates a ranged projection of this collection. Any mutations in the + * ranged projection will update this collection and vice versa. + * + * Note: keys are not guaranteed to be consistent between this collection + * and the projection. This is the case for buffers where indexing is + * relative to the projection. + * + * @param from The lower-bound (inclusive) of the ranged projection. + * `None` if there is no lower bound. + * @param until The upper-bound (exclusive) of the ranged projection. + * `None` if there is no upper bound. + */ + def rangeImpl(from: Option[A], until: Option[A]): C + + /** Creates a ranged projection of this collection with both a lower-bound + * and an upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + * @param until The upper-bound (exclusive) of the ranged projection. + */ + def range(from: A, until: A): C = rangeImpl(Some(from), Some(until)) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeFrom", "2.13.0") + final def from(from: A): C = rangeFrom(from) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + def rangeFrom(from: A): C = rangeImpl(Some(from), None) + + /** Creates a ranged projection of this collection with no lower-bound. + * + * @param until The upper-bound (exclusive) of the ranged projection. + */ + @deprecated("Use rangeUntil", "2.13.0") + final def until(until: A): C = rangeUntil(until) + + /** Creates a ranged projection of this collection with no lower-bound. + * + * @param until The upper-bound (exclusive) of the ranged projection. + */ + def rangeUntil(until: A): C = rangeImpl(None, Some(until)) + + /** Create a range projection of this collection with no lower-bound. + * @param to The upper-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeTo", "2.13.0") + final def to(to: A): C = rangeTo(to) + + /** Create a range projection of this collection with no lower-bound. 
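+   * For instance, over the keys `1, 3, 5`, `rangeTo(3)` keeps `1` and `3`,
+   * whereas `rangeUntil(3)` would keep only `1`.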
+ * @param to The upper-bound (inclusive) of the ranged projection. + */ + def rangeTo(to: A): C +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala new file mode 100644 index 000000000000..9a9e6e367922 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala @@ -0,0 +1,47 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound +import language.experimental.captureChecking + +/** + * Trait that overrides sorted map operations to take advantage of strict builders. + * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedMap(sortedMapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) + + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering)) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedCollect(sortedMapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] + if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] + } +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala new file mode 100644 index 000000000000..ded7deabccca --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.implicitNotFound +import scala.annotation.unchecked.uncheckedVariance + +/** + * Trait that overrides sorted set operations to take advantage of strict builders. 
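+ *
+ * As an illustration, the overrides below all follow the same eager pattern,
+ * roughly (a simplified sketch, not the exact helper signatures):
+ * {{{
+ * def mapSketch[B: Ordering](f: A => B): CC[B] = {
+ *   val b  = sortedIterableFactory.newBuilder[B]
+ *   val it = iterator
+ *   while (it.hasNext) b += f(it.next())
+ *   b.result()
+ * }
+ * }}}
+ * This avoids the intermediate lazy `View` that the default implementations go through.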
+ * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { + + override def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedMap(sortedIterableFactory.newBuilder, f) + + override def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedFlatMap(sortedIterableFactory.newBuilder, f) + + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = + strictOptimizedZip(that, sortedIterableFactory.newBuilder[(A, B)]) + + override def collect[B](pf: PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedCollect(sortedIterableFactory.newBuilder, pf) + +} From 36975c108fa46280964c39043380d6a48a8b8c32 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 21:51:15 +0100 Subject: [PATCH 100/216] Add remaining collection classes to stdlib --- .../stdlib/collection/StringParsers.scala | 320 ++++++++++++++++++ .../stdlib/collection/package.scala | 81 +++++ 2 files changed, 401 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/StringParsers.scala create mode 100644 tests/pos-special/stdlib/collection/package.scala diff --git a/tests/pos-special/stdlib/collection/StringParsers.scala b/tests/pos-special/stdlib/collection/StringParsers.scala new file mode 100644 index 000000000000..47281815da71 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StringParsers.scala @@ -0,0 +1,320 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.tailrec +import language.experimental.captureChecking + +/** A module containing the implementations of parsers from strings to numeric types, and boolean + */ +private[scala] object StringParsers { + + //compile-time constant helpers + + //Int.MinValue == -2147483648 + private final val intOverflowBoundary = -214748364 + private final val intOverflowDigit = 9 + //Long.MinValue == -9223372036854775808L + private final val longOverflowBoundary = -922337203685477580L + private final val longOverflowDigit = 9 + + @inline + private[this] final def decValue(ch: Char): Int = java.lang.Character.digit(ch, 10) + + @inline + private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = { + @tailrec + def rec(i: Int, agg: Int): Option[Int] = + if (agg < min) None + else if (i == len) { + if (!isPositive) Some(agg) + else if (agg == min) None + else Some(-agg) + } + else { + val digit = decValue(from.charAt(i)) + if (digit == -1) None + else rec(i + 1, agg * 10 - digit) + } + rec(1, agg) + } + + @inline + private[this] final def isDigit(c: Char): Boolean = c >= '0' && c <= '9' + + //bool + @inline + final def parseBool(from: String): Option[Boolean] = + if (from.equalsIgnoreCase("true")) Some(true) + else if (from.equalsIgnoreCase("false")) Some(false) + else None + + //integral types + final def parseByte(from: String): Option[Byte] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toByte) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, true, Byte.MinValue).map(_.toByte) + else if (first == '+') stepToOverflow(from, len, 0, true, Byte.MinValue).map(_.toByte) + else if (first == '-') stepToOverflow(from, len, 0, false, Byte.MinValue).map(_.toByte) + else None + } + } + + final def parseShort(from: String): Option[Short] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toShort) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, true, Short.MinValue).map(_.toShort) + else if (first == '+') stepToOverflow(from, len, 0, true, Short.MinValue).map(_.toShort) + else if (first == '-') stepToOverflow(from, len, 0, false, Short.MinValue).map(_.toShort) + else None + } + } + + final def parseInt(from: String): Option[Int] = { + val len = from.length() + + @tailrec + def step(i: Int, agg: Int, isPositive: Boolean): Option[Int] = { + if (i == len) { + if (!isPositive) Some(agg) + else if (agg == Int.MinValue) None + else Some(-agg) + } + else if (agg < intOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == intOverflowBoundary && digit == intOverflowDigit)) None + else step(i + 1, (agg * 10) - digit, isPositive) + } + } + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, true) + else if (first == '+') step(1, 0, true) + else if (first == '-') step(1, 0, false) + else None + } + } + + final def parseLong(from: String): Option[Long] = { + //like parseInt, but Longer + val len = from.length() + + @tailrec + def 
step(i: Int, agg: Long, isPositive: Boolean): Option[Long] = { + if (i == len) { + if (isPositive && agg == Long.MinValue) None + else if (isPositive) Some(-agg) + else Some(agg) + } + else if (agg < longOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == longOverflowBoundary && digit == longOverflowDigit)) None + else step(i + 1, agg * 10 - digit, isPositive) + } + } + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first).toLong + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, true) + else if (first == '+') step(1, 0, true) + else if (first == '-') step(1, 0, false) + else None + } + } + + //floating point + final def checkFloatFormat(format: String): Boolean = { + //indices are tracked with a start index which points *at* the first index + //and an end index which points *after* the last index + //so that slice length === end - start + //thus start == end <=> empty slice + //and format.substring(start, end) is equivalent to the slice + + //some utilities for working with index bounds into the original string + @inline + def forAllBetween(start: Int, end: Int, pred: Char => Boolean): Boolean = { + @tailrec + def rec(i: Int): Boolean = i >= end || pred(format.charAt(i)) && rec(i + 1) + rec(start) + } + + //one after last index for the predicate to hold, or `from` if none hold + //may point after the end of the string + @inline + def skipIndexWhile(predicate: Char => Boolean, from: Int, until: Int): Int = { + @tailrec @inline + def rec(i: Int): Int = if ((i < until) && predicate(format.charAt(i))) rec(i + 1) + else i + rec(from) + } + + + def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') || + (ch >= 'a' && ch <= 'f') || + (ch >= 'A' && ch <= 'F')) + + def prefixOK(startIndex: Int, endIndex: Int): Boolean = { + val len = endIndex - startIndex + (len > 0) && { + //the prefix part is + //hexDigits + //hexDigits. + //hexDigits.hexDigits + //.hexDigits + //but not . 
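+          //i.e. a lone "." is rejected; at least one hex digit must accompany it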
+ if (format.charAt(startIndex) == '.') { + (len > 1) && forAllBetween(startIndex + 1, endIndex, isHexDigit) + } else { + val noLeading = skipIndexWhile(isHexDigit, startIndex, endIndex) + (noLeading >= endIndex) || + ((format.charAt(noLeading) == '.') && forAllBetween(noLeading + 1, endIndex, isHexDigit)) + } + } + } + + def postfixOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + (forAllBetween(startIndex, endIndex, isDigit)) || { + val startchar = format.charAt(startIndex) + (startchar == '+' || startchar == '-') && + (endIndex - startIndex > 1) && + forAllBetween(startIndex + 1, endIndex, isDigit) + } + } + // prefix [pP] postfix + val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex) + (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex) + } + + def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + //invariant: endIndex > startIndex + + def isExp(c: Char): Boolean = c == 'e' || c == 'E' + + def expOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + val startChar = format.charAt(startIndex) + if (startChar == '+' || startChar == '-') + (endIndex > (startIndex + 1)) && + skipIndexWhile(isDigit, startIndex + 1, endIndex) == endIndex + else skipIndexWhile(isDigit, startIndex, endIndex) == endIndex + } + + //significant can be one of + //* digits.digits + //* .digits + //* digits. + //but not just . + val startChar = format.charAt(startIndex) + if (startChar == '.') { + val noSignificant = skipIndexWhile(isDigit, startIndex + 1, endIndex) + // a digit is required followed by optional exp + (noSignificant > startIndex + 1) && (noSignificant >= endIndex || + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + ) + } + else if (isDigit(startChar)) { + // one set of digits, then optionally a period, then optionally another set of digits, then optionally an exponent + val noInt = skipIndexWhile(isDigit, startIndex, endIndex) + // just the digits + (noInt == endIndex) || { + if (format.charAt(noInt) == '.') { + val noSignificant = skipIndexWhile(isDigit, noInt + 1, endIndex) + (noSignificant >= endIndex) || //no exponent + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + } else + isExp(format.charAt(noInt)) && expOK(noInt + 1, endIndex) + } + } + else false + } + + //count 0x00 to 0x20 as "whitespace", and nothing else + val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20) + val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1 + + if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false + else { + //all formats can have a sign + val unsigned = { + val startchar = format.charAt(unspacedStart) + if (startchar == '-' || startchar == '+') unspacedStart + 1 else unspacedStart + } + if (unsigned >= unspacedEnd) false + //that's it for NaN and Infinity + else if (format.charAt(unsigned) == 'N') format.substring(unsigned, unspacedEnd) == "NaN" + else if (format.charAt(unsigned) == 'I') format.substring(unsigned, unspacedEnd) == "Infinity" + else { + //all other formats can have a format suffix + val desuffixed = { + val endchar = format.charAt(unspacedEnd - 1) + if (endchar == 'f' || endchar == 'F' || endchar == 'd' || endchar == 'D') unspacedEnd - 1 + else unspacedEnd + } + val len = desuffixed - unsigned + if (len <= 0) false + else if (len >= 2 && (format.charAt(unsigned + 1) == 'x' || format.charAt(unsigned + 1) == 'X')) + format.charAt(unsigned) == '0' && 
isHexFloatLiteral(unsigned + 2, desuffixed) + else isDecFloatLiteral(unsigned, desuffixed) + } + } + } + + @inline + def parseFloat(from: String): Option[Float] = + if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from)) + else None + + @inline + def parseDouble(from: String): Option[Double] = + if (checkFloatFormat(from)) Some(java.lang.Double.parseDouble(from)) + else None + +} diff --git a/tests/pos-special/stdlib/collection/package.scala b/tests/pos-special/stdlib/collection/package.scala new file mode 100644 index 000000000000..ad4686be1fb2 --- /dev/null +++ b/tests/pos-special/stdlib/collection/package.scala @@ -0,0 +1,81 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +import language.experimental.captureChecking + +package object collection { + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + type TraversableOnce[+X] = IterableOnce[X] + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + val TraversableOnce = IterableOnce + @deprecated("Use SeqOps instead of SeqLike", "2.13.0") + type SeqLike[A, T] = SeqOps[A, Seq, T] + @deprecated("Use SeqOps (for the methods) or IndexedSeqOps (for fast indexed access) instead of ArrayLike", "2.13.0") + type ArrayLike[A] = SeqOps[A, Seq, Seq[A]] + + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversableOnce[+X] = IterableOnce[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversableOnce = IterableOnce + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenIterable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenIterable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenSeq[+X] = Seq[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSeq = Seq + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenSet[X] = Set[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSet = Set + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenMap[K, +V] = Map[K, V] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenMap = Map + + /** Needed to circumvent a difficulty between dotty and scalac concerning + * the right top type for a type parameter of kind * -> *. + * In Scalac, we can provide `Any`, as `Any` is kind-polymorphic. In dotty this is not allowed. + * In dotty, we can provide `[X] => Any`. But Scalac does not know lambda syntax. + */ + private[scala] type AnyConstr[X] = Any + + /** An extractor used to head/tail deconstruct sequences. */ + object +: { + /** Splits a sequence into head +: tail. + * @return Some((head, tail)) if sequence is non-empty. None otherwise. 
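+   *         For example, `Seq(1, 2, 3) match { case x +: rest => (x, rest) }`
+   *         evaluates to `(1, Seq(2, 3))`.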
+ */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(A, C)] = + if(t.isEmpty) None + else Some(t.head -> t.tail) + } + + /** An extractor used to init/last deconstruct sequences. */ + object :+ { + /** Splits a sequence into init :+ last. + * @return Some((init, last)) if sequence is non-empty. None otherwise. + */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(C, A)] = + if(t.isEmpty) None + else Some(t.init -> t.last) + } +} From b899f8db9e8bb9d2f924ba95310ec959d32b0f08 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 10:11:31 +0100 Subject: [PATCH 101/216] Add immutable ArraySeq to stdlib --- .../collection/immutable/ArraySeq.scala | 692 ++++++++++++++++++ 1 file changed, 692 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/ArraySeq.scala diff --git a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala new file mode 100644 index 000000000000..3a221fc76b6c --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala @@ -0,0 +1,692 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import java.util.Arrays + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ArraySeq => MutableArraySeq} +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.runtime.ScalaRunTime +import scala.util.Sorting +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** + * An immutable array. + * + * Supports efficient indexed access and has a small memory footprint. + * + * @define coll immutable array + * @define Coll `ArraySeq` + */ +sealed abstract class ArraySeq[+A] + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, ArraySeq, ArraySeq[A]] + with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]] + with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag] + with Serializable + with Pure { + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + protected def elemTag: ClassTag[_] + + override def iterableFactory: SeqFactory[ArraySeq] = ArraySeq.untagged + + /** The wrapped mutable `Array` that backs this `ArraySeq`. Any changes to this array will break + * the expected immutability. Its element type does not have to be equal to the element type of this ArraySeq. + * A primitive ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an + * array of a supertype or subtype of the element type. 
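+   * For example (illustrative): `ArraySeq(1, 2, 3).unsafeArray` is an `Array[Int]`,
+   * while `ArraySeq[Any](1, 2, 3)` is backed by an `Array[AnyRef]` of boxed values.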
*/
+  def unsafeArray: Array[_]
+
+  def unsafeArrayAsAnyArray = unsafeArray.asInstanceOf[Array[Any]]
+
+  protected def evidenceIterableFactory: ArraySeq.type = ArraySeq
+  protected def iterableEvidence: ClassTag[A @uncheckedVariance @uncheckedCaptures] = elemTag.asInstanceOf[ClassTag[A]]
+
+  def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit
+
+  @throws[ArrayIndexOutOfBoundsException]
+  def apply(i: Int): A
+
+  override def updated[B >: A](index: Int, elem: B): ArraySeq[B] = {
+    val dest = new Array[Any](length)
+    Array.copy(unsafeArray, 0, dest, 0, length)
+    dest(index) = elem
+    ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]]
+  }
+
+  override def map[B](f: A => B): ArraySeq[B] = {
+    val a = new Array[Any](size)
+    var i = 0
+    while (i < a.length) {
+      a(i) = f(apply(i))
+      i += 1
+    }
+    ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
+  }
+
+  override def prepended[B >: A](elem: B): ArraySeq[B] =
+    ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.prepended(elem)).asInstanceOf[ArraySeq[B]]
+
+  override def appended[B >: A](elem: B): ArraySeq[B] =
+    ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]]
+
+  /** Fast concatenation of two [[ArraySeq]]s.
+   *
+   * @return null if optimisation not possible.
+   */
+  private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] = {
+    // Optimise concatenation of two ArraySeqs
+    // For ArraySeqs with sizes of [100, 1000, 10000] this is [3.5, 4.1, 5.2]x as fast
+    if (isEmpty)
+      that
+    else if (that.isEmpty)
+      this
+    else {
+      val thisIsObj = this.unsafeArray.isInstanceOf[Array[AnyRef]]
+      val thatIsObj = that.unsafeArray.isInstanceOf[Array[AnyRef]]
+      val mismatch = thisIsObj != thatIsObj
+      if (mismatch)
+        // Combining primitives and objects: abort
+        null
+      else if (thisIsObj) {
+        // A and B are objects
+        val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]]
+        val ay = that.unsafeArray.asInstanceOf[Array[B @uncheckedCaptures]]
+        val len = ax.length + ay.length
+        val a = new Array[AnyRef](len)
+        System.arraycopy(ax, 0, a, 0, ax.length)
+        System.arraycopy(ay, 0, a, ax.length, ay.length)
+        ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
+      } else {
+        // A is a primitive and B = A. Use this instance's protected ClassTag.
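+        // (iterableEvidence is this collection's elemTag, so newArray below
+        // allocates an array of the matching primitive type)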
+ val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] + val ay = that.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] + val len = ax.length + ay.length + val a = iterableEvidence.newArray(len) + System.arraycopy(ax, 0, a, 0, ax.length) + System.arraycopy(ay, 0, a, ax.length, ay.length) + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } + } + } + + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): ArraySeq[B] = { + def genericResult = { + val k = suffix.knownSize + if (k == 0) this + else { + val b = ArrayBuilder.make[Any] + if(k >= 0) b.sizeHint(k + unsafeArray.length) + b.addAll(unsafeArray) + b.addAll(suffix) + ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] + } + } + + suffix match { + case that: ArraySeq[_] => + val result = appendedAllArraySeq(that.asInstanceOf[ArraySeq[B]]) + if (result == null) genericResult + else result + case _ => + genericResult + } + } + + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): ArraySeq[B] = { + def genericResult = { + val k = prefix.knownSize + if (k == 0) this + else { + val b = ArrayBuilder.make[Any] + if(k >= 0) b.sizeHint(k + unsafeArray.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + unsafeArray.length) + b.addAll(unsafeArray) + ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] + } + } + + prefix match { + case that: ArraySeq[_] => + val result = that.asInstanceOf[ArraySeq[B]].appendedAllArraySeq(this) + if (result == null) genericResult + else result + case _ => + genericResult + } + } + + override def zip[B](that: collection.IterableOnce[B]^): ArraySeq[(A, B)] = + that match { + case bs: ArraySeq[B] => + ArraySeq.tabulate(length min bs.length) { i => + (apply(i), bs(i)) + } + case _ => + strictOptimizedZip[B, ArraySeq[(A, B)]](that, iterableFactory.newBuilder) + } + + private inline def ops[A](xs: Array[A @uncheckedCaptures]): ArrayOps[A] = new ArrayOps[A @uncheckedCaptures](xs) + + override def take(n: Int): ArraySeq[A] = + if (unsafeArray.length <= n) + this + else + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).take(n)).asInstanceOf[ArraySeq[A]] + + override def takeRight(n: Int): ArraySeq[A] = + if (unsafeArray.length <= n) + this + else + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).takeRight(n)).asInstanceOf[ArraySeq[A]] + + override def drop(n: Int): ArraySeq[A] = + if (n <= 0) + this + else + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).drop(n)).asInstanceOf[ArraySeq[A]] + + override def dropRight(n: Int): ArraySeq[A] = + if (n <= 0) + this + else + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).dropRight(n)).asInstanceOf[ArraySeq[A]] + + override def slice(from: Int, until: Int): ArraySeq[A] = + if (from <= 0 && unsafeArray.length <= until) + this + else + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).slice(from, until)).asInstanceOf[ArraySeq[A]] + + override def foldLeft[B](z: B)(f: (B, A) => B): B = { + // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast + // as the same while-loop over this instead of unsafeArray. + val array = unsafeArray + var b = z + var i = 0 + while (i < array.length) { + val a = array(i).asInstanceOf[A] + b = f(b, a) + i += 1 + } + b + } + + override def foldRight[B](z: B)(f: (A, B) => B): B = { + // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.6, 1.8, 2.7]x as fast + // as the same while-loop over this instead of unsafeArray. 
+ val array = unsafeArray + var b = z + var i = array.length + while (i > 0) { + i -= 1 + val a = array(i).asInstanceOf[A] + b = f(a, b) + } + b + } + + override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).tail).asInstanceOf[ArraySeq[A]] + + override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).reverse).asInstanceOf[ArraySeq[A]] + + override protected[this] def className = "ArraySeq" + + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(unsafeArray, 0, xs, start, copied) + } + copied + } + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def sorted[B >: A](implicit ord: Ordering[B]): ArraySeq[A] = + if(unsafeArray.length <= 1) this + else { + val a = Array.copyAs[AnyRef](unsafeArray, length)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + new ArraySeq.ofRef[AnyRef](a).asInstanceOf[ArraySeq[A]] + } +} + +/** + * $factoryInfo + * @define coll immutable array + * @define Coll `ArraySeq` + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + private[this] lazy val emptyImpl = new ArraySeq.ofRef[Nothing](new Array[Nothing](0)) + + def empty[A : ClassTag]: ArraySeq[A] = emptyImpl + + def from[A](it: scala.collection.IterableOnce[A]^)(implicit tag: ClassTag[A]): ArraySeq[A] = it match { + case as: ArraySeq[A] => as + case _ => unsafeWrapArray(Array.from[A](it)) + } + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = + ArrayBuffer.newBuilder[A @uncheckedCaptures].mapResult(b => unsafeWrapArray[A](b.toArray)) + + override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem) + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = { + val elements = Array.ofDim[A @uncheckedCaptures](scala.math.max(n, 0)) + var i = 0 + while (i < n) { + ScalaRunTime.array_update(elements, i, f(i)) + i = i + 1 + } + ArraySeq.unsafeWrapArray(elements) + } + + /** + * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type + * without copying. Any changes to wrapped array will break the expected immutability. + * + * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a + * `ClassCastException` at runtime. 
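+   *
+   * For example (illustrative):
+   * {{{
+   * val xs = Array(1, 2, 3)               // Array[Int]
+   * val s  = ArraySeq.unsafeWrapArray(xs) // an ArraySeq.ofInt sharing xs, no copy
+   * xs(0) = 42                            // the change is visible through s: immutability is broken
+   * }}}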
+ */ + def unsafeWrapArray[T](x: Array[T @uncheckedCaptures]): ArraySeq[T] = ((x: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] { + def elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): T = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any): Boolean = that match { + case that: ofRef[_] => + Array.equals( + this.unsafeArray.asInstanceOf[Array[AnyRef]], + that.unsafeArray.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq.ofRef[T] = { + if(unsafeArray.length <= 1) this + else { + val a = unsafeArray.clone() + Arrays.sort(a, ord.asInstanceOf[Ordering[T]]) + new ArraySeq.ofRef(a) + } + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length) + else shape.parUnbox(new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] { + protected def elemTag = ClassTag.Byte + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Byte = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Byte](implicit ord: Ordering[B]): ArraySeq[Byte] = + if(length <= 1) this + else if(ord eq Ordering.Byte) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofByte(a) + } else super.sorted[B] + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Byte](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.prepended(b)) + case _ => 
super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] { + protected def elemTag = ClassTag.Short + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Short = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Short](implicit ord: Ordering[B]): ArraySeq[Short] = + if(length <= 1) this + else if(ord eq Ordering.Short) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofShort(a) + } else super.sorted[B] + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Short](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] { + protected def elemTag = ClassTag.Char + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Char = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Char](implicit ord: Ordering[B]): ArraySeq[Char] = + if(length <= 1) this + else if(ord eq Ordering.Char) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofChar(a) + } else super.sorted[B] + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Char](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + + override def addString(sb: StringBuilder, start: String, sep: String, end: 
String): sb.type = + (new MutableArraySeq.ofChar(unsafeArray)).addString(sb, start, sep, end) + } + + @SerialVersionUID(3L) + final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] { + protected def elemTag = ClassTag.Int + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Int = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Int](implicit ord: Ordering[B]): ArraySeq[Int] = + if(length <= 1) this + else if(ord eq Ordering.Int) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofInt(a) + } else super.sorted[B] + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new IntArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Int](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] { + protected def elemTag = ClassTag.Long + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Long = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Long](implicit ord: Ordering[B]): ArraySeq[Long] = + if(length <= 1) this + else if(ord eq Ordering.Long) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofLong(a) + } else super.sorted[B] + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new LongArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Long](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofFloat(val unsafeArray: Array[Float]) 
extends ArraySeq[Float] { + protected def elemTag = ClassTag.Float + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Float = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Float](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] { + protected def elemTag = ClassTag.Double + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Double = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Double](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofBoolean(val unsafeArray: Array[Boolean]) extends ArraySeq[Boolean] { + protected def elemTag = ClassTag.Boolean + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Boolean = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Boolean](implicit ord: Ordering[B]): ArraySeq[Boolean] = + if(length <= 1) this + else if(ord 
eq Ordering.Boolean) { + val a = unsafeArray.clone() + Sorting.stableSort(a) + new ArraySeq.ofBoolean(a) + } else super.sorted[B] + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] + override def updated[B >: Boolean](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] { + protected def elemTag = ClassTag.Unit + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Unit = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofUnit => unsafeArray.length == that.unsafeArray.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](unsafeArray.asInstanceOf[Array[AnyRef]], 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] + } +} From 1f3fe9eb930f53f01c818b1bfe2ab02dcb0ed554 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 10:32:23 +0100 Subject: [PATCH 102/216] Add immutable HashSet and HashMap to stdlib --- .../collection/immutable/ChampCommon.scala | 253 ++ .../stdlib/collection/immutable/HashMap.scala | 2425 +++++++++++++++++ .../stdlib/collection/immutable/HashSet.scala | 2125 +++++++++++++++ 3 files changed, 4803 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/ChampCommon.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/HashMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/HashSet.scala diff --git a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala new file mode 100644 index 000000000000..fc9bcb022874 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala @@ -0,0 +1,253 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */
+
+package scala.collection.immutable
+
+
+import java.lang.Integer.bitCount
+import java.lang.Math.ceil
+import java.lang.System.arraycopy
+import language.experimental.captureChecking
+
+private[collection] object Node {
+  final val HashCodeLength = 32
+
+  final val BitPartitionSize = 5
+
+  final val BitPartitionMask = (1 << BitPartitionSize) - 1
+
+  final val MaxDepth = ceil(HashCodeLength.toDouble / BitPartitionSize).toInt
+
+  final val BranchingFactor = 1 << BitPartitionSize
+
+  final def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask
+
+  final def bitposFrom(mask: Int): Int = 1 << mask
+
+  final def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1))
+
+  final def indexFrom(bitmap: Int, mask: Int, bitpos: Int): Int = if (bitmap == -1) mask else indexFrom(bitmap, bitpos)
+
+}
+
+private[collection] abstract class Node[T <: Node[T]] {
+
+  def hasNodes: Boolean
+
+  def nodeArity: Int
+
+  def getNode(index: Int): T
+
+  def hasPayload: Boolean
+
+  def payloadArity: Int
+
+  def getPayload(index: Int): Any
+
+  def getHash(index: Int): Int
+
+  def cachedJavaKeySetHashCode: Int
+
+  private final def arrayIndexOutOfBounds(as: Array[_], ix: Int): ArrayIndexOutOfBoundsException =
+    new ArrayIndexOutOfBoundsException(s"$ix is out of bounds (min 0, max ${as.length - 1})")
+
+  protected final def removeElement(as: Array[Int], ix: Int): Array[Int] = {
+    if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
+    if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix)
+    val result = new Array[Int](as.length - 1)
+    arraycopy(as, 0, result, 0, ix)
+    arraycopy(as, ix + 1, result, ix, as.length - ix - 1)
+    result
+  }
+
+  protected final def removeAnyElement(as: Array[Any], ix: Int): Array[Any] = {
+    if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
+    if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix)
+    val result = new Array[Any](as.length - 1)
+    arraycopy(as, 0, result, 0, ix)
+    arraycopy(as, ix + 1, result, ix, as.length - ix - 1)
+    result
+  }
+
+  protected final def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
+    if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
+    if (ix > as.length) throw arrayIndexOutOfBounds(as, ix)
+    val result = new Array[Int](as.length + 1)
+    arraycopy(as, 0, result, 0, ix)
+    result(ix) = elem
+    arraycopy(as, ix, result, ix + 1, as.length - ix)
+    result
+  }
+  protected final def insertAnyElement(as: Array[Any], ix: Int, elem: Int): Array[Any] = {
+    if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
+    if (ix > as.length) throw arrayIndexOutOfBounds(as, ix)
+    val result = new Array[Any](as.length + 1)
+    arraycopy(as, 0, result, 0, ix)
+    result(ix) = elem
+    arraycopy(as, ix, result, ix + 1, as.length - ix)
+    result
+  }
+}
+
+/**
+ * Base class for fixed-stack iterators that traverse a hash-trie. The iterator performs a
+ * depth-first pre-order traversal, which yields first all payload elements of the current
+ * node before traversing sub-nodes (left to right).
+ *
+ * @tparam T the trie node type we are iterating over
+ */
+private[immutable] abstract class ChampBaseIterator[T <: Node[T]] {
+
+  import Node.MaxDepth
+
+  // Note: this code is duplicated to a large extent both in
+  // ChampBaseReverseIterator and in convert.impl.ChampStepperBase.
+  // If you change this code, check those also in case they also
+  // need to be modified.
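+  //
+  // Traversal state: `nodes` acts as an explicit stack of trie nodes from the
+  // root downwards, while `nodeCursorsAndLengths` interleaves, for each stack
+  // level, the cursor of the next sub-node to visit (index 2 * level) and that
+  // node's arity (index 2 * level + 1).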
+ + protected var currentValueCursor: Int = 0 + protected var currentValueLength: Int = 0 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] var nodeCursorsAndLengths: Array[Int] = _ + private[this] var nodes: Array[T] = _ + private def initNodes(): Unit = { + if (nodeCursorsAndLengths eq null) { + nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) + nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] + } + } + + def this(rootNode: T) = { + this() + if (rootNode.hasNodes) pushNode(rootNode) + if (rootNode.hasPayload) setupPayloadNode(rootNode) + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = 0 + currentValueLength = node.payloadArity + } + + private final def pushNode(node: T): Unit = { + initNodes() + currentStackLevel = currentStackLevel + 1 + + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + nodes(currentStackLevel) = node + nodeCursorsAndLengths(cursorIndex) = 0 + nodeCursorsAndLengths(lengthIndex) = node.nodeArity + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for next node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. + */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + val nodeCursor = nodeCursorsAndLengths(cursorIndex) + val nodeLength = nodeCursorsAndLengths(lengthIndex) + + if (nodeCursor < nodeLength) { + nodeCursorsAndLengths(cursorIndex) += 1 + + val nextNode = nodes(currentStackLevel).getNode(nodeCursor) + + if (nextNode.hasNodes) { pushNode(nextNode) } + if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } + } else { + popNode() + } + } + + return false + } + + final def hasNext = (currentValueCursor < currentValueLength) || searchNextValueNode() + +} + +/** + * Base class for fixed-stack iterators that traverse a hash-trie in reverse order. The base + * iterator performs a depth-first post-order traversal, traversing sub-nodes (right to left). + * + * @tparam T the trie node type we are iterating over + */ +private[immutable] abstract class ChampBaseReverseIterator[T <: Node[T]] { + + import Node.MaxDepth + + protected var currentValueCursor: Int = -1 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] val nodeIndex: Array[Int] = new Array[Int](MaxDepth + 1) + private[this] val nodeStack: Array[T] = new Array[Node[T]](MaxDepth + 1).asInstanceOf[Array[T]] + + def this(rootNode: T) = { + this() + pushNode(rootNode) + searchNextValueNode() + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = node.payloadArity - 1 + } + + private final def pushNode(node: T): Unit = { + currentStackLevel = currentStackLevel + 1 + + nodeStack(currentStackLevel) = node + nodeIndex(currentStackLevel) = node.nodeArity - 1 + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for rightmost node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. 
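+   *
+   * For illustration (hypothetical trie): if the root holds payload elements `a`, `b` and a
+   * single sub-node holding `c`, the reverse iterator yields `c`, then `b`, then `a`.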
+   */
+  private final def searchNextValueNode(): Boolean = {
+    while (currentStackLevel >= 0) {
+      val nodeCursor = nodeIndex(currentStackLevel) ; nodeIndex(currentStackLevel) = nodeCursor - 1
+
+      if (nodeCursor >= 0) {
+        val nextNode = nodeStack(currentStackLevel).getNode(nodeCursor)
+        pushNode(nextNode)
+      } else {
+        val currNode = nodeStack(currentStackLevel)
+        popNode()
+
+        if (currNode.hasPayload) { setupPayloadNode(currNode) ; return true }
+      }
+    }
+
+    return false
+  }
+
+  final def hasNext = (currentValueCursor >= 0) || searchNextValueNode()
+
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala
new file mode 100644
index 000000000000..d2f144baa934
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/HashMap.scala
@@ -0,0 +1,2425 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.immutable
+
+import java.lang.Integer.bitCount
+import java.lang.System.arraycopy
+
+import scala.annotation.unchecked.{uncheckedVariance => uV}
+import scala.collection.Hashing.improve
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable, mutable.ReusableBuilder
+import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable}
+import scala.runtime.AbstractFunction2
+import scala.runtime.Statics.releaseFence
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree.
+ * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details.
+ *
+ * @tparam K the type of the keys contained in this hash map.
+ * @tparam V the type of the values associated with the keys in this hash map.
+ *
+ * @define Coll `immutable.HashMap`
+ * @define coll immutable champ hash map
+ */
+
+final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: BitmapIndexedMapNode[K, V])
+  extends AbstractMap[K, V]
+    with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]]
+    with MapFactoryDefaults[K, V, HashMap, Iterable]
+    with DefaultSerializable {
+
+  def this() = this(MapNode.empty)
+
+  // This release fence is present because rootNode may have previously been mutated during construction.
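+  // (The fence guarantees those prior writes are visible before the new map is published.)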
+ releaseFence() + + override def mapFactory: MapFactory[HashMap] = HashMap + + override def knownSize: Int = rootNode.size + + override def size: Int = rootNode.size + + override def isEmpty: Boolean = rootNode.size == 0 + + override def keySet: Set[K] = if (size == 0) Set.empty else new HashKeySet + + private final class HashKeySet extends ImmutableKeySet { + + private[this] def newKeySetOrThis(newHashMap: HashMap[K, _]): Set[K] = + if (newHashMap eq HashMap.this) this else newHashMap.keySet + private[this] def newKeySetOrThis(newRootNode: BitmapIndexedMapNode[K, _]): Set[K] = + if (newRootNode eq rootNode) this else new HashMap(newRootNode).keySet + + override def incl(elem: K): Set[K] = { + val originalHash = elem.## + val improvedHash = improve(originalHash) + val newNode = rootNode.updated(elem, null.asInstanceOf[V], originalHash, improvedHash, 0, replaceValue = false) + newKeySetOrThis(newNode) + } + override def excl(elem: K): Set[K] = newKeySetOrThis(HashMap.this - elem) + override def filter(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filter(kv => pred(kv._1))) + override def filterNot(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filterNot(kv => pred(kv._1))) + } + + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleIterator[K, V](rootNode) + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapKeyIterator[K, V](rootNode) + } + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else new MapValueIterator[K, V](rootNode) + } + + protected[immutable] def reverseIterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleReverseIterator[K, V](rootNode) + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. 
parUnbox(collection.convert.impl.AnyChampStepper.from[(K, V), MapNode[K, V]](size, rootNode, (node, i) => node.getPayload(i)))
+
+  override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = {
+    import collection.convert.impl._
+    val s = shape.shape match {
+      case StepperShape.IntShape    => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Int])
+      case StepperShape.LongShape   => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Long])
+      case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Double])
+      case _ => shape.parUnbox(AnyChampStepper.from[K, MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i)))
+    }
+    s.asInstanceOf[S with EfficientSplit]
+  }
+
+  override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = {
+    import collection.convert.impl._
+    val s = shape.shape match {
+      case StepperShape.IntShape    => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Int])
+      case StepperShape.LongShape   => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Long])
+      case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Double])
+      case _ => shape.parUnbox(AnyChampStepper.from[V, MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i)))
+    }
+    s.asInstanceOf[S with EfficientSplit]
+  }
+
+  override final def contains(key: K): Boolean = {
+    val keyUnimprovedHash = key.##
+    val keyHash = improve(keyUnimprovedHash)
+    rootNode.containsKey(key, keyUnimprovedHash, keyHash, 0)
+  }
+
+  override def apply(key: K): V = {
+    val keyUnimprovedHash = key.##
+    val keyHash = improve(keyUnimprovedHash)
+    rootNode.apply(key, keyUnimprovedHash, keyHash, 0)
+  }
+
+  def get(key: K): Option[V] = {
+    val keyUnimprovedHash = key.##
+    val keyHash = improve(keyUnimprovedHash)
+    rootNode.get(key, keyUnimprovedHash, keyHash, 0)
+  }
+
+  override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
+    val keyUnimprovedHash = key.##
+    val keyHash = improve(keyUnimprovedHash)
+    rootNode.getOrElse(key, keyUnimprovedHash, keyHash, 0, default)
+  }
+
+  @`inline` private[this] def newHashMapOrThis[V1 >: V](newRootNode: BitmapIndexedMapNode[K, V1]): HashMap[K, V1] =
+    if (newRootNode eq rootNode) this else new HashMap(newRootNode)
+
+  def updated[V1 >: V](key: K, value: V1): HashMap[K, V1] = {
+    val keyUnimprovedHash = key.##
+    newHashMapOrThis(rootNode.updated(key, value, keyUnimprovedHash, improve(keyUnimprovedHash), 0, replaceValue = true))
+  }
+
+  // preemptively overridden in anticipation of performance optimizations
+  override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): HashMap[K, V1] =
+    super.updatedWith[V1](key)(remappingFunction)
+
+  def removed(key: K): HashMap[K, V] = {
+    val keyUnimprovedHash = key.##
+    newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0))
+  }
+
+  override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]^): HashMap[K, V1] = that match {
+    case hm: HashMap[K, V1] =>
+      if (isEmpty) hm
+      else {
+        val newNode = rootNode.concat(hm.rootNode, 0)
+        if (newNode eq hm.rootNode) hm
+        else newHashMapOrThis(newNode)
+      }
+    case hm: mutable.HashMap[K @unchecked, V @unchecked] =>
+      val iter = hm.nodeIterator
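+      // `nodeIterator` exposes each entry together with its stored hash, so the loop below can
+      // reuse `unimproveHash` on the cached hash instead of recomputing `key.##` for every entry.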
+ var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case lhm: mutable.LinkedHashMap[K @unchecked, V @unchecked] => + val iter = lhm.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case _ => + class accum extends AbstractFunction2[K, V1, Unit] with Function1[(K, V1), Unit] { + var changed = false + var shallowlyMutableNodeMap: Int = 0 + var current: BitmapIndexedMapNode[K, V1] = rootNode + def apply(kv: (K, V1)) = apply(kv._1, kv._2) + def apply(key: K, value: V1): Unit = { + val originalHash = key.## + val improved = improve(originalHash) + if (!changed) { + current = current.updated(key, value, originalHash, improved, 0, replaceValue = true) + if (current ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since key->value has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. 
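+            // For illustration (hypothetical hash): if improve(originalHash) yields mask 5 at
+            // shift 0, then bitposFrom(5) == 1 << 5 == 0x20, so only that one root slot is
+            // marked as shallowly mutable below.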
+ changed = true + shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + } + } else { + shallowlyMutableNodeMap = current.updateWithShallowMutations(key, value, originalHash, improved, 0, shallowlyMutableNodeMap) + } + } + } + that match { + case thatMap: Map[K, V1] => + if (thatMap.isEmpty) this + else { + val accum = new accum + thatMap.foreachEntry(accum) + newHashMapOrThis(accum.current) + } + case _ => + val it = that.iterator + if (it.isEmpty) this + else { + val accum = new accum + it.foreach(accum) + newHashMapOrThis(accum.current) + } + } + } + + override def tail: HashMap[K, V] = this - head._1 + + override def init: HashMap[K, V] = this - last._1 + + override def head: (K, V) = iterator.next() + + override def last: (K, V) = reverseIterator.next() + + override def foreach[U](f: ((K, V)) => U): Unit = rootNode.foreach(f) + + override def foreachEntry[U](f: (K, V) => U): Unit = rootNode.foreachEntry(f) + + /** Applies a function to each key, value, and **original** hash value in this Map */ + @`inline` private[collection] def foreachWithHash(f: (K, V, Int) => Unit): Unit = rootNode.foreachWithHash(f) + + override def equals(that: Any): Boolean = + that match { + case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode) + case _ => super.equals(that) + } + + override def hashCode(): Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + // Optimized to avoid recomputation of key hashcodes as these are cached in the nodes and can be assumed to be + // immutable. + val hashIterator = new MapKeyValueTupleHashIterator(rootNode) + val hash = MurmurHash3.unorderedHash(hashIterator, MurmurHash3.mapSeed) + // assert(hash == super.hashCode()) + hash + } + } + + override protected[this] def className = "HashMap" + + /** Merges this HashMap with an other HashMap by combining all key-value pairs of both maps, and delegating to a merge + * function to resolve any key collisions between the two HashMaps. + * + * @example {{{ + * val left = HashMap(1 -> 1, 2 -> 1) + * val right = HashMap(2 -> 2, 3 -> 2) + * + * val merged = left.merged(right){ case ((k0, v0), (k1, v1)) => (k0 + k1) -> (v0 + v1) } + * // HashMap(1 -> 1, 3 -> 2, 4 -> 3) + * + * }}} + * + * @param that the HashMap to merge this HashMap with + * @param mergef the merge function which resolves collisions between the two HashMaps. If `mergef` is null, then + * keys from `this` will overwrite keys from `that`, making the behaviour equivalent to + * `that.concat(this)` + * + * @note In cases where `mergef` returns keys which themselves collide with other keys returned by `merge`, or + * found in `this` or `that`, it is not defined which value will be chosen. 
For example:
+   *
+   * Colliding multiple results of merging:
+   * {{{
+   *   // key `3` collides between a result of merging keys `1` and `2`
+   *   val left = HashMap(1 -> 1, 2 -> 2)
+   *   val right = HashMap(1 -> 1, 2 -> 2)
+   *
+   *   val merged = left.merged(right){ case (_, (_, v1)) => 3 -> v1 }
+   *   // HashMap(3 -> 2) is returned, but it could also have returned HashMap(3 -> 1)
+   * }}}
+   * Colliding results of merging with other keys:
+   * {{{
+   *   // key `2` collides between a result of merging `1`, and existing key `2`
+   *   val left = HashMap(1 -> 1, 2 -> 1)
+   *   val right = HashMap(1 -> 2)
+   *
+   *   val merged = left.merged(right)((_,_) => 2 -> 3)
+   *   // HashMap(2 -> 1) is returned, but it could also have returned HashMap(2 -> 3)
+   * }}}
+   *
+   */
+  def merged[V1 >: V](that: HashMap[K, V1])(mergef: ((K, V), (K, V1)) => (K, V1)): HashMap[K, V1] =
+    if (mergef == null) {
+      that ++ this
+    } else {
+      if (isEmpty) that
+      else if (that.isEmpty) this
+      else if (size == 1) {
+        val payload@(k, v) = rootNode.getPayload(0)
+        val originalHash = rootNode.getHash(0)
+        val improved = improve(originalHash)
+
+        if (that.rootNode.containsKey(k, originalHash, improved, 0)) {
+          val thatPayload = that.rootNode.getTuple(k, originalHash, improved, 0)
+          val (mergedK, mergedV) = mergef(payload, thatPayload)
+          val mergedOriginalHash = mergedK.##
+          val mergedImprovedHash = improve(mergedOriginalHash)
+          new HashMap(that.rootNode.removed(thatPayload._1, originalHash, improved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true))
+        } else {
+          new HashMap(that.rootNode.updated(k, v, originalHash, improved, 0, replaceValue = true))
+        }
+      } else if (that.size == 1) {
+        val thatPayload@(k, v) = that.rootNode.getPayload(0)
+        val thatOriginalHash = that.rootNode.getHash(0)
+        val thatImproved = improve(thatOriginalHash)
+
+        if (rootNode.containsKey(k, thatOriginalHash, thatImproved, 0)) {
+          val payload = rootNode.getTuple(k, thatOriginalHash, thatImproved, 0)
+          val (mergedK, mergedV) = mergef(payload, thatPayload)
+          val mergedOriginalHash = mergedK.##
+          val mergedImprovedHash = improve(mergedOriginalHash)
+          new HashMap(rootNode.updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true))
+        } else {
+          new HashMap(rootNode.updated(k, v, thatOriginalHash, thatImproved, 0, replaceValue = true))
+        }
+      } else {
+        val builder = new HashMapBuilder[K, V1]
+        rootNode.mergeInto(that.rootNode, builder, 0)(mergef)
+        builder.result()
+      }
+    }
+
+  override def transform[W](f: (K, V) => W): HashMap[K, W] =
+    newHashMapOrThis(rootNode.transform[Any](f)).asInstanceOf[HashMap[K, W]]
+
+  override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): HashMap[K, V] = {
+    val newRootNode = rootNode.filterImpl(pred, isFlipped)
+    if (newRootNode eq rootNode) this
+    else if (newRootNode.size == 0) HashMap.empty
+    else new HashMap(newRootNode)
+  }
+
+  override def removedAll(keys: IterableOnce[K]): HashMap[K, V] = {
+    if (isEmpty) {
+      this
+    } else {
+      keys match {
+        case hashSet: HashSet[K] =>
+          if (hashSet.isEmpty) {
+            this
+          } else {
+            // TODO: Remove all keys from the hashSet in a sub-linear fashion by only visiting the nodes in the tree
+            // This can be a direct port of the implementation of `SetNode[A]#diff(SetNode[A])`
+            val newRootNode = new MapNodeRemoveAllSetNodeIterator(hashSet.rootNode).removeAll(rootNode)
+            if (newRootNode eq rootNode) this
+            else if (newRootNode.size <= 0) HashMap.empty
+            else new HashMap(newRootNode)
+          }
+        case hashSet: collection.mutable.HashSet[K] =>
+          if (hashSet.isEmpty) {
+            this
+          } else {
+            val iter = hashSet.nodeIterator
+            var curr = rootNode
+
+            while (iter.hasNext) {
+              val next = iter.next()
+              val originalHash = hashSet.unimproveHash(next.hash)
+              val improved = improve(originalHash)
+              curr = curr.removed(next.key, originalHash, improved, 0)
+              if (curr.size == 0) {
+                return HashMap.empty
+              }
+            }
+            newHashMapOrThis(curr)
+          }
+        case lhashSet: collection.mutable.LinkedHashSet[K] =>
+          if (lhashSet.isEmpty) {
+            this
+          } else {
+            val iter = lhashSet.entryIterator
+            var curr = rootNode
+
+            while (iter.hasNext) {
+              val next = iter.next()
+              val originalHash = lhashSet.unimproveHash(next.hash)
+              val improved = improve(originalHash)
+              curr = curr.removed(next.key, originalHash, improved, 0)
+              if (curr.size == 0) {
+                return HashMap.empty
+              }
+            }
+            newHashMapOrThis(curr)
+          }
+        case _ =>
+          val iter = keys.iterator
+          var curr = rootNode
+          while (iter.hasNext) {
+            val next = iter.next()
+            val originalHash = next.##
+            val improved = improve(originalHash)
+            curr = curr.removed(next, originalHash, improved, 0)
+            if (curr.size == 0) {
+              return HashMap.empty
+            }
+          }
+          newHashMapOrThis(curr)
+      }
+    }
+  }
+
+  override def partition(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `partition` could be optimized to traverse the trie node-by-node, splitting each node into two,
+    // based on the result of applying `p` to its elements and subnodes.
+    super.partition(p)
+  }
+
+  override def take(n: Int): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `take` could be optimized to construct a new trie structure by visiting each node, and including
+    // those nodes in the resulting trie, until `n` total elements have been included.
+    super.take(n)
+  }
+
+  override def takeRight(n: Int): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `takeRight` could be optimized to construct a new trie structure by visiting each node in reverse,
+    // and including those nodes in the resulting trie, until `n` total elements have been included.
+    super.takeRight(n)
+  }
+
+  override def takeWhile(p: ((K, V)) => Boolean): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `takeWhile` could be optimized to construct a new trie structure by visiting each node, and
+    // including those nodes in the resulting trie, until `p` returns `false`
+    super.takeWhile(p)
+  }
+
+  override def dropWhile(p: ((K, V)) => Boolean): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `dropWhile` could be optimized to construct a new trie structure by visiting each node, and
+    // dropping those nodes in the resulting trie, until `p` returns `true`
+    super.dropWhile(p)
+  }
+
+  override def dropRight(n: Int): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, in reverse
+    // order, and dropping all nodes until `n` elements have been dropped
+    super.dropRight(n)
+  }
+
+  override def drop(n: Int): HashMap[K, V] = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `drop` could be optimized to construct a new trie structure by visiting each node, and
+    // dropping all nodes until `n` elements have been dropped
+    super.drop(n)
+  }
+
+  override def span(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = {
+    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+    // in a minor release without breaking binary compatibility.
+    //
+    // In particular, `span` could be optimized to construct a new trie structure by visiting each node, and
+    // keeping each node and element until `p` returns false, then including the remaining nodes in the second result.
+    // This would avoid having to rebuild most of the trie, and would eliminate the need to perform hashing and equality
+    // checks.
+    super.span(p)
+  }
+
+}
+
+private[immutable] object MapNode {
+
+  private final val EmptyMapNode = new BitmapIndexedMapNode(0, 0, Array.empty, Array.empty, 0, 0)
+
+  def empty[K, V]: BitmapIndexedMapNode[K, V] = EmptyMapNode.asInstanceOf[BitmapIndexedMapNode[K, V]]
+
+  final val TupleLength = 2
+
+}
+
+
+private[immutable] sealed abstract class MapNode[K, +V] extends Node[MapNode[K, V @uV]] {
+  def apply(key: K, originalHash: Int, hash: Int, shift: Int): V
+
+  def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V]
+
+  def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1
+
+  def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean
+
+  /** Returns a MapNode with the passed key-value assignment added
+   *
+   * @param key the key to add to the MapNode
+   * @param value the value to associate with `key`
+   * @param originalHash the original hash of `key`
+   * @param hash the improved hash of `key`
+   * @param shift the shift of the node (distanceFromRoot * BitPartitionSize)
+   * @param replaceValue if true, then the value currently associated to `key` will be replaced with the passed value
+   *                     argument.
+   *                     if false, then the key will be inserted if not already present, however if the key is present
+   *                     then the passed value will not replace the current value. That is, if `false`, then this
+   *                     method has `update if not exists` semantics.
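+   *
+   * For illustration (hypothetical node and hashes), given a node that already maps `"a" -> 1`:
+   * {{{
+   *   node.updated("a", 2, hash, improvedHash, 0, replaceValue = true)  // result maps "a" -> 2
+   *   node.updated("a", 2, hash, improvedHash, 0, replaceValue = false) // result still maps "a" -> 1
+   * }}}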
+ */ + def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] + + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): MapNode[K, V] + + def hasPayload: Boolean + + def payloadArity: Int + + def getKey(index: Int): K + + def getValue(index: Int): V + + def getPayload(index: Int): (K, V) + + def size: Int + + def foreach[U](f: ((K, V)) => U): Unit + + def foreachEntry[U](f: (K, V) => U): Unit + + def foreachWithHash(f: (K, V, Int) => Unit): Unit + + def transform[W](f: (K, V) => W): MapNode[K, W] + + def copy(): MapNode[K, V] + + def concat[V1 >: V](that: MapNode[K, V1], shift: Int): MapNode[K, V1] + + def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): MapNode[K, V] + + /** Merges this node with that node, adding each resulting tuple to `builder` + * + * `this` should be a node from `left` hashmap in `left.merged(right)(mergef)` + * + * @param that node from the "right" HashMap. Must also be at the same "path" or "position" within the right tree, + * as `this` is, within the left tree + */ + def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit + + /** Returns the exact (equal by reference) key, and value, associated to a given key. + * If the key is not bound to a value, then an exception is thrown + */ + def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) + + /** Adds all key-value pairs to a builder */ + def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit +} + +private final class BitmapIndexedMapNode[K, +V]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends MapNode[K, V] { + + releaseFence() + + import MapNode._ + import Node._ + + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) + + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new MapKeyValueTupleIterator[K, V](this).size - payloadArity >= 2 * nodeArity + + private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length + + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == false) + + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == true) + + predicate1 && predicate2 && predicate3 + } + */ + + def getKey(index: Int): K = content(TupleLength * index).asInstanceOf[K] + def getValue(index: Int): V = content(TupleLength * index + 1).asInstanceOf[V] + + def getPayload(index: Int) = Tuple2( + content(TupleLength * index).asInstanceOf[K], + content(TupleLength * index + 1).asInstanceOf[V]) + + override def getHash(index: Int): Int = originalHashes(index) + + def getNode(index: Int): MapNode[K, V] = + content(content.length - 1 - index).asInstanceOf[MapNode[K, V]] + + def apply(key: K, originalHash: Int, keyHash: Int, shift: Int): V = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + if (key == getKey(index)) getValue(index) else throw new NoSuchElementException(s"key not found: $key") + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, 
bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + throw new NoSuchElementException(s"key not found: $key") + } + } + + def get(key: K, originalHash: Int, keyHash: Int, shift: Int): Option[V] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) Some(this.getValue(index)) else None + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).get(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + None + } + } + + override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { + val mask = maskFrom(hash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val payload = getPayload(index) + if (key == payload._1) payload else throw new NoSuchElementException + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + getNode(index).getTuple(key, originalHash, hash, shift + BitPartitionSize) + } else { + throw new NoSuchElementException + } + } + + def getOrElse[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int, f: => V1): V1 = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) getValue(index) else f + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).getOrElse(key, originalHash, keyHash, shift + BitPartitionSize, f) + } else { + f + } + } + + override def containsKey(key: K, originalHash: Int, keyHash: Int, shift: Int): Boolean = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + // assert(hashes(index) == computeHash(this.getKey(index)), (hashes.toSeq, content.toSeq, index, key, keyHash, shift)) + (originalHashes(index) == originalHash) && key == getKey(index) + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, bitpos)).containsKey(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + false + } + } + + + def updated[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, replaceValue: Boolean): BitmapIndexedMapNode[K, V1] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + if (replaceValue) { + val value0 = this.getValue(index) + if ((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef])) + this + else copyAndSetValue(bitpos, key, value) + } else this + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + + copyAndMigrateFromInlineToNode(bitpos, key0Hash, subNodeNew) + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeNew = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, 
replaceValue) + + if (subNodeNew eq subNode) this else copyAndSetNode(bitpos, subNode, subNodeNew) + } else copyAndInsertValue(bitpos, key, originalHash, keyHash, value) + } + + /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately + * descendant child nodes (only one level beneath `this`) + * + * The caller should pass a bitmap of child nodes of this node, which this method may mutate. + * If this method may mutate a child node, then if the updated key-value belongs in that child node, it will + * be shallowly mutated (its children will not be mutated). + * + * If instead this method may not mutate the child node in which the to-be-updated key-value pair belongs, then + * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. + * + * @param key the key to update + * @param value the value to set `key` to + * @param originalHash key.## + * @param keyHash the improved hash + * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated + * during the call to this method + * + * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be + * available for mutations in subsequent calls. + */ + def updateWithShallowMutations[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + val value0 = this.getValue(index) + if (!((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]))) { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + content(idx + 1) = value + } + shallowlyMutableNodeMap + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) + shallowlyMutableNodeMap | bitpos + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeSize = subNode.size + val subNodeHashCode = subNode.cachedJavaKeySetHashCode + + var returnMutableNodeMap = shallowlyMutableNodeMap + + val subNodeNew: MapNode[K, V1] = subNode match { + case subNodeBm: BitmapIndexedMapNode[K, V] if (bitpos & shallowlyMutableNodeMap) != 0 => + subNodeBm.updateWithShallowMutations(key, value, originalHash, keyHash, shift + BitPartitionSize, 0) + subNodeBm + case _ => + val result = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue = true) + if (result ne subNode) { + returnMutableNodeMap |= bitpos + } + result + } + + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size = this.size - subNodeSize + subNodeNew.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeHashCode + subNodeNew.cachedJavaKeySetHashCode + returnMutableNodeMap + } else { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 
element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + this.dataMap |= bitpos + this.content = dst + this.originalHashes = insertElement(originalHashes, dataIx, originalHash) + this.size += 1 + this.cachedJavaKeySetHashCode += keyHash + shallowlyMutableNodeMap + } + } + + def removed[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int): BitmapIndexedMapNode[K, V1] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + + if (key0 == key) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + /* + * Create new node with remaining pair. The new node will a) either become the new root + * returned, or b) unwrapped and inlined during returning. + */ + val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(keyHash, 0)) + if (index == 0) + new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(1), getValue(1)), Array(originalHashes(1)), 1, improve(getHash(1))) + else + new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(0), getValue(0)), Array(originalHashes(0)), 1, improve(getHash(0))) + } else copyAndRemoveValue(bitpos, keyHash) + } else this + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.removed(key, originalHash, keyHash, shift + BitPartitionSize) + // assert(subNodeNew.size != 0, "Sub-node must have at least one element.") + + if (subNodeNew eq subNode) return this + + // cache just in case subNodeNew is a hashCollision node, in which in which case a little arithmetic is avoided + // in Vector#length + val subNodeNewSize = subNodeNew.size + + if (subNodeNewSize == 1) { + if (this.size == subNode.size) { + // subNode is the only child (no other data or node children of `this` exist) + // escalate (singleton or empty) result + subNodeNew.asInstanceOf[BitmapIndexedMapNode[K, V]] + } else { + // inline value (move to front) + copyAndMigrateFromNodeToInline(bitpos, subNode, subNodeNew) + } + } else if (subNodeNewSize > 1) { + // modify current node (set replacement node) + copyAndSetNode(bitpos, subNode, subNodeNew) + } else this + } else this + } + + def mergeTwoKeyValPairs[V1 >: V](key0: K, value0: V1, originalHash0: Int, keyHash0: Int, key1: K, value1: V1, originalHash1: Int, keyHash1: Int, shift: Int): MapNode[K, V1] = { + // assert(key0 != key1) + + if (shift >= HashCodeLength) { + new HashCollisionMapNode[K, V1](originalHash0, keyHash0, Vector((key0, value0), (key1, value1))) + } else { + val mask0 = maskFrom(keyHash0, shift) + val mask1 = maskFrom(keyHash1, shift) + val newCachedHash = keyHash0 + keyHash1 + + if (mask0 != mask1) { + // unique prefixes, payload fits on same level + val dataMap = bitposFrom(mask0) | bitposFrom(mask1) + + if (mask0 < mask1) { + new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key0, value0, key1, value1), Array(originalHash0, originalHash1), 2, newCachedHash) + } else { + new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key1, value1, key0, value0), Array(originalHash1, originalHash0), 2, newCachedHash) + } + } else { + // identical prefixes, payload must be disambiguated deeper in the trie + val nodeMap = bitposFrom(mask0) + val node = mergeTwoKeyValPairs(key0, value0, originalHash0, keyHash0, key1, value1, originalHash1, keyHash1, shift + BitPartitionSize) + new 
BitmapIndexedMapNode[K, V1](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) + } + } + } + + def hasNodes: Boolean = nodeMap != 0 + + def nodeArity: Int = bitCount(nodeMap) + + def hasPayload: Boolean = dataMap != 0 + + def payloadArity: Int = bitCount(dataMap) + + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) + + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) + + def copyAndSetValue[V1 >: V](bitpos: Int, newKey: K, newValue: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + //dst(idx) = newKey + dst(idx + 1) = newValue + new BitmapIndexedMapNode[K, V1](dataMap, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) + } + + def copyAndSetNode[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], newNode: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedMapNode[K, V1]( + dataMap, + nodeMap, + dst, + originalHashes, + size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } + + def copyAndInsertValue[V1 >: V](bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + new BitmapIndexedMapNode[K, V1](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + keyHash) + } + + def copyAndRemoveValue(bitpos: Int, keyHash: Int): BitmapIndexedMapNode[K, V] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - TupleLength) + + // copy 'src' and remove 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + new BitmapIndexedMapNode[K, V](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - keyHash) + } + + /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. 
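+   * (Used by `updateWithShallowMutations`, which may mutate the root node in place.)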
+ * + * @param bitpos the bit position of the data to migrate to node + * @param keyHash the improved hash of the key currently at `bitpos` + * @param node the node to place at `bitpos` beneath `this` + */ + def migrateFromInlineToNodeInPlace[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): this.type = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + this.dataMap = dataMap ^ bitpos + this.nodeMap = nodeMap | bitpos + this.content = dst + this.originalHashes = dstHashes + this.size = size - 1 + node.size + this.cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + this + } + + def copyAndMigrateFromInlineToNode[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap ^ bitpos, + nodeMap = nodeMap | bitpos, + content = dst, + originalHashes = dstHashes, + size = size - 1 + node.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + ) + } + + def copyAndMigrateFromNodeToInline[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val idxNew = TupleLength * dataIxNew + + val key = node.getKey(0) + val value = node.getValue(0) + val src = this.content + val dst = new Array[Any](src.length - 1 + TupleLength) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 2 element(s) at position 'idxNew' + // assert(idxOld >= idxNew) + arraycopy(src, 0, dst, 0, idxNew) + dst(idxNew) = key + dst(idxNew + 1) = value + arraycopy(src, idxNew, dst, idxNew + TupleLength, idxOld - idxNew) + arraycopy(src, idxOld + 1, dst, idxOld + TupleLength, src.length - idxOld - 1) + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap | bitpos, + nodeMap = nodeMap ^ bitpos, + content = dst, + originalHashes = dstHashes, + size = size - oldNode.size + 1, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + ) + } + + override def foreach[U](f: ((K, V)) => U): Unit = { + val iN = payloadArity // 
arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreach(f) + j += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getKey(i), getValue(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachEntry(f) + j += 1 + } + } + + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + var i = 0 + val iN = payloadArity // arity doesn't change during this operation + while (i < iN) { + f(getKey(i), getValue(i), getHash(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 + } + } + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + var i = 0 + val iN = payloadArity + val jN = nodeArity + while (i < iN) { + builder.addOne(getKey(i), getValue(i), getHash(i)) + i += 1 + } + + var j = 0 + while (j < jN) { + getNode(j).buildTo(builder) + j += 1 + } + } + + override def transform[W](f: (K, V) => W): BitmapIndexedMapNode[K, W] = { + var newContent: Array[Any] = null + val iN = payloadArity // arity doesn't change during this operation + val jN = nodeArity // arity doesn't change during this operation + val newContentLength = content.length + var i = 0 + while (i < iN) { + val key = getKey(i) + val value = getValue(i) + val newValue = f(key, value) + if (newContent eq null) { + if (newValue.asInstanceOf[AnyRef] ne value.asInstanceOf[AnyRef]) { + newContent = content.clone() + newContent(TupleLength * i + 1) = newValue + } + } else { + newContent(TupleLength * i + 1) = newValue + } + i += 1 + } + + var j = 0 + while (j < jN) { + val node = getNode(j) + val newNode = node.transform(f) + if (newContent eq null) { + if (newNode ne node) { + newContent = content.clone() + newContent(newContentLength - j - 1) = newNode + } + } else + newContent(newContentLength - j - 1) = newNode + j += 1 + } + if (newContent eq null) this.asInstanceOf[BitmapIndexedMapNode[K, W]] + else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent, originalHashes, size, cachedJavaKeySetHashCode) + } + + override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) { + that.buildTo(builder) + return + } else if (bm.size == 0) { + buildTo(builder) + return + } + + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + val minIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maxIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + { + var index = minIndex + var leftIdx = 0 + var rightIdx = 0 + + while (index < maxIndex) { + val bitpos = bitposFrom(index) + + if ((bitpos & dataMap) != 0) { + val leftKey = getKey(leftIdx) + val leftValue = getValue(leftIdx) + val leftOriginalHash = getHash(leftIdx) + if ((bitpos & bm.dataMap) != 0) { + // left data and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + if (leftOriginalHash == rightOriginalHash && leftKey == rightKey) { + builder.addOne(mergef((leftKey, leftValue), (rightKey, rightValue))) + } else { + 
builder.addOne(leftKey, leftValue, leftOriginalHash) + builder.addOne(rightKey, rightValue, rightOriginalHash) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + // left data and right node + val subNode = bm.getNode(bm.nodeIndex(bitpos)) + val leftImprovedHash = improve(leftOriginalHash) + val removed = subNode.removed(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap in leftData and rightNode, just build both children to builder + subNode.buildTo(builder) + builder.addOne(leftKey, leftValue, leftOriginalHash, leftImprovedHash) + } else { + // there is collision, so special treatment for that key + removed.buildTo(builder) + builder.addOne(mergef((leftKey, leftValue), subNode.getTuple(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize))) + } + } else { + // left data and nothing on right + builder.addOne(leftKey, leftValue, leftOriginalHash) + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + // left node and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + val rightImprovedHash = improve(rightOriginalHash) + + val subNode = getNode(nodeIndex(bitpos)) + val removed = subNode.removed(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap in leftNode and rightData, just build both children to builder + subNode.buildTo(builder) + builder.addOne(rightKey, rightValue, rightOriginalHash, rightImprovedHash) + } else { + // there is collision, so special treatment for that key + removed.buildTo(builder) + builder.addOne(mergef(subNode.getTuple(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize), (rightKey, rightValue))) + } + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // left node and right node + getNode(nodeIndex(bitpos)).mergeInto(bm.getNode(bm.nodeIndex(bitpos)), builder, shift + BitPartitionSize)(mergef) + } else { + // left node and nothing on right + getNode(nodeIndex(bitpos)).buildTo(builder) + } + } else if ((bitpos & bm.dataMap) != 0) { + // nothing on left, right data + val dataIndex = bm.dataIndex(bitpos) + builder.addOne(bm.getKey(dataIndex),bm.getValue(dataIndex), bm.getHash(dataIndex)) + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // nothing on left, right node + bm.getNode(bm.nodeIndex(bitpos)).buildTo(builder) + } + + index += 1 + } + } + case _: HashCollisionMapNode[_, _] => + throw new Exception("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode") + } + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedMapNode[_, _] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false + } + + @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true + var i = 0 + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) + i += 1 + } + + isEqual + } + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def concat[V1 >: V](that: MapNode[K, 
V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getKey(0), bm.getValue(0), originalHash, improve(originalHash), shift, replaceValue = true) + } + // if we go through the merge and the result does not differ from `bm`, we can just return `bm`, to improve sharing + // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result, and `bm` + var anyChangesMadeSoFar = false + + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + // minimumIndex is inclusive -- it is the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumIndex is inclusive -- it is the last index for which there is data or nodes + // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound + // of int bitposition representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataRightOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + val leftOriginalHash = getHash(leftIdx) + if (leftOriginalHash == bm.getHash(rightIdx) && getKey(leftIdx) == bm.getKey(rightIdx)) { + leftDataRightDataRightOverwrites |= bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(leftOriginalHash), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataRightOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets + + + if ((newDataMap == (rightDataOnly | leftDataRightDataRightOverwrites)) && (newNodeMap == rightNodeOnly)) { + // nothing from `this` will make it into the result -- return early + return bm + } + + val newDataSize = bitCount(newDataMap) + val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) + + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 + + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 + + val nextShift = 
shift + Node.BitPartitionSize + + var compressedDataIdx = 0 + var compressedNodeIdx = 0 + + var bitpos = minimumBitPos + var finished = false + + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val rightNode = bm.getNode(rightNodeIdx) + val newNode = getNode(leftNodeIdx).concat(rightNode, nextShift) + if (rightNode ne newNode) { + anyChangesMadeSoFar = true + } + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftKey = getKey(leftDataIdx) + val leftValue = getValue(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + + val updated = n.updated(leftKey, leftValue, leftOriginalHash, leftImproved, nextShift, replaceValue = false) + + if (updated ne n) { + anyChangesMadeSoFar = true + } + + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } + else if ((bitpos & leftNodeRightData) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + getNode(leftNodeIdx).updated( + key = bm.getKey(rightDataIdx), + value = bm.getValue(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift, + replaceValue = true + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = originalHashes(leftDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = getKey(leftDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = getValue(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = getNode(leftNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val 
rightOriginalHash = bm.getHash(rightDataIdx)
+
+              bm.mergeTwoKeyValPairs(
+                getKey(leftDataIdx), getValue(leftDataIdx), leftOriginalHash, improve(leftOriginalHash),
+                bm.getKey(rightDataIdx), bm.getValue(rightDataIdx), rightOriginalHash, improve(rightOriginalHash),
+                nextShift
+              )
+            }
+
+            newContent(newContentSize - compressedNodeIdx - 1) = newNode
+            compressedNodeIdx += 1
+            leftDataIdx += 1
+            rightDataIdx += 1
+            newSize += newNode.size
+            newCachedHashCode += newNode.cachedJavaKeySetHashCode
+          } else if ((bitpos & leftDataRightDataRightOverwrites) != 0) {
+            val originalHash = bm.originalHashes(rightDataIdx)
+            newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef]
+            newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef]
+            newOriginalHashes(compressedDataIdx) = originalHash
+
+            compressedDataIdx += 1
+            rightDataIdx += 1
+            newSize += 1
+            newCachedHashCode += improve(originalHash)
+            leftDataIdx += 1
+          }
+
+          if (bitpos == maximumBitPos) {
+            finished = true
+          } else {
+            bitpos = bitpos << 1
+          }
+        }
+      }
+
+      if (anyChangesMadeSoFar)
+        new BitmapIndexedMapNode(
+          dataMap = newDataMap,
+          nodeMap = newNodeMap,
+          content = newContent,
+          originalHashes = newOriginalHashes,
+          size = newSize,
+          cachedJavaKeySetHashCode = newCachedHashCode
+        )
+      else bm
+
+    case _ =>
+      // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes
+      throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode")
+  }
+
+  override def copy(): BitmapIndexedMapNode[K, V] = {
+    val contentClone = content.clone()
+    val contentLength = contentClone.length
+    var i = bitCount(dataMap) * TupleLength
+    while (i < contentLength) {
+      contentClone(i) = contentClone(i).asInstanceOf[MapNode[K, V]].copy()
+      i += 1
+    }
+    new BitmapIndexedMapNode[K, V](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode)
+  }
+
+  override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): BitmapIndexedMapNode[K, V] = {
+    if (size == 0) this
+    else if (size == 1) {
+      if (pred(getPayload(0)) != flipped) this else MapNode.empty
+    } else if (nodeMap == 0) {
+      // Performance optimization for nodes of depth 1:
+      //
+      // this node has no "node" children, all children are inlined data elems, therefore the logic is significantly simpler
+      // approach:
+      //   * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter
+      //   * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations
+      //   * traverse the content array once more, placing each passing element (according to `newDataMap`) in the new content and originalHashes arrays
+      //
+      // note:
+      //   * this optimization significantly improves performance of not only small trees, but also larger trees, since
+      //     even non-root nodes are affected by this improvement, and large trees will consist of many nodes as
+      //     descendants
+      //
+      val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap)
+      val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap)
+
+      var newDataMap = 0
+      var newCachedHashCode = 0
+      var dataIndex = 0
+
+      var i = minimumIndex
+
+      while (i < maximumIndex) {
+        val bitpos = bitposFrom(i)
+
+        if ((bitpos & dataMap) != 0) {
+          val payload = getPayload(dataIndex)
+          val passed = pred(payload) != flipped
+
+          if (passed) {
+            newDataMap |= bitpos
newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } + + i += 1 + } + + if (newDataMap == 0) { + MapNode.empty + } else if (newDataMap == dataMap) { + this + } else { + val newSize = Integer.bitCount(newDataMap) + val newContent = new Array[Any](newSize * TupleLength) + val newOriginalHashCodes = new Array[Int](newSize) + val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) + + var j = Integer.numberOfTrailingZeros(newDataMap) + + var newDataIndex = 0 + + + while (j < newMaximumIndex) { + val bitpos = bitposFrom(j) + if ((bitpos & newDataMap) != 0) { + val oldIndex = indexFrom(dataMap, bitpos) + newContent(newDataIndex * TupleLength) = content(oldIndex * TupleLength) + newContent(newDataIndex * TupleLength + 1) = content(oldIndex * TupleLength + 1) + newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) + newDataIndex += 1 + } + j += 1 + } + + new BitmapIndexedMapNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) + } + + + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] = null + + // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[MapNode[K, V]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty[MapNode[K, V] @uncheckedCaptures] + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue() + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + + if (newSize == 0) { + MapNode.empty + } else if (newSize == size) { + this + } else { + val newDataSize = bitCount(newDataMap) + val 
newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap)
+        val newContent = new Array[Any](newContentSize)
+        val newOriginalHashes = new Array[Int](newDataSize)
+
+        val newAllMap = newDataMap | newNodeMap
+        val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap)
+
+        // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will
+        // not be incremented properly. Were it not for that, we could have started at Integer.numberOfTrailingZeros(newAllMap)
+        var i = minimumIndex
+
+        var oldDataIndex = 0
+        var oldNodeIndex = 0
+
+        var newDataIndex = 0
+        var newNodeIndex = 0
+
+        while (i < maxIndex) {
+          val bitpos = bitposFrom(i)
+
+          if ((bitpos & oldDataPassThrough) != 0) {
+            newContent(newDataIndex * TupleLength) = getKey(oldDataIndex)
+            newContent(newDataIndex * TupleLength + 1) = getValue(oldDataIndex)
+            newOriginalHashes(newDataIndex) = getHash(oldDataIndex)
+            newDataIndex += 1
+            oldDataIndex += 1
+          } else if ((bitpos & nodesToPassThroughMap) != 0) {
+            newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex)
+            newNodeIndex += 1
+            oldNodeIndex += 1
+          } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) {
+            // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesToMigrateToData must not be null
+            val node = nodesToMigrateToData.dequeue()
+            newContent(TupleLength * newDataIndex) = node.getKey(0)
+            newContent(TupleLength * newDataIndex + 1) = node.getValue(0)
+            newOriginalHashes(newDataIndex) = node.getHash(0)
+            newDataIndex += 1
+            oldNodeIndex += 1
+          } else if ((bitpos & mapOfNewNodes) != 0) {
+            newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue()
+            newNodeIndex += 1
+            oldNodeIndex += 1
+          } else if ((bitpos & dataMap) != 0) {
+            oldDataIndex += 1
+          } else if ((bitpos & nodeMap) != 0) {
+            oldNodeIndex += 1
+          }
+
+          i += 1
+        }
+
+        new BitmapIndexedMapNode[K, V](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode)
+      }
+    }
+  }
+}
+
+private final class HashCollisionMapNode[K, +V](
+    val originalHash: Int,
+    val hash: Int,
+    var content: Vector[(K, V @uV) @uncheckedCaptures]
+  ) extends MapNode[K, V] {
+
+  import Node._
+
+  require(content.length >= 2)
+
+  releaseFence()
+
+  private[immutable] def indexOf(key: Any): Int = {
+    val iter = content.iterator
+    var i = 0
+    while (iter.hasNext) {
+      if (iter.next()._1 == key) return i
+      i += 1
+    }
+    -1
+  }
+
+  def size: Int = content.length
+
+  def apply(key: K, originalHash: Int, hash: Int, shift: Int): V = get(key, originalHash, hash, shift).getOrElse(throw new NoSuchElementException)
+
+  def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] =
+    if (this.hash == hash) {
+      val index = indexOf(key)
+      if (index >= 0) Some(content(index)._2) else None
+    } else None
+
+  override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = {
+    val index = indexOf(key)
+    if (index >= 0) content(index) else throw new NoSuchElementException
+  }
+
+  def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 = {
+    if (this.hash == hash) {
+      indexOf(key) match {
+        case -1 => f
+        case other => content(other)._2
+      }
+    } else f
+  }
+
+  override def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean =
+    this.hash == hash && indexOf(key) >= 0
+
+  def contains[V1 >: V](key: K, value: V1, hash: Int, shift: Int): Boolean =
+    this.hash == hash && {
+      val index = indexOf(key)
+      index >= 0 && (content(index)._2.asInstanceOf[AnyRef] eq
value.asInstanceOf[AnyRef]) + } + + def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] = { + val index = indexOf(key) + if (index >= 0) { + if (replaceValue) { + if (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) { + this + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.updated[(K, V1)](index, (key, value))) + } + } else { + this + } + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.appended[(K, V1)]((key, value))) + } + } + + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] = { + if (!this.containsKey(key, originalHash, hash, shift)) { + this + } else { + val updatedContent = content.filterNot(keyValuePair => keyValuePair._1 == key) + // assert(updatedContent.size == content.size - 1) + + updatedContent.size match { + case 1 => + val (k, v) = updatedContent(0) + new BitmapIndexedMapNode[K, V1](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + case _ => new HashCollisionMapNode[K, V1](originalHash, hash, updatedContent) + } + } + } + + def hasNodes: Boolean = false + + def nodeArity: Int = 0 + + def getNode(index: Int): MapNode[K, V] = + throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") + + def hasPayload: Boolean = true + + def payloadArity: Int = content.length + + def getKey(index: Int): K = getPayload(index)._1 + def getValue(index: Int): V = getPayload(index)._2 + + def getPayload(index: Int): (K, V) = content(index) + + override def getHash(index: Int): Int = originalHash + + def foreach[U](f: ((K, V)) => U): Unit = content.foreach(f) + + def foreachEntry[U](f: (K, V) => U): Unit = content.foreach { case (k, v) => f(k, v)} + + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next._1, next._2, originalHash) + } + } + + override def transform[W](f: (K, V) => W): HashCollisionMapNode[K, W] = { + val newContent = Vector.newBuilder[(K, W)] + val contentIter = content.iterator + // true if any values have been transformed to a different value via `f` + var anyChanges = false + while(contentIter.hasNext) { + val (k, v) = contentIter.next() + val newValue = f(k, v) + newContent.addOne((k, newValue)) + anyChanges ||= (v.asInstanceOf[AnyRef] ne newValue.asInstanceOf[AnyRef]) + } + if (anyChanges) new HashCollisionMapNode(originalHash, hash, newContent.result()) + else this.asInstanceOf[HashCollisionMapNode[K, W]] + } + + override def equals(that: Any): Boolean = + that match { + case node: HashCollisionMapNode[_, _] => + (this eq node) || + (this.hash == node.hash) && + (this.content.length == node.content.length) && { + val iter = content.iterator + while (iter.hasNext) { + val (key, value) = iter.next() + val index = node.indexOf(key) + if (index < 0 || value != node.content(index)._2) { + return false + } + } + true + } + case _ => false + } + + override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): HashCollisionMapNode[K, V1] = that match { + case hc: HashCollisionMapNode[K, V1] => + if (hc eq this) { + this + } else { + var newContent: VectorBuilder[(K, V1)] = null + val iter = content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (hc.indexOf(nextPayload._1) < 0) { + if (newContent eq null) { + newContent = new VectorBuilder[(K, V1)]() + newContent.addAll(hc.content) + } + newContent.addOne(nextPayload) + } + } + if 
(newContent eq null) hc else new HashCollisionMapNode(originalHash, hash, newContent.result()) + } + case _: BitmapIndexedMapNode[K, V1] => + // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") + } + + + override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case hc: HashCollisionMapNode[K, V1] => + val iter = content.iterator + val rightArray = hc.content.toArray[AnyRef] // really Array[(K, V1)] + + def rightIndexOf(key: K): Int = { + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if ((elem ne null) && (elem.asInstanceOf[(K, V1)])._1 == key) return i + i += 1 + } + -1 + } + + while (iter.hasNext) { + val nextPayload = iter.next() + val index = rightIndexOf(nextPayload._1) + + if (index == -1) { + builder.addOne(nextPayload) + } else { + val rightPayload = rightArray(index).asInstanceOf[(K, V1)] + rightArray(index) = null + + builder.addOne(mergef(nextPayload, rightPayload)) + } + } + + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if (elem ne null) builder.addOne(elem.asInstanceOf[(K, V1)]) + i += 1 + } + case _: BitmapIndexedMapNode[K, V1] => + throw new Exception("Cannot merge HashCollisionMapNode with BitmapIndexedMapNode") + + } + + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + builder.addOne(k, v, originalHash, hash) + } + } + + override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): MapNode[K, V] = { + val newContent = content.filterImpl(pred, flipped) + val newContentLength = newContent.length + if (newContentLength == 0) { + MapNode.empty + } else if (newContentLength == 1) { + val (k, v) = newContent.head + new BitmapIndexedMapNode[K, V](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + } else if (newContentLength == content.length) this + else new HashCollisionMapNode(originalHash, hash, newContent) + } + + override def copy(): HashCollisionMapNode[K, V] = new HashCollisionMapNode[K, V](originalHash, hash, content) + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def cachedJavaKeySetHashCode: Int = size * hash + +} + +private final class MapKeyIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[K] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val key = currentValueNode.getKey(currentValueCursor) + currentValueCursor += 1 + + key + } + +} + +private final class MapValueIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[V] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val value = currentValueNode.getValue(currentValueCursor) + currentValueCursor += 1 + + value + } +} + +private final class MapKeyValueTupleIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class MapKeyValueTupleReverseIterator[K, V](rootNode: 
MapNode[K, V]) + extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } +} + +private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[Any] { + private[this] var hash = 0 + private[this] var value: V @uncheckedCaptures = _ + override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed) + def next() = { + if (!hasNext) + throw new NoSuchElementException + + hash = currentValueNode.getHash(currentValueCursor) + value = currentValueNode.getValue(currentValueCursor) + currentValueCursor -= 1 + this + } +} + +/** Used in HashMap[K, V]#removeAll(HashSet[K]) */ +private final class MapNodeRemoveAllSetNodeIterator[K](rootSetNode: SetNode[K]) extends ChampBaseIterator(rootSetNode) { + /** Returns the result of immutably removing all keys in `rootSetNode` from `rootMapNode` */ + def removeAll[V](rootMapNode: BitmapIndexedMapNode[K, V]): BitmapIndexedMapNode[K, V] = { + var curr = rootMapNode + while (curr.size > 0 && hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + curr = curr.removed( + key = currentValueNode.getPayload(currentValueCursor), + keyHash = improve(originalHash), + originalHash = originalHash, + shift = 0 + ) + currentValueCursor += 1 + } + curr + } +} + +/** + * $factoryInfo + * + * @define Coll `immutable.HashMap` + * @define coll immutable champ hash map + */ +@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + @transient + private final val EmptyMap = new HashMap(MapNode.empty) + + def empty[K, V]: HashMap[K, V] = + EmptyMap.asInstanceOf[HashMap[K, V]] + + def from[K, V](source: collection.IterableOnce[(K, V)]^): HashMap[K, V] = + source match { + case hs: HashMap[K, V] => hs + case _ => (newBuilder[K, V] ++= source).result() + } + + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[K, V]: ReusableBuilder[(K, V), HashMap[K, V]] = new HashMapBuilder[K, V] +} + + +/** A Builder for a HashMap. + * $multipleResults + */ +private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, V), HashMap[K, V]] { + import MapNode._ + import Node._ + + private def newEmptyRootNode = new BitmapIndexedMapNode[K, V](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The last given out HashMap as a return value of `result()`, if any, otherwise null. + * Indicates that on next add, the elements should be copied to an identical structure, before continuing + * mutations. 
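+   *
+   * For example (an illustrative sketch of the resulting copy-on-write behaviour, not a normative API example):
+   * {{{
+   *   val b = HashMap.newBuilder[Int, String]
+   *   b.addOne(1 -> "a")
+   *   val m1 = b.result()   // m1 now aliases the builder's internal root node
+   *   b.addOne(2 -> "b")    // ensureUnaliased() copies the structure before mutating
+   *   val m2 = b.result()   // m1 is still HashMap(1 -> "a")
+   * }}}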
+   */
+  private var aliased: HashMap[K, V] @uncheckedCaptures = _
+
+  private def isAliased: Boolean = aliased != null
+
+  /** The root node of the partially built HashMap */
+  private var rootNode: BitmapIndexedMapNode[K, V] @uncheckedCaptures = newEmptyRootNode
+
+  private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 =
+    if (rootNode.size == 0) value
+    else {
+      val originalHash = key.##
+      rootNode.getOrElse(key, originalHash, improve(originalHash), 0, value)
+    }
+
+  /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */
+  private[this] def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
+    if (ix < 0) throw new ArrayIndexOutOfBoundsException
+    if (ix > as.length) throw new ArrayIndexOutOfBoundsException
+    val result = new Array[Int](as.length + 1)
+    arraycopy(as, 0, result, 0, ix)
+    result(ix) = elem
+    arraycopy(as, ix, result, ix + 1, as.length - ix)
+    result
+  }
+
+  /** Inserts the key-value pair into the BitmapIndexedMapNode. Requires that this is a new key-value pair */
+  private[this] def insertValue[V1 >: V](bm: BitmapIndexedMapNode[K, V], bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): Unit = {
+    val dataIx = bm.dataIndex(bitpos)
+    val idx = TupleLength * dataIx
+
+    val src = bm.content
+    val dst = new Array[Any](src.length + TupleLength)
+
+    // copy 'src' and insert 2 element(s) at position 'idx'
+    arraycopy(src, 0, dst, 0, idx)
+    dst(idx) = key
+    dst(idx + 1) = value
+    arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+    val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash)
+
+    bm.dataMap |= bitpos
+    bm.content = dst
+    bm.originalHashes = dstHashes
+    bm.size += 1
+    bm.cachedJavaKeySetHashCode += keyHash
+  }
+
+  /** Upserts a key/value pair into mapNode, mutably */
+  private[immutable] def update(mapNode: MapNode[K, V], key: K, value: V, originalHash: Int, keyHash: Int, shift: Int): Unit = {
+    mapNode match {
+      case bm: BitmapIndexedMapNode[K, V] =>
+        val mask = maskFrom(keyHash, shift)
+        val bitpos = bitposFrom(mask)
+        if ((bm.dataMap & bitpos) != 0) {
+          val index = indexFrom(bm.dataMap, mask, bitpos)
+          val key0 = bm.getKey(index)
+          val key0UnimprovedHash = bm.getHash(index)
+
+          if (key0UnimprovedHash == originalHash && key0 == key) {
+            bm.content(TupleLength * index + 1) = value
+          } else {
+            val value0 = bm.getValue(index)
+            val key0Hash = improve(key0UnimprovedHash)
+
+            val subNodeNew: MapNode[K, V] =
+              bm.mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize)
+
+            bm.migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew)
+          }
+
+        } else if ((bm.nodeMap & bitpos) != 0) {
+          val index = indexFrom(bm.nodeMap, mask, bitpos)
+          val subNode = bm.getNode(index)
+          val beforeSize = subNode.size
+          val beforeHash = subNode.cachedJavaKeySetHashCode
+          update(subNode, key, value, originalHash, keyHash, shift + BitPartitionSize)
+          bm.size += subNode.size - beforeSize
+          bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHash
+        } else {
+          insertValue(bm, bitpos, key, originalHash, keyHash, value)
+        }
+      case hc: HashCollisionMapNode[K, V] =>
+        val index = hc.indexOf(key)
+        if (index < 0) {
+          hc.content = hc.content.appended((key, value))
+        } else {
+          hc.content = hc.content.updated(index, (key, value))
+        }
+    }
+  }
+
+  /** If currently referencing aliased structure, copy elements to new mutable structure */
+  private[this] def ensureUnaliased() = {
+    if (isAliased) copyElems()
+    aliased = null
+  }
+
/** Copy elements to new mutable structure */ + private[this] def copyElems(): Unit = { + rootNode = rootNode.copy() + } + + override def result(): HashMap[K, V] = + if (rootNode.size == 0) { + HashMap.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashMap(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: (K, V)): this.type = { + ensureUnaliased() + val h = elem._1.## + val im = improve(h) + update(rootNode, elem._1, elem._2, h, im, 0) + this + } + + def addOne(key: K, value: V): this.type = { + ensureUnaliased() + val originalHash = key.## + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int, hash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, hash, 0) + this + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { + ensureUnaliased() + xs match { + case hm: HashMap[K, V] => + new ChampBaseIterator[MapNode[K, V]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + mapNode = rootNode, + key = currentValueNode.getKey(currentValueCursor), + value = currentValueNode.getValue(currentValueCursor), + originalHash = originalHash, + keyHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 + } + }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position + case hm: collection.mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case lhm: collection.mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case thatMap: Map[K, V] => + thatMap.foreachEntry((key, value) => addOne(key, value)) + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) + } + + this + } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int = rootNode.size + + override def knownSize: Int = rootNode.size +} diff --git a/tests/pos-special/stdlib/collection/immutable/HashSet.scala b/tests/pos-special/stdlib/collection/immutable/HashSet.scala new file mode 100644 index 000000000000..38f394a7005f --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/HashSet.scala @@ -0,0 +1,2125 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import java.lang.Integer.{bitCount, numberOfTrailingZeros} +import java.lang.System.arraycopy + +import scala.collection.Hashing.improve +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree. + * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. + * + * @tparam A the type of the elements contained in this hash set. + * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +final class HashSet[A] private[immutable](private[immutable] val rootNode: BitmapIndexedSetNode[A]) + extends AbstractSet[A] + with StrictOptimizedSetOps[A, HashSet, HashSet[A]] + with IterableFactoryDefaults[A, HashSet] + with DefaultSerializable { + + def this() = this(SetNode.empty) + + // This release fence is present because rootNode may have previously been mutated during construction. + releaseFence() + + private[this] def newHashSetOrThis(newRootNode: BitmapIndexedSetNode[A]): HashSet[A] = + if (rootNode eq newRootNode) this else new HashSet(newRootNode) + + override def iterableFactory: IterableFactory[HashSet] = HashSet + + override def knownSize: Int = rootNode.size + + override def size: Int = rootNode.size + + override def isEmpty: Boolean = rootNode.size == 0 + + def iterator: Iterator[A] = { + if (isEmpty) Iterator.empty + else new SetIterator[A](rootNode) + } + + protected[immutable] def reverseIterator: Iterator[A] = new SetReverseIterator[A](rootNode) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[A, SetNode[A]](size, rootNode, (node, i) => node.getPayload(i))) + } + s.asInstanceOf[S with EfficientSplit] + } + + def contains(element: A): Boolean = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + rootNode.contains(element, elementUnimprovedHash, elementHash, 0) + } + + def incl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + val newRootNode = rootNode.updated(element, elementUnimprovedHash, elementHash, 0) + newHashSetOrThis(newRootNode) + } + + def excl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + val newRootNode = rootNode.removed(element, elementUnimprovedHash, elementHash, 0) + newHashSetOrThis(newRootNode) + } + + override def concat(that: IterableOnce[A]): HashSet[A] = + that match { + case hs: HashSet[A] => + if (isEmpty) hs + else { + val newNode = rootNode.concat(hs.rootNode, 0) + if (newNode eq hs.rootNode) hs + else newHashSetOrThis(newNode) + } + case 
hs: collection.mutable.HashSet[A] => + val iter = hs.nodeIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + case lhs: collection.mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + case _ => + val iter = that.iterator + var current = rootNode + while (iter.hasNext) { + val element = iter.next() + val originalHash = element.## + val improved = improve(originalHash) + current = current.updated(element, originalHash, improved, 0) + + if (current ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since `element` has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. 
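+          // (Illustrative aside, assuming the standard CHAMP constants where Node.BitPartitionSize is 5:
+          // maskFrom(improved, 0) takes the low five bits of the improved hash, and bitposFrom turns that
+          // mask into a single-bit Int, so the seed below marks exactly the root slot `element` landed in.)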
+            var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0))
+            while (iter.hasNext) {
+              val element = iter.next()
+              val originalHash = element.##
+              val improved = improve(originalHash)
+              shallowlyMutableNodeMap = current.updateWithShallowMutations(element, originalHash, improved, 0, shallowlyMutableNodeMap)
+            }
+            return new HashSet(current)
+          }
+        }
+        this
+    }
+
+  override def tail: HashSet[A] = this - head
+
+  override def init: HashSet[A] = this - last
+
+  override def head: A = iterator.next()
+
+  override def last: A = reverseIterator.next()
+
+  override def foreach[U](f: A => U): Unit = rootNode.foreach(f)
+
+  /** Applies a function f to each element, and its corresponding **original** hash, in this Set */
+  @`inline` private[collection] def foreachWithHash(f: (A, Int) => Unit): Unit = rootNode.foreachWithHash(f)
+
+  /** Applies a function f to each element, and its corresponding **original** hash, in this Set
+   * Stops iterating the first time that f returns `false`. */
+  @`inline` private[collection] def foreachWithHashWhile(f: (A, Int) => Boolean): Unit = rootNode.foreachWithHashWhile(f)
+
+  def subsetOf(that: Set[A]): Boolean = if (that.isEmpty) true else that match {
+    case set: HashSet[A] => rootNode.subsetOf(set.rootNode, 0)
+    case _ => super.subsetOf(that)
+  }
+
+  override def equals(that: Any): Boolean =
+    that match {
+      case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode)
+      case _ => super.equals(that)
+    }
+
+  override protected[this] def className = "HashSet"
+
+  override def hashCode(): Int = {
+    val it = new SetHashIterator(rootNode)
+    val hash = MurmurHash3.unorderedHash(it, MurmurHash3.setSeed)
+    //assert(hash == super.hashCode())
+    hash
+  }
+
+  override def diff(that: collection.Set[A]): HashSet[A] = {
+    if (isEmpty) {
+      this
+    } else {
+      that match {
+        case hashSet: HashSet[A] =>
+          if (hashSet.isEmpty) this else {
+            val newRootNode = rootNode.diff(hashSet.rootNode, 0)
+            if (newRootNode.size == 0) HashSet.empty else newHashSetOrThis(newRootNode)
+          }
+        case hashSet: collection.mutable.HashSet[A] =>
+          val iter = hashSet.nodeIterator
+          var curr = rootNode
+          while (iter.hasNext) {
+            val next = iter.next()
+            val originalHash = hashSet.unimproveHash(next.hash)
+            val improved = improve(originalHash)
+            curr = curr.removed(next.key, originalHash, improved, 0)
+            if (curr ne rootNode) {
+              if (curr.size == 0) {
+                return HashSet.empty
+              }
+              while (iter.hasNext) {
+                val next = iter.next()
+                val originalHash = hashSet.unimproveHash(next.hash)
+                val improved = improve(originalHash)
+
+                curr.removeWithShallowMutations(next.key, originalHash, improved)
+
+                if (curr.size == 0) {
+                  return HashSet.empty
+                }
+              }
+              return new HashSet(curr)
+            }
+          }
+          this
+
+        case other =>
+          val thatKnownSize = other.knownSize
+
+          if (thatKnownSize == 0) {
+            this
+          } else if (thatKnownSize <= size) {
+            /* this branch intentionally includes the case of thatKnownSize == -1. We know that HashSets are quite fast at look-up, so
+               we're likely to be the faster of the two at that. */
+            removedAllWithShallowMutations(other)
+          } else {
+            // TODO: Develop more sophisticated heuristic for which branch to take
+            filterNot(other.contains)
+          }
+      }
+
+    }
+  }
+
+  /** Immutably removes all elements of `that` from this HashSet
+   *
+   * Mutation is used internally, but only on root SetNodes which this method itself creates.
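+   *
+   * (Illustrative reasoning: the first `removed` call that returns a node `ne rootNode` has necessarily
+   * allocated a fresh root, so the `removeWithShallowMutations` calls that follow only ever mutate a
+   * node that this method itself created.)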
+ * + * That is, this method is safe to call on published sets because it does not mutate `this` + */ + private[this] def removedAllWithShallowMutations(that: IterableOnce[A]): HashSet[A] = { + val iter = that.iterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + curr = curr.removed(next, originalHash, improved, 0) + if (curr ne rootNode) { + if (curr.size == 0) { + return HashSet.empty + } + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + + curr.removeWithShallowMutations(next, originalHash, improved) + + if (curr.size == 0) { + return HashSet.empty + } + } + return new HashSet(curr) + } + } + this + } + + override def removedAll(that: IterableOnce[A]): HashSet[A] = that match { + case set: scala.collection.Set[A] => diff(set) + case range: Range if range.length > size => + filter { + case i: Int => !range.contains(i) + case _ => true + } + + case _ => + removedAllWithShallowMutations(that) + } + + override def partition(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.partition(p) + } + + override def span(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.span(p) + } + + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): HashSet[A] = { + val newRootNode = rootNode.filterImpl(pred, isFlipped) + if (newRootNode eq rootNode) this + else if (newRootNode.size == 0) HashSet.empty + else new HashSet(newRootNode) + } + + override def intersect(that: collection.Set[A]): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.intersect(that) + } + + override def take(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.take(n) + } + + override def takeRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeRight(n) + } + + override def takeWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeWhile(p) + } + + override def drop(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.drop(n) + } + + override def dropRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ super.dropRight(n) + } + + override def dropWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.dropWhile(p) + } +} + +private[immutable] object SetNode { + + private final val EmptySetNode = new BitmapIndexedSetNode(0, 0, Array.empty, Array.empty, 0, 0) + + def empty[A]: BitmapIndexedSetNode[A] = EmptySetNode.asInstanceOf[BitmapIndexedSetNode[A]] + + final val TupleLength = 1 + +} + +private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] { + + def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean + + def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): SetNode[A] + + def hasPayload: Boolean + + def payloadArity: Int + + def getPayload(index: Int): A + + def size: Int + + def foreach[U](f: A => U): Unit + + def subsetOf(that: SetNode[A], shift: Int): Boolean + + def copy(): SetNode[A] + + def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] + + def diff(that: SetNode[A], shift: Int): SetNode[A] + + def concat(that: SetNode[A], shift: Int): SetNode[A] + + def foreachWithHash(f: (A, Int) => Unit): Unit + + def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean +} + +private final class BitmapIndexedSetNode[A]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends SetNode[A] { + + import Node._ + import SetNode._ + + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) + + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new SetIterator[A](this).size - payloadArity >= 2 * nodeArity + + private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length + + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[SetNode[_]] == false) + + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[SetNode[_]] == true) + + predicate1 && predicate2 && predicate3 + } + */ + + def getPayload(index: Int): A = content(index).asInstanceOf[A] + + override def getHash(index: Int): Int = originalHashes(index) + + def getNode(index: Int): SetNode[A] = content(content.length - 1 - index).asInstanceOf[SetNode[A]] + + def contains(element: A, originalHash: Int, elementHash: Int, shift: Int): Boolean = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + return originalHashes(index) == originalHash && element == this.getPayload(index) + } + + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + return this.getNode(index).contains(element, originalHash, elementHash, shift + BitPartitionSize) + } + + false + } + + def updated(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0.asInstanceOf[AnyRef] 
eq element.asInstanceOf[AnyRef]) { + return this + } else { + val element0UnimprovedHash = getHash(index) + val element0Hash = improve(element0UnimprovedHash) + if (originalHash == element0UnimprovedHash && element0 == element) { + return this + } else { + val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + return copyAndMigrateFromInlineToNode(bitpos, element0Hash, subNodeNew) + } + } + } + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) + if (subNode eq subNodeNew) { + return this + } else { + return copyAndSetNode(bitpos, subNode, subNodeNew) + } + } + + copyAndInsertValue(bitpos, element, originalHash, elementHash) + } + /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately + * descendant child nodes (only one level beneath `this`) + * + * The caller should pass a bitmap of child nodes of this node, which this method may mutate. + * If this method may mutate a child node, then if the updated value is located in that child node, it will + * be shallowly mutated (its children will not be mutated). + * + * If instead this method may not mutate the child node in which the to-be-updated value is located, then + * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. + * + * @param key the key to update + * @param originalHash key.## + * @param keyHash the improved hash + * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated + * during the call to this method + * + * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be + * available for mutations in subsequent calls. 
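+   *
+   * For example (illustrative): if `(bitpos & shallowlyMutableNodeMap) != 0` for the slot that `element`
+   * hashes to, the child under that slot is updated in place and the map is returned unchanged; otherwise
+   * the child is rebuilt immutably, re-inserted mutably into `this`, and its bit is OR-ed into the result
+   * so that later calls may mutate the fresh copy.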
+   */
+  def updateWithShallowMutations(element: A, originalHash: Int, elementHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = {
+    val mask = maskFrom(elementHash, shift)
+    val bitpos = bitposFrom(mask)
+
+    if ((dataMap & bitpos) != 0) {
+      val index = indexFrom(dataMap, mask, bitpos)
+      val element0 = getPayload(index)
+      val element0UnimprovedHash = getHash(index)
+      if (element0UnimprovedHash == originalHash && element0 == element) {
+        shallowlyMutableNodeMap
+      } else {
+        val element0Hash = improve(element0UnimprovedHash)
+        val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
+        migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew)
+        shallowlyMutableNodeMap | bitpos
+      }
+    } else if ((nodeMap & bitpos) != 0) {
+      val index = indexFrom(nodeMap, mask, bitpos)
+      val subNode = this.getNode(index)
+      val subNodeSize = subNode.size
+      val subNodeCachedJavaKeySetHashCode = subNode.cachedJavaKeySetHashCode
+
+      var returnNodeMap = shallowlyMutableNodeMap
+
+      val subNodeNew: SetNode[A] = subNode match {
+        case subNodeBm: BitmapIndexedSetNode[A] if (bitpos & shallowlyMutableNodeMap) != 0 =>
+          subNodeBm.updateWithShallowMutations(element, originalHash, elementHash, shift + BitPartitionSize, 0)
+          subNodeBm
+        case _ =>
+          val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize)
+          if (subNodeNew ne subNode) {
+            returnNodeMap |= bitpos
+          }
+          subNodeNew
+      }
+
+      this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew
+      this.size = this.size - subNodeSize + subNodeNew.size
+      this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeCachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode
+      returnNodeMap
+    } else {
+      val dataIx = dataIndex(bitpos)
+      val idx = dataIx
+
+      val src = this.content
+      val dst = new Array[Any](src.length + TupleLength)
+
+      // copy 'src' and insert 1 element(s) at position 'idx'
+      arraycopy(src, 0, dst, 0, idx)
+      dst(idx) = element
+      arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+      val dstHashes = insertElement(originalHashes, dataIx, originalHash)
+
+      this.dataMap |= bitpos
+      this.content = dst
+      this.originalHashes = dstHashes
+      this.size += 1
+      this.cachedJavaKeySetHashCode += elementHash
+      shallowlyMutableNodeMap
+    }
+  }
+
+
+  def removed(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = {
+    val mask = maskFrom(elementHash, shift)
+    val bitpos = bitposFrom(mask)
+
+    if ((dataMap & bitpos) != 0) {
+      val index = indexFrom(dataMap, mask, bitpos)
+      val element0 = this.getPayload(index)
+
+      if (element0 == element) {
+        if (this.payloadArity == 2 && this.nodeArity == 0) {
+          /*
+           * Create a new node with the remaining pair. The new node will either (a) become the new root
+           * returned, or (b) be unwrapped and inlined into its parent during returning.
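+           *
+           * (Illustrative note: this is why `newDataMap` below uses the mask at shift 0 when `shift != 0` --
+           * the single remaining payload is positioned as if it lived in a root node, so an ancestor can
+           * splice it directly into itself when unwrapping.)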
+           */
+          val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(elementHash, 0))
+          if (index == 0)
+            return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(1)), Array(originalHashes(1)), size - 1, improve(originalHashes(1)))
+          else
+            return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(0)), Array(originalHashes(0)), size - 1, improve(originalHashes(0)))
+        }
+        else return copyAndRemoveValue(bitpos, elementHash)
+      } else return this
+    }
+
+    if ((nodeMap & bitpos) != 0) {
+      val index = indexFrom(nodeMap, mask, bitpos)
+      val subNode = this.getNode(index)
+
+      val subNodeNew = subNode.removed(element, originalHash, elementHash, shift + BitPartitionSize)
+
+      if (subNodeNew eq subNode) return this
+
+      // cache just in case subNodeNew is a hashCollision node, in which case a little arithmetic is avoided
+      // in Vector#length
+      val subNodeNewSize = subNodeNew.size
+
+      if (subNodeNewSize == 1) {
+        if (this.size == subNode.size) {
+          // subNode is the only child (no other data or node children of `this` exist)
+          // escalate (singleton or empty) result
+          return subNodeNew.asInstanceOf[BitmapIndexedSetNode[A]]
+        } else {
+          // inline value (move to front)
+          return copyAndMigrateFromNodeToInline(bitpos, elementHash, subNode, subNodeNew)
+        }
+      } else if (subNodeNewSize > 1) {
+        // modify current node (set replacement node)
+        return copyAndSetNode(bitpos, subNode, subNodeNew)
+      }
+    }
+
+    this
+  }
+  /** Variant of `removed` which will perform mutation on only the top-level node (`this`), rather than return a new
+   * node
+   *
+   * Should only be called on root nodes, because shift is assumed to be 0
+   *
+   * @param element the element to remove
+   * @param originalHash the original hash of `element`
+   * @param elementHash the improved hash of `element`
+   */
+  def removeWithShallowMutations(element: A, originalHash: Int, elementHash: Int): this.type = {
+    val mask = maskFrom(elementHash, 0)
+    val bitpos = bitposFrom(mask)
+
+    if ((dataMap & bitpos) != 0) {
+      val index = indexFrom(dataMap, mask, bitpos)
+      val element0 = this.getPayload(index)
+
+      if (element0 == element) {
+        if (this.payloadArity == 2 && this.nodeArity == 0) {
+          val newDataMap = dataMap ^ bitpos
+          if (index == 0) {
+            val newContent = Array[Any](getPayload(1))
+            val newOriginalHashes = Array(originalHashes(1))
+            val newCachedJavaKeySetHashCode = improve(getHash(1))
+            this.content = newContent
+            this.originalHashes = newOriginalHashes
+            this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode
+          } else {
+            val newContent = Array[Any](getPayload(0))
+            val newOriginalHashes = Array(originalHashes(0))
+            val newCachedJavaKeySetHashCode = improve(getHash(0))
+            this.content = newContent
+            this.originalHashes = newOriginalHashes
+            this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode
+          }
+          this.dataMap = newDataMap
+          this.nodeMap = 0
+          this.size = 1
+          this
+        }
+        else {
+          val dataIx = dataIndex(bitpos)
+          val idx = TupleLength * dataIx
+
+          val src = this.content
+          val dst = new Array[Any](src.length - TupleLength)
+
+          arraycopy(src, 0, dst, 0, idx)
+          arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength)
+
+          val dstHashes = removeElement(originalHashes, dataIx)
+
+          this.dataMap = this.dataMap ^ bitpos
+          this.content = dst
+          this.originalHashes = dstHashes
+          this.size -= 1
+          this.cachedJavaKeySetHashCode -= elementHash
+          this
+        }
+      } else this
+    } else if ((nodeMap & bitpos) != 0) {
+      val index = indexFrom(nodeMap, mask, bitpos)
+      val subNode = this.getNode(index)
+
+      val
subNodeNew = subNode.removed(element, originalHash, elementHash, BitPartitionSize).asInstanceOf[BitmapIndexedSetNode[A]] + + if (subNodeNew eq subNode) return this + + if (subNodeNew.size == 1) { + if (this.payloadArity == 0 && this.nodeArity == 1) { + this.dataMap = subNodeNew.dataMap + this.nodeMap = subNodeNew.nodeMap + this.content = subNodeNew.content + this.originalHashes = subNodeNew.originalHashes + this.size = subNodeNew.size + this.cachedJavaKeySetHashCode = subNodeNew.cachedJavaKeySetHashCode + this + } else { + migrateFromNodeToInlineInPlace(bitpos, originalHash, elementHash, subNode, subNodeNew) + this + } + } else { + // size must be > 1 + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size -= 1 + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNode.cachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode + this + } + } else this + } + + def mergeTwoKeyValPairs(key0: A, originalKeyHash0: Int, keyHash0: Int, key1: A, originalKeyHash1: Int, keyHash1: Int, shift: Int): SetNode[A] = { + // assert(key0 != key1) + + if (shift >= HashCodeLength) { + new HashCollisionSetNode[A](originalKeyHash0, keyHash0, Vector(key0, key1)) + } else { + val mask0 = maskFrom(keyHash0, shift) + val mask1 = maskFrom(keyHash1, shift) + + if (mask0 != mask1) { + // unique prefixes, payload fits on same level + val dataMap = bitposFrom(mask0) | bitposFrom(mask1) + val newCachedHashCode = keyHash0 + keyHash1 + + if (mask0 < mask1) { + new BitmapIndexedSetNode[A](dataMap, 0, Array(key0, key1), Array(originalKeyHash0, originalKeyHash1), 2, newCachedHashCode) + } else { + new BitmapIndexedSetNode[A](dataMap, 0, Array(key1, key0), Array(originalKeyHash1, originalKeyHash0), 2, newCachedHashCode) + } + } else { + // identical prefixes, payload must be disambiguated deeper in the trie + val nodeMap = bitposFrom(mask0) + val node = mergeTwoKeyValPairs(key0, originalKeyHash0, keyHash0, key1, originalKeyHash1, keyHash1, shift + BitPartitionSize) + + new BitmapIndexedSetNode[A](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) + } + } + } + + def hasPayload: Boolean = dataMap != 0 + + def payloadArity: Int = bitCount(dataMap) + + def hasNodes: Boolean = nodeMap != 0 + + def nodeArity: Int = bitCount(nodeMap) + + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) + + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) + + def copyAndSetNode(bitpos: Int, oldNode: SetNode[A], newNode: SetNode[A]) = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedSetNode[A]( + dataMap = dataMap, + nodeMap = nodeMap, + content = dst, + originalHashes = originalHashes, + size = size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } + + def copyAndInsertValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + 1) + + // copy 'src' and insert 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + arraycopy(src, idx, dst, idx + 1, src.length - idx) + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + new 
BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + elementHash)
+  }
+
+  def copyAndSetValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = {
+    val dataIx = dataIndex(bitpos)
+    val idx = TupleLength * dataIx
+
+    val src = this.content
+    val dst = new Array[Any](src.length)
+
+    // copy 'src' and set 1 element(s) at position 'idx'
+    arraycopy(src, 0, dst, 0, src.length)
+    dst(idx) = key
+
+    new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode)
+  }
+
+  def copyAndRemoveValue(bitpos: Int, elementHash: Int) = {
+    val dataIx = dataIndex(bitpos)
+    val idx = TupleLength * dataIx
+
+    val src = this.content
+    val dst = new Array[Any](src.length - 1)
+
+    // copy 'src' and remove 1 element(s) at position 'idx'
+    arraycopy(src, 0, dst, 0, idx)
+    arraycopy(src, idx + 1, dst, idx, src.length - idx - 1)
+    val dstHashes = removeElement(originalHashes, dataIx)
+    new BitmapIndexedSetNode[A](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - elementHash)
+  }
+
+  def copyAndMigrateFromInlineToNode(bitpos: Int, elementHash: Int, node: SetNode[A]) = {
+    val dataIx = dataIndex(bitpos)
+    val idxOld = TupleLength * dataIx
+    val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
+
+    val src = this.content
+    val dst = new Array[Any](src.length - 1 + 1)
+
+    // copy 'src' and remove 1 element(s) at position 'idxOld' and
+    // insert 1 element(s) at position 'idxNew'
+    // assert(idxOld <= idxNew)
+    arraycopy(src, 0, dst, 0, idxOld)
+    arraycopy(src, idxOld + 1, dst, idxOld, idxNew - idxOld)
+    dst(idxNew) = node
+    arraycopy(src, idxNew + 1, dst, idxNew + 1, src.length - idxNew - 1)
+    val dstHashes = removeElement(originalHashes, dataIx)
+    new BitmapIndexedSetNode[A](
+      dataMap = dataMap ^ bitpos,
+      nodeMap = nodeMap | bitpos,
+      content = dst, originalHashes = dstHashes,
+      size = size - 1 + node.size,
+      cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - elementHash + node.cachedJavaKeySetHashCode
+    )
+  }
+  /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node.
+   *
+   * Note: This method will mutate `this`, and will mutate `this.content`
+   *
+   * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets,
+   * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place,
+   * we reuse this.content by shifting data/nodes around, rather than allocating a new array.
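+   *
+   * A sketch of why the array size is preserved (assuming `TupleLength == 1`, as it is for sets): payloads
+   * occupy slots at the front of `content` and sub-nodes occupy slots at the back, so
+   * {{{
+   *   payloadArity * TupleLength + nodeArity == content.length
+   * }}}
+   * holds before and after the call; trading one payload slot for one node slot leaves the length unchanged,
+   * which is what makes the in-place shuffle safe.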
+   *
+   * @param bitpos the bit position of the data to migrate to node
+   * @param keyHash the improved hash of the element currently at `bitpos`
+   * @param node the node to place at `bitpos`
+   */
+  def migrateFromInlineToNodeInPlace(bitpos: Int, keyHash: Int, node: SetNode[A]): this.type = {
+    val dataIx = dataIndex(bitpos)
+    val idxOld = TupleLength * dataIx
+    val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
+
+    arraycopy(content, idxOld + TupleLength, content, idxOld, idxNew - idxOld)
+    content(idxNew) = node
+
+    this.dataMap = this.dataMap ^ bitpos
+    this.nodeMap = this.nodeMap | bitpos
+    this.originalHashes = removeElement(originalHashes, dataIx)
+    this.size = this.size - 1 + node.size
+    this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode
+    this
+  }
+
+  def copyAndMigrateFromNodeToInline(bitpos: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]) = {
+    val idxOld = this.content.length - 1 - nodeIndex(bitpos)
+    val dataIxNew = dataIndex(bitpos)
+    val idxNew = TupleLength * dataIxNew
+
+    val src = this.content
+    val dst = new Array[Any](src.length - 1 + 1)
+
+    // copy 'src' and remove 1 element(s) at position 'idxOld' and
+    // insert 1 element(s) at position 'idxNew'
+    // assert(idxOld >= idxNew)
+    arraycopy(src, 0, dst, 0, idxNew)
+    dst(idxNew) = node.getPayload(0)
+    arraycopy(src, idxNew, dst, idxNew + 1, idxOld - idxNew)
+    arraycopy(src, idxOld + 1, dst, idxOld + 1, src.length - idxOld - 1)
+    val hash = node.getHash(0)
+    val dstHashes = insertElement(originalHashes, dataIxNew, hash)
+    new BitmapIndexedSetNode[A](
+      dataMap = dataMap | bitpos,
+      nodeMap = nodeMap ^ bitpos,
+      content = dst,
+      originalHashes = dstHashes,
+      size = size - oldNode.size + 1,
+      cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode
+    )
+  }
+
+  /** Variant of `copyAndMigrateFromNodeToInline` which mutates `this` rather than returning a new node.
+   *
+   * Note: This method will mutate `this`, and will mutate `this.content`
+   *
+   * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets,
+   * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place,
+   * we reuse this.content by shifting data/nodes around, rather than allocating a new array.
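+   *
+   * For orientation (illustrative only): the two kinds of slot in the shared `content` array are addressed
+   * from opposite ends,
+   * {{{
+   *   val payloadIdx = TupleLength * dataIndex(bitpos)        // data grows from the front
+   *   val nodeIdx    = content.length - 1 - nodeIndex(bitpos) // nodes grow from the back
+   * }}}
+   * which is why this method shifts the slots between `dataIxNew` and `idxOld` one position to the right
+   * before writing the inlined element at the front.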
+   *
+   * @param bitpos the bit position of the node to migrate inline
+   * @param oldNode the node currently stored at position `bitpos`
+   * @param node the node containing the single element to migrate inline
+   */
+  def migrateFromNodeToInlineInPlace(bitpos: Int, originalHash: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]): Unit = {
+    val idxOld = this.content.length - 1 - nodeIndex(bitpos)
+    val dataIxNew = dataIndex(bitpos)
+    val element = node.getPayload(0)
+    arraycopy(content, dataIxNew, content, dataIxNew + 1, idxOld - dataIxNew)
+    content(dataIxNew) = element
+    val hash = node.getHash(0)
+    val dstHashes = insertElement(originalHashes, dataIxNew, hash)
+
+    this.dataMap = this.dataMap | bitpos
+    this.nodeMap = this.nodeMap ^ bitpos
+    this.originalHashes = dstHashes
+    this.size = this.size - oldNode.size + 1
+    this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode
+  }
+
+  def foreach[U](f: A => U): Unit = {
+    val thisPayloadArity = payloadArity
+    var i = 0
+    while (i < thisPayloadArity) {
+      f(getPayload(i))
+      i += 1
+    }
+
+    val thisNodeArity = nodeArity
+    var j = 0
+    while (j < thisNodeArity) {
+      getNode(j).foreach(f)
+      j += 1
+    }
+  }
+
+  def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match {
+    case _: HashCollisionSetNode[A] => false
+    case node: BitmapIndexedSetNode[A] =>
+      val thisBitmap = this.dataMap | this.nodeMap
+      val nodeBitmap = node.dataMap | node.nodeMap
+
+      if ((thisBitmap | nodeBitmap) != nodeBitmap)
+        return false
+
+      var bitmap = thisBitmap & nodeBitmap
+      var bitsToSkip = numberOfTrailingZeros(bitmap)
+
+      var isValidSubset = true
+      while (isValidSubset && bitsToSkip < HashCodeLength) {
+        val bitpos = bitposFrom(bitsToSkip)
+
+        isValidSubset =
+          if ((this.dataMap & bitpos) != 0) {
+            if ((node.dataMap & bitpos) != 0) {
+              // Data x Data
+              val payload0 = this.getPayload(indexFrom(this.dataMap, bitpos))
+              val payload1 = node.getPayload(indexFrom(node.dataMap, bitpos))
+              payload0 == payload1
+            } else {
+              // Data x Node
+              val thisDataIndex = indexFrom(this.dataMap, bitpos)
+              val payload = this.getPayload(thisDataIndex)
+              val subNode = that.getNode(indexFrom(node.nodeMap, bitpos))
+              val elementUnimprovedHash = getHash(thisDataIndex)
+              val elementHash = improve(elementUnimprovedHash)
+              subNode.contains(payload, elementUnimprovedHash, elementHash, shift + BitPartitionSize)
+            }
+          } else {
+            // Node x Node
+            val subNode0 = this.getNode(indexFrom(this.nodeMap, bitpos))
+            val subNode1 = node.getNode(indexFrom(node.nodeMap, bitpos))
+            subNode0.subsetOf(subNode1, shift + BitPartitionSize)
+          }
+
+        val newBitmap = bitmap ^ bitpos
+        bitmap = newBitmap
+        bitsToSkip = numberOfTrailingZeros(newBitmap)
+      }
+      isValidSubset
+  }
+
+  override def filterImpl(pred: A => Boolean, flipped: Boolean): BitmapIndexedSetNode[A] = {
+    if (size == 0) this
+    else if (size == 1) {
+      if (pred(getPayload(0)) != flipped) this else SetNode.empty
+    } else if (nodeMap == 0) {
+      // Performance optimization for nodes of depth 1:
+      //
+      // this node has no "node" children, all children are inlined data elems, therefore the logic is significantly simpler
+      // approach:
+      //   * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter
+      //   * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations
+      //   * traverse the content array once more, placing each passing element (according
to `newDatamap`) in the new content and originalHashes arrays + // + // note: + // * this optimization significantly improves performance of not only small trees, but also larger trees, since + // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as + // descendants + // + val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap) + + var newDataMap = 0 + var newCachedHashCode = 0 + var dataIndex = 0 + + var i = minimumIndex + + while(i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } + + i += 1 + } + + if (newDataMap == 0) { + SetNode.empty + } else if (newDataMap == dataMap) { + this + } else { + val newSize = Integer.bitCount(newDataMap) + val newContent = new Array[Any](newSize) + val newOriginalHashCodes = new Array[Int](newSize) + val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) + + var j = Integer.numberOfTrailingZeros(newDataMap) + + var newDataIndex = 0 + + while (j < newMaximumIndex) { + val bitpos = bitposFrom(j) + if ((bitpos & newDataMap) != 0) { + val oldIndex = indexFrom(dataMap, bitpos) + newContent(newDataIndex) = content(oldIndex) + newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) + newDataIndex += 1 + } + j += 1 + } + + new BitmapIndexedSetNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) + } + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + + // TODO: When filtering results in a single-elem node, simply `(A, originalHash, improvedHash)` could be returned, + // rather than a singleton node (to avoid pointlessly allocating arrays, nodes, which would just be inlined in + // the parent anyways). This would probably involve changing the return type of filterImpl to `AnyRef` which may + // return at runtime a SetNode[A], or a tuple of (A, Int, Int) + + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + + // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) + } + } + + override def diff(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) this + else if (size == 1) { + val h = getHash(0) + if (that.contains(getPayload(0), h, improve(h), shift)) SetNode.empty else this + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + + // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val originalHash = getHash(dataIndex) + val hash = improve(originalHash) + + if (!bm.contains(payload, originalHash, hash, shift)) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += hash + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + + val newSubNode: SetNode[A] = + if ((bitpos & bm.dataMap) != 0) { + val thatDataIndex = indexFrom(bm.dataMap, bitpos) + val thatPayload = bm.getPayload(thatDataIndex) + val thatOriginalHash = bm.getHash(thatDataIndex) + val thatHash = improve(thatOriginalHash) + oldSubNode.removed(thatPayload, thatOriginalHash, thatHash, shift + BitPartitionSize) + } else if ((bitpos & bm.nodeMap) != 0) { + oldSubNode.diff(bm.getNode(indexFrom(bm.nodeMap, bitpos)), shift + BitPartitionSize) + } else { + oldSubNode + } + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) + } + case _: HashCollisionSetNode[A] => + // this branch should never happen, because HashCollisionSetNodes and BitMapIndexedSetNodes do not occur at the + // same depth + throw new RuntimeException("BitmapIndexedSetNode diff HashCollisionSetNode") + } + + /** Utility method only for use in `diff` and `filterImpl` + * + * @param newSize the size of the new SetNode + * @param newDataMap the dataMap of the new SetNode + * @param newNodeMap the nodeMap of the new SetNode + * @param minimumIndex the minimum index (in range of [0, 31]) for which there are sub-nodes or data beneath the new + * SetNode + * @param oldDataPassThrough bitmap representing all the data that are just passed from `this` to the new + * SetNode + * @param nodesToPassThroughMap bitmap representing all 
nodes that are just passed from `this` to the new SetNode + * @param nodeMigrateToDataTargetMap bitmap representing all positions which will now be data in the new SetNode, + * but which were nodes in `this` + * @param nodesToMigrateToData a queue (in order of child position) of single-element nodes, which will be migrated + * to data, in positions in the `nodeMigrateToDataTargetMap` + * @param mapOfNewNodes bitmap of positions of new nodes to include in the new SetNode + * @param newNodes queue in order of child position, of all new nodes to include in the new SetNode + * @param newCachedHashCode the cached java keyset hashcode of the new SetNode + */ + private[this] def newNodeFrom( + newSize: Int, + newDataMap: Int, + newNodeMap: Int, + minimumIndex: Int, + oldDataPassThrough: Int, + nodesToPassThroughMap: Int, + nodeMigrateToDataTargetMap: Int, + nodesToMigrateToData: mutable.Queue[SetNode[A]], + mapOfNewNodes: Int, + newNodes: mutable.Queue[SetNode[A]], + newCachedHashCode: Int): BitmapIndexedSetNode[A] = { + if (newSize == 0) { + SetNode.empty + } else if (newSize == size) { + this + } else { + val newDataSize = bitCount(newDataMap) + val newContentSize = newDataSize + bitCount(newNodeMap) + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + + val newAllMap = newDataMap | newNodeMap + val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) + + // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will + // not be incremented properly. Otherwise we could have started at Integer.numberOfTrailingZeroes(newAllMap) + var i = minimumIndex + + var oldDataIndex = 0 + var oldNodeIndex = 0 + + var newDataIndex = 0 + var newNodeIndex = 0 + + while (i < maxIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & oldDataPassThrough) != 0) { + newContent(newDataIndex) = getPayload(oldDataIndex) + newOriginalHashes(newDataIndex) = getHash(oldDataIndex) + newDataIndex += 1 + oldDataIndex += 1 + } else if ((bitpos & nodesToPassThroughMap) != 0) { + newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { + // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null + val node = nodesToMigrateToData.dequeue() + newContent(newDataIndex) = node.getPayload(0) + newOriginalHashes(newDataIndex) = node.getHash(0) + newDataIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & mapOfNewNodes) != 0) { + // we need not check for null here. 
If mapOfNewNodes != 0, then newNodes must not be null + newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & dataMap) != 0) { + oldDataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + oldNodeIndex += 1 + } + + i += 1 + } + + new BitmapIndexedSetNode[A](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) + } + } + + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedSetNode[_] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false + } + + @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true + var i = 0 + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) + i += 1 + } + + isEqual + } + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def copy(): BitmapIndexedSetNode[A] = { + val contentClone = content.clone() + val contentLength = contentClone.length + var i = bitCount(dataMap) + while (i < contentLength) { + contentClone(i) = contentClone(i).asInstanceOf[SetNode[A]].copy() + i += 1 + } + new BitmapIndexedSetNode[A](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) + } + + override def concat(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getPayload(0), originalHash, improve(originalHash), shift) + } + + // if we go through the merge and the result does not differ from `this`, we can just return `this`, to improve sharing + // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result, and `this` + var anyChangesMadeSoFar = false + + // bitmap containing `1` in any position that has any descendant in either left or right, either data or node + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + // minimumIndex is inclusive -- it is the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumIndex is inclusive -- it is the last index for which there is data or nodes + // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound + // of int bitposition representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataLeftOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + if (getHash(leftIdx) == 
bm.getHash(rightIdx) && getPayload(leftIdx) == bm.getPayload(rightIdx)) { + leftDataRightDataLeftOverwrites |= bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(getHash(leftIdx)), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataLeftOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets + + + if ((newDataMap == (leftDataOnly | leftDataRightDataLeftOverwrites)) && (newNodeMap == leftNodeOnly)) { + // nothing from `bm` will make it into the result -- return early + return this + } + + val newDataSize = bitCount(newDataMap) + val newContentSize = newDataSize + bitCount(newNodeMap) + + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 + + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 + + val nextShift = shift + Node.BitPartitionSize + + var compressedDataIdx = 0 + var compressedNodeIdx = 0 + + var bitpos = minimumBitPos + var finished = false + + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val leftNode = getNode(leftNodeIdx) + val newNode = leftNode.concat(bm.getNode(rightNodeIdx), nextShift) + if (leftNode ne newNode) { + anyChangesMadeSoFar = true + } + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftPayload = getPayload(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + n.updated(leftPayload, leftOriginalHash, leftImproved, nextShift) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } + else if ((bitpos & leftNodeRightData) != 0) { + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + val leftNode = getNode(leftNodeIdx) + val updated = leftNode.updated( + element = bm.getPayload(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift + ) + if (updated ne leftNode) { + anyChangesMadeSoFar = true + } + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + val originalHash = 
originalHashes(leftDataIdx) + newContent(compressedDataIdx) = getPayload(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + val newNode = getNode(leftNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val rightOriginalHash = bm.getHash(rightDataIdx) + + bm.mergeTwoKeyValPairs( + getPayload(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), + bm.getPayload(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), + nextShift + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftDataIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataLeftOverwrites) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + leftDataIdx += 1 + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + if (anyChangesMadeSoFar) + new BitmapIndexedSetNode( + dataMap = newDataMap, + nodeMap = newNodeMap, + content = newContent, + originalHashes = newOriginalHashes, + size = newSize, + cachedJavaKeySetHashCode = newCachedHashCode + ) + else this + + case _ => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i), getHash(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 + } + } + + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + val thisPayloadArity = payloadArity + var pass = true + var i = 0 + while (i < thisPayloadArity && pass) { + pass &&= f(getPayload(i), getHash(i)) + i += 1 + } + + val thisNodeArity = nodeArity + var j = 0 + while (j < thisNodeArity && pass) { + pass &&= getNode(j).foreachWithHashWhile(f) + j += 1 + } + pass + } +} + 
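+// A sketch (illustrative, not part of the implementation) of how `concat` above classifies each bit
+// position of the combined bitmaps before building the result. For a single `bitpos`, the category is
+// determined by four membership tests:
+//
+//   val leftData  = (bitpos & this.dataMap) != 0  // payload stored inline on the left
+//   val leftNode  = (bitpos & this.nodeMap) != 0  // sub-node on the left
+//   val rightData = (bitpos & bm.dataMap) != 0    // payload stored inline on the right
+//   val rightNode = (bitpos & bm.nodeMap) != 0    // sub-node on the right
+//
+// e.g. leftData && rightData becomes either `leftDataRightDataLeftOverwrites` (the two elements are equal)
+// or `leftDataRightDataMigrateToNode` (distinct elements that must be disambiguated one level deeper), and
+// leftData && rightNode becomes `leftDataRightNode` (the left payload is pushed into the right sub-node).
+// A second pass over these categories then fills `newContent` front-to-back (data) and back-to-front (nodes).
+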
+private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A] @uncheckedCaptures) extends SetNode[A] {
+
+  import Node._
+
+  require(content.length >= 2)
+
+  def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean =
+    this.hash == hash && content.contains(element)
+
+  def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] =
+    if (this.contains(element, originalHash, hash, shift)) {
+      this
+    } else {
+      new HashCollisionSetNode[A](originalHash, hash, content.appended(element))
+    }
+
+  /**
+   * Remove an element from the hash collision node.
+   *
+   * If, after deletion, only one element remains, we return a bit-mapped indexed node with a
+   * singleton element and a hash-prefix for trie level 0. This node will then either a) become
+   * the new root, or b) be unwrapped and inlined deeper in the trie.
+   */
+  def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] =
+    if (!this.contains(element, originalHash, hash, shift)) {
+      this
+    } else {
+      val updatedContent = content.filterNot(element0 => element0 == element)
+      // assert(updatedContent.size == content.size - 1)
+
+      updatedContent.size match {
+        case 1 => new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(updatedContent(0)), Array(originalHash), 1, hash)
+        case _ => new HashCollisionSetNode[A](originalHash, hash, updatedContent)
+      }
+    }
+
+  def hasNodes: Boolean = false
+
+  def nodeArity: Int = 0
+
+  def getNode(index: Int): SetNode[A] =
+    throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.")
+
+  def hasPayload: Boolean = true
+
+  def payloadArity: Int = content.length
+
+  def getPayload(index: Int): A = content(index)
+
+  override def getHash(index: Int): Int = originalHash
+
+  def size: Int = content.length
+
+  def foreach[U](f: A => U): Unit = {
+    val iter = content.iterator
+    while (iter.hasNext) {
+      f(iter.next())
+    }
+  }
+
+
+  override def cachedJavaKeySetHashCode: Int = size * hash
+
+  def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match {
+    case node: HashCollisionSetNode[A] =>
+      this.payloadArity <= node.payloadArity && this.content.forall(node.content.contains)
+    case _ =>
+      false
+  }
+
+  override def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] = {
+    val newContent = content.filterImpl(pred, flipped)
+    val newContentLength = newContent.length
+    if (newContentLength == 0) {
+      SetNode.empty
+    } else if (newContentLength == 1) {
+      new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(newContent.head), Array(originalHash), 1, hash)
+    } else if (newContent.length == content.length) this
+    else new HashCollisionSetNode(originalHash, hash, newContent)
+  }
+
+  override def diff(that: SetNode[A], shift: Int): SetNode[A] =
+    filterImpl(that.contains(_, originalHash, hash, shift), true)
+
+  override def equals(that: Any): Boolean =
+    that match {
+      case node: HashCollisionSetNode[_] =>
+        (this eq node) ||
+          (this.hash == node.hash) &&
+            (this.content.size == node.content.size) &&
+            this.content.forall(node.content.contains)
+      case _ => false
+    }
+
+  override def hashCode(): Int =
+    throw new UnsupportedOperationException("Trie nodes do not support hashing.")
+
+  override def copy() = new HashCollisionSetNode[A](originalHash, hash, content)
+
+  override def concat(that: SetNode[A], shift: Int): SetNode[A] = that match {
+    case hc: HashCollisionSetNode[A] =>
+      if (hc eq this) {
+        this
+      } else {
+        var newContent:
VectorBuilder[A] = null + val iter = hc.content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (!content.contains(nextPayload)) { + if (newContent eq null) { + newContent = new VectorBuilder() + newContent.addAll(this.content) + } + newContent.addOne(nextPayload) + } + } + if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.result()) + } + case _: BitmapIndexedSetNode[A] => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next.asInstanceOf[A], originalHash) + } + } + + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + var stillGoing = true + val iter = content.iterator + while (iter.hasNext && stillGoing) { + val next = iter.next() + stillGoing &&= f(next.asInstanceOf[A], originalHash) + } + stillGoing + } +} + +private final class SetIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[A] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class SetReverseIterator[A](rootNode: SetNode[A]) + extends ChampBaseReverseIterator[SetNode[A]](rootNode) with Iterator[A] { + + def next(): A = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } + +} + +private final class SetHashIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[AnyRef] { + private[this] var hash = 0 + override def hashCode(): Int = hash + + def next(): AnyRef = { + if (!hasNext) + throw new NoSuchElementException + + hash = currentValueNode.getHash(currentValueCursor) + currentValueCursor += 1 + this + } + +} + + +/** + * $factoryInfo + * + * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + @transient + private final val EmptySet = new HashSet(SetNode.empty) + + def empty[A]: HashSet[A] = + EmptySet.asInstanceOf[HashSet[A]] + + def from[A](source: collection.IterableOnce[A]^): HashSet[A] = + source match { + case hs: HashSet[A] => hs + case _ if source.knownSize == 0 => empty[A] + case _ => (newBuilder[A] ++= source).result() + } + + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[A]: ReusableBuilder[A, HashSet[A]] = new HashSetBuilder +} + +/** Builder for HashSet. + * $multipleResults + */ +private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, HashSet[A]] { + import Node._ + import SetNode._ + + private def newEmptyRootNode = new BitmapIndexedSetNode[A](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The last given out HashSet as a return value of `result()`, if any, otherwise null. + * Indicates that on next add, the elements should be copied to an identical structure, before continuing + * mutations. 
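+   *
+   * Illustrative usage of this contract (names as in this file):
+   * {{{
+   *   val b  = HashSet.newBuilder[Int]
+   *   b += 1
+   *   val s1 = b.result()  // the builder now aliases s1's root node
+   *   b += 2               // ensureUnaliased() copies the shared structure first
+   *   val s2 = b.result()
+   *   // s1 == HashSet(1) and s2 == HashSet(1, 2); the earlier result is unaffected
+   * }}}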
   */
+  private var aliased: HashSet[A] @uncheckedCaptures = _
+
+  private def isAliased: Boolean = aliased != null
+
+  /** The root node of the partially built hash set */
+  private var rootNode: BitmapIndexedSetNode[A] @uncheckedCaptures = newEmptyRootNode
+
+  /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */
+  private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
+    if (ix < 0) throw new ArrayIndexOutOfBoundsException
+    if (ix > as.length) throw new ArrayIndexOutOfBoundsException
+    val result = new Array[Int](as.length + 1)
+    arraycopy(as, 0, result, 0, ix)
+    result(ix) = elem
+    arraycopy(as, ix, result, ix + 1, as.length - ix)
+    result
+  }
+
+  /** Inserts the element into the `BitmapIndexedSetNode`. Requires that `key` is not already present at `bitpos` */
+  private def insertValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, key: A, originalHash: Int, keyHash: Int): Unit = {
+    val dataIx = bm.dataIndex(bitpos)
+    val idx = TupleLength * dataIx
+
+    val src = bm.content
+    val dst = new Array[Any](src.length + TupleLength)
+
+    // copy 'src' and insert 1 element (TupleLength == 1 for sets) at position 'idx'
+    arraycopy(src, 0, dst, 0, idx)
+    dst(idx) = key
+    arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+    val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash)
+
+    bm.dataMap = bm.dataMap | bitpos
+    bm.content = dst
+    bm.originalHashes = dstHashes
+    bm.size += 1
+    bm.cachedJavaKeySetHashCode += keyHash
+  }
+
+  /** Mutates `bm` to replace the inline element at bit position `bitpos` with the updated element */
+  private def setValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, elem: A): Unit = {
+    val dataIx = bm.dataIndex(bitpos)
+    val idx = TupleLength * dataIx
+    bm.content(idx) = elem
+  }
+
+  def update(setNode: SetNode[A], element: A, originalHash: Int, elementHash: Int, shift: Int): Unit =
+    setNode match {
+      case bm: BitmapIndexedSetNode[A] =>
+        val mask = maskFrom(elementHash, shift)
+        val bitpos = bitposFrom(mask)
+
+        if ((bm.dataMap & bitpos) != 0) {
+          val index = indexFrom(bm.dataMap, mask, bitpos)
+          val element0 = bm.getPayload(index)
+          val element0UnimprovedHash = bm.getHash(index)
+
+          if (element0UnimprovedHash == originalHash && element0 == element) {
+            setValue(bm, bitpos, element0)
+          } else {
+            val element0Hash = improve(element0UnimprovedHash)
+            val subNodeNew = bm.mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
+            bm.migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew)
+          }
+        } else if ((bm.nodeMap & bitpos) != 0) {
+          val index = indexFrom(bm.nodeMap, mask, bitpos)
+          val subNode = bm.getNode(index)
+          val beforeSize = subNode.size
+          val beforeHashCode = subNode.cachedJavaKeySetHashCode
+          update(subNode, element, originalHash, elementHash, shift + BitPartitionSize)
+          bm.size += subNode.size - beforeSize
+          bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHashCode
+        } else {
+          insertValue(bm, bitpos, element, originalHash, elementHash)
+        }
+      case hc: HashCollisionSetNode[A] =>
+        val index = hc.content.indexOf(element)
+        if (index < 0) {
+          hc.content = hc.content.appended(element)
+        } else {
+          hc.content = hc.content.updated(index, element)
+        }
+    }
+
+  /** If currently referencing aliased structure, copy elements to new mutable structure */
+  private def ensureUnaliased(): Unit = {
+    if (isAliased) copyElems()
+    aliased = null
+  }
+
+  /** Copy elements to new mutable structure */
+  private def copyElems(): Unit
= { + rootNode = rootNode.copy() + } + + override def result(): HashSet[A] = + if (rootNode.size == 0) { + HashSet.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashSet(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: A): this.type = { + ensureUnaliased() + val h = elem.## + val im = improve(h) + update(rootNode, elem, h, im, 0) + this + } + + override def addAll(xs: IterableOnce[A]^) = { + ensureUnaliased() + xs match { + case hm: HashSet[A] => + new ChampBaseIterator[SetNode[A]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + setNode = rootNode, + element = currentValueNode.getPayload(currentValueCursor), + originalHash = originalHash, + elementHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 + } + }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) + } + + this + } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + // if rootNode is empty, we will not have given it away anyways, we instead give out the reused Set.empty + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int = rootNode.size + + override def knownSize: Int = rootNode.size +} From 528c24999addbfe07461380295cbc3dc83116213 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 10:41:32 +0100 Subject: [PATCH 103/216] Add immutable IntMap and LongMap to stdlib --- .../stdlib/collection/immutable/IntMap.scala | 504 ++++++++++++++++++ .../stdlib/collection/immutable/LongMap.scala | 492 +++++++++++++++++ 2 files changed, 996 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/IntMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/LongMap.scala diff --git a/tests/pos-special/stdlib/collection/immutable/IntMap.scala b/tests/pos-special/stdlib/collection/immutable/IntMap.scala new file mode 100644 index 000000000000..d7077845b845 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/IntMap.scala @@ -0,0 +1,504 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.collection.generic.{BitOperations, DefaultSerializationProxy} +import scala.collection.mutable.{Builder, ImmutableBuilder} +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.language.implicitConversions +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** Utility class for integer maps. 
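+ *
+ *  For intuition (illustrative only): `branchMask` finds the highest bit in which two prefixes differ, and
+ *  `join` uses it to build the smallest `Bin` separating them. For keys 4 (`100`) and 5 (`101`):
+ *  {{{
+ *    branchMask(4, 5) == highestOneBit(4 ^ 5) == 1
+ *    // so join(4, Tip(4, a), 5, Tip(5, b)) yields Bin(prefix = 4, mask = 1, Tip(4, a), Tip(5, b))
+ *  }}}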
+ */ +private[immutable] object IntMapUtils extends BitOperations.Int { + def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) + + def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) + if (zero(p1, m)) IntMap.Bin(p, m, t1, t2) + else IntMap.Bin(p, m, t2, t1) + } + + def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match { + case (left, IntMap.Nil) => left + case (IntMap.Nil, right) => right + case (left, right) => IntMap.Bin(prefix, mask, left, right) + } +} + +import IntMapUtils._ + +/** A companion object for integer maps. + * + * @define Coll `IntMap` + */ +object IntMap { + def empty[T] : IntMap[T] = IntMap.Nil + + def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value) + + def apply[T](elems: (Int, T)*): IntMap[T] = + elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) + + def from[V](coll: IterableOnce[(Int, V)]^): IntMap[V] = + newBuilder[V].addAll(coll).result() + + private[immutable] case object Nil extends IntMap[Nothing] { + // Important! Without this equals method in place, an infinite + // loop from Map.equals => size => pattern-match-on-Nil => equals + // develops. Case objects and custom equality don't mix without + // careful handling. + override def equals(that : Any) = that match { + case _: this.type => true + case _: IntMap[_] => false // The only empty IntMaps are eq Nil + case _ => super.equals(that) + } + } + + private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{ + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]] + else IntMap.Tip(key, s) + } + + private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] { + def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]] + else IntMap.Bin[S](prefix, mask, left, right) + } + } + + def newBuilder[V]: Builder[(Int, V), IntMap[V]] = + new ImmutableBuilder[(Int, V), IntMap[V]](empty) { + def addOne(elem: (Int, V)): this.type = { elems = elems + elem; this } + } + + implicit def toFactory[V](dummy: IntMap.type): Factory[(Int, V), IntMap[V]] = ToFactory.asInstanceOf[Factory[(Int, V), IntMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Int, AnyRef)]^): IntMap[AnyRef] = IntMap.from[AnyRef](it) + def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = IntMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]^) = IntMap.from(it) + def newBuilder(from: Any) = IntMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Int, V), IntMap[V]] = toFactory(this) + implicit def buildFromIntMap[V]: BuildFrom[IntMap[_], (Int, V), IntMap[V]] = toBuildFrom(this) +} + +// Iterator over a non-empty IntMap. +private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] { + + // Basically this uses a simple stack to emulate conversion over the tree. 
However
+  // because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and
+  // one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
+  // depth is 33, and we can preallocate a buffer of that size.
+  var index = 0
+  var buffer = new Array[AnyRef](33)
+
+  def pop = {
+    index -= 1
+    buffer(index).asInstanceOf[IntMap[V]]
+  }
+
+  def push(x: IntMap[V]): Unit = {
+    buffer(index) = x.asInstanceOf[AnyRef]
+    index += 1
+  }
+  push(it)
+
+  /**
+   * What value do we assign to a tip?
+   */
+  def valueOf(tip: IntMap.Tip[V]): T
+
+  def hasNext = index != 0
+  @tailrec
+  final def next(): T =
+    pop match {
+      case IntMap.Bin(_, _, t @ IntMap.Tip(_, _), right) => {
+        push(right)
+        valueOf(t)
+      }
+      case IntMap.Bin(_, _, left, right) => {
+        push(right)
+        push(left)
+        next()
+      }
+      case t @ IntMap.Tip(_, _) => valueOf(t)
+      // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap
+      // and don't return an IntMapIterator for IntMap.Nil.
+      case IntMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees")
+    }
+}
+
+private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) {
+  def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value)
+}
+
+private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) {
+  def valueOf(tip: IntMap.Tip[V]) = tip.value
+}
+
+private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) {
+  def valueOf(tip: IntMap.Tip[V]) = tip.key
+}
+
+import IntMap._
+
+/** Specialised immutable map structure for integer keys, based on
+ * [[https://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]]
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
+ *
+ * '''Note:''' This class is as of 2.8 largely superseded by HashMap.
+ *
+ * @tparam T type of the values associated with integer keys.
+ *
+ * @define Coll `immutable.IntMap`
+ * @define coll immutable integer map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
+  with StrictOptimizedMapOps[Int, T, Map, IntMap[T]]
+  with Serializable {
+
+  override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]^): IntMap[T] =
+    intMapFrom[T](coll)
+  protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]^): IntMap[V2] = {
+    val b = IntMap.newBuilder[V2]
+    b.sizeHint(coll)
+    b.addAll(coll)
+    b.result()
+  }
+  override protected def newSpecificBuilder: Builder[(Int, T), IntMap[T]] @uncheckedVariance =
+    new ImmutableBuilder[(Int, T), IntMap[T]](empty) {
+      def addOne(elem: (Int, T)): this.type = { elems = elems + elem; this }
+    }
+
+  override def empty: IntMap[T] = IntMap.Nil
+
+  override def toList = {
+    val buffer = new scala.collection.mutable.ListBuffer[(Int, T) @uncheckedCaptures]
+    foreach(buffer += _)
+    buffer.toList
+  }
+
+  /**
+   * Iterator over key, value pairs of the map in unsigned order of the keys.
+   *
+   * @return an iterator over pairs of integer keys and corresponding values.
+   */
+  def iterator: Iterator[(Int, T)] = this match {
+    case IntMap.Nil => Iterator.empty
+    case _ => new IntMapEntryIterator(this)
+  }
+
+  /**
+   * Loops over the key, value pairs of the map in unsigned order of the keys.
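+   *
+   * Note that "unsigned order" places negative keys after all non-negative ones; for example (illustrative):
+   * {{{
+   *   IntMap(1 -> "a", -1 -> "b").foreach(println) // prints (1,a) before (-1,b): -1 is 0xFFFFFFFF unsigned
+   * }}}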
+ */ + override final def foreach[U](f: ((Int, T)) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } + case IntMap.Tip(key, value) => f((key, value)) + case IntMap.Nil => + } + + override def foreachEntry[U](f: (IntMapUtils.Int, T) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } + case IntMap.Tip(key, value) => f(key, value) + case IntMap.Nil => + } + + override def keysIterator: Iterator[Int] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapKeyIterator(this) + } + + /** + * Loop over the keys of the map. The same as `keys.foreach(f)`, but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Int => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case IntMap.Tip(key, _) => f(key) + case IntMap.Nil => + } + + override def valuesIterator: Iterator[T] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapValueIterator(this) + } + + /** + * Loop over the values of the map. The same as `values.foreach(f)`, but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachValue[U](f: T => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } + case IntMap.Tip(_, value) => f(value) + case IntMap.Nil => + } + + override protected[this] def className = "IntMap" + + override def isEmpty = this eq IntMap.Nil + override def knownSize: Int = if (isEmpty) 0 else super.knownSize + override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => { + val (newleft, newright) = (left.filter(f), right.filter(f)) + if ((left eq newleft) && (right eq newright)) this + else bin(prefix, mask, newleft, newright) + } + case IntMap.Tip(key, value) => + if (f((key, value))) this + else IntMap.Nil + case IntMap.Nil => IntMap.Nil + } + + override def transform[S](f: (Int, T) => S): IntMap[S] = this match { + case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) + case t@IntMap.Tip(key, value) => t.withValue(f(key, value)) + case IntMap.Nil => IntMap.Nil + } + + final override def size: Int = this match { + case IntMap.Nil => 0 + case IntMap.Tip(_, _) => 1 + case IntMap.Bin(_, _, left, right) => left.size + right.size + } + + @tailrec + final def get(key: Int): Option[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) + case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None + case IntMap.Nil => None + } + + @tailrec + final override def getOrElse[S >: T](key: Int, default: => S): S = this match { + case IntMap.Nil => default + case IntMap.Tip(key2, value) => if (key == key2) value else default + case IntMap.Bin(prefix, mask, left, right) => + if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) + } + + @tailrec + final override def apply(key: Int): T = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) + case IntMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") + case IntMap.Nil => throw new IllegalArgumentException("key not found") + } + + override def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) + + override def updated[S >: T](key: Int, value: S): 
IntMap[S] = this match {
+    case IntMap.Bin(prefix, mask, left, right) =>
+      if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+      else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right)
+      else IntMap.Bin(prefix, mask, left, right.updated(key, value))
+    case IntMap.Tip(key2, value2) =>
+      if (key == key2) IntMap.Tip(key, value)
+      else join(key, IntMap.Tip(key, value), key2, this)
+    case IntMap.Nil => IntMap.Tip(key, value)
+  }
+
+  def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f))
+
+  def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f))
+
+  override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] =
+    super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such
+
+  override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = concat(that)
+
+  def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] =
+    strictOptimizedCollect(IntMap.newBuilder[V2], pf)
+
+  /**
+   * Updates the map, using the provided function to resolve conflicts if the key is already present.
+   *
+   * Equivalent to:
+   * {{{
+   *   this.get(key) match {
+   *     case None => this.update(key, value)
+   *     case Some(oldvalue) => this.update(key, f(oldvalue, value))
+   *   }
+   * }}}
+   *
+   * @tparam S The supertype of values in this `IntMap`.
+   * @param key The key to update
+   * @param value The value to use if there is no conflict
+   * @param f The function used to resolve conflicts.
+   * @return The updated map.
+   */
+  def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match {
+    case IntMap.Bin(prefix, mask, left, right) =>
+      if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+      else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+      else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+    case IntMap.Tip(key2, value2) =>
+      if (key == key2) IntMap.Tip(key, f(value2, value))
+      else join(key, IntMap.Tip(key, value), key2, this)
+    case IntMap.Nil => IntMap.Tip(key, value)
+  }
+
+  def removed(key: Int): IntMap[T] = this match {
+    case IntMap.Bin(prefix, mask, left, right) =>
+      if (!hasMatch(key, prefix, mask)) this
+      else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+      else bin(prefix, mask, left, right - key)
+    case IntMap.Tip(key2, _) =>
+      if (key == key2) IntMap.Nil
+      else this
+    case IntMap.Nil => IntMap.Nil
+  }
+
+  /**
+   * A combined transform and filter function. Returns an `IntMap` such that
+   * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+   * the map contains no mapping for `key`, and if `f(key, value) == Some(x)`
+   * the map contains `(key, x)`.
+   *
+   * @tparam S The type of the values in the resulting `IntMap`.
+   * @param f The transforming function.
+   * @return The modified map.
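+   *
+   * For example (illustrative, with a hypothetical `counts: IntMap[Int]`), decrementing counters and
+   * dropping the ones that reach zero:
+   * {{{
+   *   counts.modifyOrRemove((k, v) => if (v > 1) Some(v - 1) else None)
+   * }}}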
+   */
+  def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match {
+    case IntMap.Bin(prefix, mask, left, right) =>
+      val newleft = left.modifyOrRemove(f)
+      val newright = right.modifyOrRemove(f)
+      if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]]
+      else bin(prefix, mask, newleft, newright)
+    case IntMap.Tip(key, value) => f(key, value) match {
+      case None =>
+        IntMap.Nil
+      case Some(value2) =>
+        //hack to preserve sharing
+        if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]]
+        else IntMap.Tip(key, value2)
+    }
+    case IntMap.Nil =>
+      IntMap.Nil
+  }
+
+  /**
+   * Forms a union map with that map, using the combining function to resolve conflicts.
+   *
+   * @tparam S The type of values in `that`, a supertype of values in `this`.
+   * @param that The map to form a union with.
+   * @param f The function used to resolve conflicts between two mappings.
+   * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
+   */
+  def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match {
+    case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) =>
+      if (shorter(m1, m2)) {
+        if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that)
+        else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+        else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f))
+      } else if (shorter(m2, m1)) {
+        if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that)
+        else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+        else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f))
+      }
+      else {
+        if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2, f), r1.unionWith(r2, f))
+        else join(p1, this, p2, that)
+      }
+    case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x))
+    case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+    case (IntMap.Nil, x) => x
+    case (x, IntMap.Nil) => x
+  }
+
+  /**
+   * Forms the intersection of these two maps with a combining function. The
+   * resulting map is a map that has only keys present in both maps and has
+   * values produced from the original mappings by combining them with `f`.
+   *
+   * @tparam S The type of values in `that`.
+   * @tparam R The type of values in the resulting `IntMap`.
+   * @param that The map to intersect with.
+   * @param f The combining function.
+   * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
+   */
+  def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match {
+    case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) =>
+      if (shorter(m1, m2)) {
+        if (!hasMatch(p2, p1, m1)) IntMap.Nil
+        else if (zero(p2, m1)) l1.intersectionWith(that, f)
+        else r1.intersectionWith(that, f)
+      } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
+      else {
+        if (!hasMatch(p1, p2, m2)) IntMap.Nil
+        else if (zero(p1, m2)) this.intersectionWith(l2, f)
+        else this.intersectionWith(r2, f)
+      }
+    case (IntMap.Tip(key, value), that) => that.get(key) match {
+      case None => IntMap.Nil
+      case Some(value2) => IntMap.Tip(key, f(key, value, value2))
+    }
+    case (_, IntMap.Tip(key, value)) => this.get(key) match {
+      case None => IntMap.Nil
+      case Some(value2) => IntMap.Tip(key, f(key, value2, value))
+    }
+    case (_, _) => IntMap.Nil
+  }
+
+  /**
+   * Left biased intersection. Returns the map that has all the same mappings
+   * as this but only for keys which are present in the other map.
+   *
+   * @tparam R The type of values in `that`.
+   * @param that The map to intersect with.
+   * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
+   */
+  def intersection[R](that: IntMap[R]): IntMap[T] =
+    this.intersectionWith(that, (key: Int, value: T, value2: R) => value)
+
+  def ++[S >: T](that: IntMap[S]) =
+    this.unionWith[S](that, (key, x, y) => y)
+
+  /**
+   * The entry with the lowest key value considered in unsigned order.
+   */
+  @tailrec
+  final def firstKey: Int = this match {
+    case IntMap.Bin(_, _, l, r) => l.firstKey
+    case IntMap.Tip(k, v) => k
+    case IntMap.Nil => throw new IllegalStateException("Empty set")
+  }
+
+  /**
+   * The entry with the highest key value considered in unsigned order.
+   */
+  @tailrec
+  final def lastKey: Int = this match {
+    case IntMap.Bin(_, _, l, r) => r.lastKey
+    case IntMap.Tip(k, v) => k
+    case IntMap.Nil => throw new IllegalStateException("Empty set")
+  }
+
+  protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(IntMap.toFactory[T](IntMap), this)
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/LongMap.scala b/tests/pos-special/stdlib/collection/immutable/LongMap.scala
new file mode 100644
index 000000000000..4abf433273f2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/LongMap.scala
@@ -0,0 +1,492 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package immutable
+
+import java.lang.IllegalStateException
+
+import scala.collection.generic.{BitOperations, DefaultSerializationProxy}
+import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer}
+import scala.annotation.tailrec
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.implicitConversions
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** Utility class for long maps.
+ */
+private[immutable] object LongMapUtils extends BitOperations.Long {
+  def branchMask(i: Long, j: Long) = highestOneBit(i ^ j)
+
+  def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = {
+    val m = branchMask(p1, p2)
+    val p = mask(p1, m)
+    if (zero(p1, m)) LongMap.Bin(p, m, t1, t2)
+    else LongMap.Bin(p, m, t2, t1)
+  }
+
+  def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match {
+    case (left, LongMap.Nil) => left
+    case (LongMap.Nil, right) => right
+    case (left, right) => LongMap.Bin(prefix, mask, left, right)
+  }
+}
+
+import LongMapUtils._
+
+/** A companion object for long maps.
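+ *
+ * A small usage sketch for illustration (not part of the original documentation):
+ * {{{
+ *   val m = LongMap(1L -> "one", 2L -> "two")
+ *   m.updated(3L, "three").size // 3
+ *   m(2L)                       // "two"
+ * }}}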
+ *
+ * @define Coll `LongMap`
+ */
+object LongMap {
+  def empty[T]: LongMap[T] = LongMap.Nil
+  def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value)
+  def apply[T](elems: (Long, T)*): LongMap[T] =
+    elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
+
+  def from[V](coll: IterableOnce[(Long, V)]^): LongMap[V] =
+    newBuilder[V].addAll(coll).result()
+
+  def newBuilder[V]: Builder[(Long, V), LongMap[V]] =
+    new ImmutableBuilder[(Long, V), LongMap[V]](empty) {
+      def addOne(elem: (Long, V)): this.type = { elems = elems + elem; this }
+    }
+
+  private[immutable] case object Nil extends LongMap[Nothing] {
+    // Important, don't remove this! See IntMap for explanation.
+    override def equals(that: Any) = that match {
+      case _: this.type => true
+      case _: LongMap[_] => false // The only empty LongMaps are eq Nil
+      case _ => super.equals(that)
+    }
+  }
+
+  private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] {
+    def withValue[S](s: S) =
+      if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]]
+      else LongMap.Tip(key, s)
+  }
+
+  private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] {
+    def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = {
+      if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]]
+      else LongMap.Bin[S](prefix, mask, left, right)
+    }
+  }
+
+  implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]]
+
+  @SerialVersionUID(3L)
+  private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable {
+    def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it)
+    def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef]
+  }
+
+  implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]]
+  private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] {
+    def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it)
+    def newBuilder(from: Any) = LongMap.newBuilder[AnyRef]
+  }
+
+  implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this)
+  implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this)
+}
+
+// Iterator over a non-empty LongMap.
+private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
+
+  // Basically this uses a simple stack to emulate recursion over the tree. However,
+  // because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and
+  // one LongMap.Tip sitting on the stack at any point. Therefore we know the maximum stack
+  // depth is 65.
+  var index = 0
+  var buffer = new Array[AnyRef](65)
+
+  def pop() = {
+    index -= 1
+    buffer(index).asInstanceOf[LongMap[V]]
+  }
+
+  def push(x: LongMap[V]): Unit = {
+    buffer(index) = x.asInstanceOf[AnyRef]
+    index += 1
+  }
+  push(it)
+
+  /**
+   * What value do we assign to a tip?
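+   * (For the concrete iterators defined below: the key, the value, or the
+   * `(key, value)` pair, respectively.)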
+   */
+  def valueOf(tip: LongMap.Tip[V]): T
+
+  def hasNext = index != 0
+  @tailrec
+  final def next(): T =
+    pop() match {
+      case LongMap.Bin(_, _, t@LongMap.Tip(_, _), right) => {
+        push(right)
+        valueOf(t)
+      }
+      case LongMap.Bin(_, _, left, right) => {
+        push(right)
+        push(left)
+        next()
+      }
+      case t@LongMap.Tip(_, _) => valueOf(t)
+      // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap
+      // and don't return a LongMapIterator for LongMap.Nil.
+      case LongMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees")
+    }
+}
+
+private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it) {
+  def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value)
+}
+
+private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it) {
+  def valueOf(tip: LongMap.Tip[V]) = tip.value
+}
+
+private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it) {
+  def valueOf(tip: LongMap.Tip[V]) = tip.key
+}
+
+/**
+ * Specialised immutable map structure for long keys, based on
+ * [[https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Long Maps]]
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
+ *
+ * Note: This class is as of 2.8 largely superseded by HashMap.
+ *
+ * @tparam T type of the values associated with the long keys.
+ *
+ * @define Coll `immutable.LongMap`
+ * @define coll immutable long integer map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+sealed abstract class LongMap[+T] extends AbstractMap[Long, T]
+  with StrictOptimizedMapOps[Long, T, Map, LongMap[T]]
+  with Serializable {
+
+  override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T) @uncheckedVariance]^): LongMap[T] = {
+    //TODO should this be the default implementation of this method in StrictOptimizedIterableOps?
+    val b = newSpecificBuilder
+    b.sizeHint(coll)
+    b.addAll(coll)
+    b.result()
+  }
+  override protected def newSpecificBuilder: Builder[(Long, T), LongMap[T]] @uncheckedVariance =
+    new ImmutableBuilder[(Long, T), LongMap[T]](empty) {
+      def addOne(elem: (Long, T)): this.type = { elems = elems + elem; this }
+    }
+
+  override def empty: LongMap[T] = LongMap.Nil
+
+  override def toList = {
+    val buffer = new ListBuffer[(Long, T) @uncheckedCaptures]
+    foreach(buffer += _)
+    buffer.toList
+  }
+
+  /**
+   * Iterator over key, value pairs of the map in unsigned order of the keys.
+   *
+   * @return an iterator over pairs of long keys and corresponding values.
+   */
+  def iterator: Iterator[(Long, T)] = this match {
+    case LongMap.Nil => Iterator.empty
+    case _ => new LongMapEntryIterator(this)
+  }
+
+  /**
+   * Loops over the key, value pairs of the map in unsigned order of the keys.
+   */
+  override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
+    case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
+    case LongMap.Tip(key, value) => f((key, value))
+    case LongMap.Nil =>
+  }
+
+  override final def foreachEntry[U](f: (Long, T) => U): Unit = this match {
+    case LongMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) }
+    case LongMap.Tip(key, value) => f(key, value)
+    case LongMap.Nil =>
+  }
+
+  override def keysIterator: Iterator[Long] = this match {
+    case LongMap.Nil => Iterator.empty
+    case _ => new LongMapKeyIterator(this)
+  }
+
+  /**
+   * Loop over the keys of the map.
The same as keys.foreach(f), but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Long => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case LongMap.Tip(key, _) => f(key) + case LongMap.Nil => + } + + override def valuesIterator: Iterator[T] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapValueIterator(this) + } + + /** + * Loop over the values of the map. The same as values.foreach(f), but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachValue[U](f: T => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } + case LongMap.Tip(_, value) => f(value) + case LongMap.Nil => + } + + override protected[this] def className = "LongMap" + + override def isEmpty = this eq LongMap.Nil + override def knownSize: Int = if (isEmpty) 0 else super.knownSize + override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match { + case LongMap.Bin(prefix, mask, left, right) => { + val (newleft, newright) = (left.filter(f), right.filter(f)) + if ((left eq newleft) && (right eq newright)) this + else bin(prefix, mask, newleft, newright) + } + case LongMap.Tip(key, value) => + if (f((key, value))) this + else LongMap.Nil + case LongMap.Nil => LongMap.Nil + } + + override def transform[S](f: (Long, T) => S): LongMap[S] = this match { + case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) + case t@LongMap.Tip(key, value) => t.withValue(f(key, value)) + case LongMap.Nil => LongMap.Nil + } + + final override def size: Int = this match { + case LongMap.Nil => 0 + case LongMap.Tip(_, _) => 1 + case LongMap.Bin(_, _, left, right) => left.size + right.size + } + + @tailrec + final def get(key: Long): Option[T] = this match { + case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) + case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None + case LongMap.Nil => None + } + + @tailrec + final override def getOrElse[S >: T](key: Long, default: => S): S = this match { + case LongMap.Nil => default + case LongMap.Tip(key2, value) => if (key == key2) value else default + case LongMap.Bin(prefix, mask, left, right) => + if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) + } + + @tailrec + final override def apply(key: Long): T = this match { + case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) + case LongMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") + case LongMap.Nil => throw new IllegalArgumentException("key not found") + } + + override def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2) + + override def updated[S >: T](key: Long, value: S): LongMap[S] = this match { + case LongMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right) + else LongMap.Bin(prefix, mask, left, right.updated(key, value)) + case LongMap.Tip(key2, value2) => + if (key == key2) LongMap.Tip(key, value) + else join(key, LongMap.Tip(key, value), key2, this) + case LongMap.Nil => LongMap.Tip(key, value) + } + + /** + * Updates the map, using the provided function to resolve conflicts if the key is already present. 
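+   *
+   * For instance, accumulating counts (an illustrative sketch, not from the
+   * original documentation):
+   * {{{
+   *   val counts = LongMap(1L -> 2)
+   *   counts.updateWith(1L, 1, _ + _) // LongMap(1L -> 3)
+   *   counts.updateWith(5L, 1, _ + _) // LongMap(1L -> 2, 5L -> 1)
+   * }}}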
+   *
+   * Equivalent to:
+   * {{{
+   *   this.get(key) match {
+   *     case None => this.update(key, value)
+   *     case Some(oldvalue) => this.update(key, f(oldvalue, value))
+   *   }
+   * }}}
+   *
+   * @tparam S The supertype of values in this `LongMap`.
+   * @param key The key to update.
+   * @param value The value to use if there is no conflict.
+   * @param f The function used to resolve conflicts.
+   * @return The updated map.
+   */
+  def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match {
+    case LongMap.Bin(prefix, mask, left, right) =>
+      if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+      else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+      else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+    case LongMap.Tip(key2, value2) =>
+      if (key == key2) LongMap.Tip(key, f(value2, value))
+      else join(key, LongMap.Tip(key, value), key2, this)
+    case LongMap.Nil => LongMap.Tip(key, value)
+  }
+
+  def removed(key: Long): LongMap[T] = this match {
+    case LongMap.Bin(prefix, mask, left, right) =>
+      if (!hasMatch(key, prefix, mask)) this
+      else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+      else bin(prefix, mask, left, right - key)
+    case LongMap.Tip(key2, _) =>
+      if (key == key2) LongMap.Nil
+      else this
+    case LongMap.Nil => LongMap.Nil
+  }
+
+  /**
+   * A combined transform and filter function. Returns a `LongMap` such that
+   * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+   * the map contains no mapping for key, and if `f(key, value) == Some(x)` the
+   * map contains `(key, x)`.
+   *
+   * @tparam S The type of the values in the resulting `LongMap`.
+   * @param f The transforming function.
+   * @return The modified map.
+   */
+  def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match {
+    case LongMap.Bin(prefix, mask, left, right) => {
+      val newleft = left.modifyOrRemove(f)
+      val newright = right.modifyOrRemove(f)
+      if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]]
+      else bin(prefix, mask, newleft, newright)
+    }
+    case LongMap.Tip(key, value) => f(key, value) match {
+      case None => LongMap.Nil
+      case Some(value2) =>
+        //hack to preserve sharing
+        if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]]
+        else LongMap.Tip(key, value2)
+    }
+    case LongMap.Nil => LongMap.Nil
+  }
+
+  /**
+   * Forms a union map with that map, using the combining function to resolve conflicts.
+   *
+   * @tparam S The type of values in `that`, a supertype of values in `this`.
+   * @param that The map to form a union with.
+   * @param f The function used to resolve conflicts between two mappings.
+   * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
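+   *
+   * For example (an illustrative sketch, not from the original documentation):
+   * {{{
+   *   LongMap(1L -> 10, 2L -> 20).unionWith(LongMap(2L -> 1, 3L -> 3), (k, v1, v2) => v1 + v2)
+   *   // yields LongMap(1L -> 10, 2L -> 21, 3L -> 3)
+   * }}}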
+ */ + def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{ + case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that) + else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1) + else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f)) + } else if (shorter(m2, m1)){ + if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that) + else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2) + else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f)) + } + else { + if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) + else join(p1, this, p2, that) + } + case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x)) + case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) + case (LongMap.Nil, x) => x + case (x, LongMap.Nil) => x + } + + /** + * Forms the intersection of these two maps with a combining function. The + * resulting map is a map that has only keys present in both maps and has + * values produced from the original mappings by combining them with `f`. + * + * @tparam S The type of values in `that`. + * @tparam R The type of values in the resulting `LongMap`. + * @param that The map to intersect with. + * @param f The combining function. + * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. + */ + def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match { + case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) LongMap.Nil + else if (zero(p2, m1)) l1.intersectionWith(that, f) + else r1.intersectionWith(that, f) + } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) + else { + if (!hasMatch(p1, p2, m2)) LongMap.Nil + else if (zero(p1, m2)) this.intersectionWith(l2, f) + else this.intersectionWith(r2, f) + } + case (LongMap.Tip(key, value), that) => that.get(key) match { + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value, value2)) + } + case (_, LongMap.Tip(key, value)) => this.get(key) match { + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value2, value)) + } + case (_, _) => LongMap.Nil + } + + /** + * Left biased intersection. Returns the map that has all the same mappings as this but only for keys + * which are present in the other map. + * + * @tparam R The type of values in `that`. + * @param that The map to intersect with. + * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. 
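+   *
+   * For example (an illustrative sketch, not from the original documentation):
+   * {{{
+   *   LongMap(1L -> "a", 2L -> "b").intersection(LongMap(2L -> 0, 3L -> 0))
+   *   // yields LongMap(2L -> "b"): only keys present in both maps remain, with values from the left
+   * }}}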
+ */ + def intersection[R](that: LongMap[R]): LongMap[T] = + this.intersectionWith(that, (key: Long, value: T, value2: R) => value) + + def ++[S >: T](that: LongMap[S]) = + this.unionWith[S](that, (key, x, y) => y) + + @tailrec + final def firstKey: Long = this match { + case LongMap.Bin(_, _, l, r) => l.firstKey + case LongMap.Tip(k, v) => k + case LongMap.Nil => throw new IllegalStateException("Empty set") + } + + @tailrec + final def lastKey: Long = this match { + case LongMap.Bin(_, _, l, r) => r.lastKey + case LongMap.Tip(k , v) => k + case LongMap.Nil => throw new IllegalStateException("Empty set") + } + + def map[V2](f: ((Long, T)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + + def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = + super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such + + override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(that) + + def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[T](LongMap), this) +} From 02c62814dc7f13c683f42a8eb26aed687ef132c2 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 10:47:00 +0100 Subject: [PATCH 104/216] Add immutable ListSet and ListMap to stdlib --- .../stdlib/collection/immutable/ListMap.scala | 373 ++++++++++++++++++ .../stdlib/collection/immutable/ListSet.scala | 140 +++++++ 2 files changed, 513 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/ListMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/ListSet.scala diff --git a/tests/pos-special/stdlib/collection/immutable/ListMap.scala b/tests/pos-special/stdlib/collection/immutable/ListMap.scala new file mode 100644 index 000000000000..c5000d785144 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ListMap.scala @@ -0,0 +1,373 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import scala.collection.mutable.ReusableBuilder +import scala.collection.generic.DefaultSerializable +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** + * This class implements immutable maps using a list-based data structure. List map iterators and + * traversal methods visit key-value pairs in the order they were first inserted. + * + * Entries are stored internally in reversed insertion order, which means the newest key is at the + * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init` + * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes + * this collection suitable only for a small number of elements. 
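+ *
+ * A small sketch of these guarantees for illustration (not part of the original documentation):
+ * {{{
+ *   val m = ListMap("a" -> 1, "b" -> 2, "c" -> 3)
+ *   m.toList // List(("a", 1), ("b", 2), ("c", 3)), i.e. insertion order
+ *   m.last   // ("c", 3), O(1), since the newest entry sits at the head of the internal list
+ * }}}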
+ *
+ * Instances of `ListMap` represent empty maps; they can be either created by calling the
+ * constructor directly, or by applying the function `ListMap.empty`.
+ *
+ * @tparam K the type of the keys contained in this list map
+ * @tparam V the type of the values associated with the keys
+ *
+ * @define Coll ListMap
+ * @define coll list map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+sealed class ListMap[K, +V]
+  extends AbstractMap[K, V]
+    with SeqMap[K, V]
+    with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]]
+    with MapFactoryDefaults[K, V, ListMap, Iterable]
+    with DefaultSerializable {
+
+  override def mapFactory: MapFactory[ListMap] = ListMap
+
+  override def size: Int = 0
+
+  override def isEmpty: Boolean = true
+
+  override def knownSize: Int = 0
+  def get(key: K): Option[V] = None
+
+  def updated[V1 >: V](key: K, value: V1): ListMap[K, V1] = new ListMap.Node[K, V1](key, value, this)
+
+  def removed(key: K): ListMap[K, V] = this
+
+  def iterator: Iterator[(K, V)] = {
+    var curr: ListMap[K, V] = this
+    var res: List[(K, V)] = Nil
+    while (curr.nonEmpty) {
+      res = (curr.key, curr.value) :: res
+      curr = curr.next
+    }
+    res.iterator
+  }
+
+  override def keys: Iterable[K] = {
+    var curr: ListMap[K, V] = this
+    var res: List[K] = Nil
+    while (curr.nonEmpty) {
+      res = curr.key :: res
+      curr = curr.next
+    }
+    res
+  }
+
+  override def hashCode(): Int = {
+    if (isEmpty) MurmurHash3.emptyMapHash
+    else {
+      // Can't efficiently override foreachEntry directly in ListMap because it would need to preserve iteration
+      // order by reversing the list first. But mapHash is symmetric so the reversed order is fine here.
+      val _reversed = new immutable.AbstractMap[K, V] {
+        override def isEmpty: Boolean = ListMap.this.isEmpty
+        override def removed(key: K): Map[K, V] = ListMap.this.removed(key)
+        override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = ListMap.this.updated(key, value)
+        override def get(key: K): Option[V] = ListMap.this.get(key)
+        override def iterator: Iterator[(K, V)] = ListMap.this.iterator
+        override def foreachEntry[U](f: (K, V) => U): Unit = {
+          var curr: ListMap[K, V] = ListMap.this
+          while (curr.nonEmpty) {
+            f(curr.key, curr.value)
+            curr = curr.next
+          }
+        }
+      }
+      MurmurHash3.mapHash(_reversed)
+    }
+  }
+
+  private[immutable] def key: K = throw new NoSuchElementException("key of empty map")
+  private[immutable] def value: V = throw new NoSuchElementException("value of empty map")
+  private[immutable] def next: ListMap[K, V] = throw new NoSuchElementException("next of empty map")
+
+  override def foldRight[Z](z: Z)(op: ((K, V), Z) => Z): Z = ListMap.foldRightInternal(this, z, op)
+  override protected[this] def className = "ListMap"
+
+}
+
+/**
+ * $factoryInfo
+ *
+ * Note that each element insertion takes O(n) time, which means that creating a list map with
+ * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of
+ * elements.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]]
+ * section on `List Maps` for more information.
+ * @define Coll ListMap
+ * @define coll list map
+ */
+@SerialVersionUID(3L)
+object ListMap extends MapFactory[ListMap] {
+  /**
+   * Represents an entry in the `ListMap`.
+   */
+  private[immutable] final class Node[K, V](
+    override private[immutable] val key: K,
+    private[immutable] var _value: V @uncheckedCaptures,
+    private[immutable] var _init: ListMap[K, V] @uncheckedCaptures
+  ) extends ListMap[K, V] {
+    releaseFence()
+
+    override private[immutable] def value: V = _value
+
+    override def size: Int = sizeInternal(this, 0)
+
+    @tailrec private[this] def sizeInternal(cur: ListMap[K, V], acc: Int): Int =
+      if (cur.isEmpty) acc
+      else sizeInternal(cur.next, acc + 1)
+
+    override def isEmpty: Boolean = false
+
+    override def knownSize: Int = -1
+
+    @throws[NoSuchElementException]
+    override def apply(k: K): V = applyInternal(this, k)
+
+    @tailrec private[this] def applyInternal(cur: ListMap[K, V], k: K): V =
+      if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k)
+      else if (k == cur.key) cur.value
+      else applyInternal(cur.next, k)
+
+    override def get(k: K): Option[V] = getInternal(this, k)
+
+    @tailrec private[this] def getInternal(cur: ListMap[K, V], k: K): Option[V] =
+      if (cur.isEmpty) None
+      else if (k == cur.key) Some(cur.value)
+      else getInternal(cur.next, k)
+
+    override def contains(k: K): Boolean = containsInternal(this, k)
+
+    @tailrec private[this] def containsInternal(cur: ListMap[K, V], k: K): Boolean =
+      if (cur.isEmpty) false
+      else if (k == cur.key) true
+      else containsInternal(cur.next, k)
+
+    override def updated[V1 >: V](k: K, v: V1): ListMap[K, V1] = {
+
+      var index = -1 // the index (in reverse) where the key to update exists, if it is found
+      var found = false // true if the key is found in the map
+      var isDifferent = false // true if the key was found and the values are different
+
+      {
+        var curr: ListMap[K, V] = this
+
+        while (curr.nonEmpty && !found) {
+          if (k == curr.key) {
+            found = true
+            isDifferent = v.asInstanceOf[AnyRef] ne curr.value.asInstanceOf[AnyRef]
+          }
+          index += 1
+          curr = curr.init
+        }
+      }
+
+      if (found) {
+        if (isDifferent) {
+          var newHead: ListMap.Node[K, V1] = null
+          var prev: ListMap.Node[K, V1] = null
+          var curr: ListMap[K, V1] = this
+          var i = 0
+          while (i < index) {
+            val temp = new ListMap.Node(curr.key, curr.value, null)
+            if (prev ne null) {
+              prev._init = temp
+            }
+            prev = temp
+            curr = curr.init
+            if (newHead eq null) {
+              newHead = prev
+            }
+            i += 1
+          }
+          val newNode = new ListMap.Node(curr.key, v, curr.init)
+          if (prev ne null) {
+            prev._init = newNode
+          }
+          releaseFence()
+          if (newHead eq null) newNode else newHead
+        } else {
+          this
+        }
+      } else {
+        new ListMap.Node(k, v, this)
+      }
+    }
+
+    @tailrec private[this] def removeInternal(k: K, cur: ListMap[K, V], acc: List[ListMap[K, V]]): ListMap[K, V] =
+      if (cur.isEmpty) acc.last
+      else if (k == cur.key) acc.foldLeft(cur.next) { (t, h) => new Node(h.key, h.value, t) }
+      else removeInternal(k, cur.next, cur :: acc)
+
+    override def removed(k: K): ListMap[K, V] = removeInternal(k, this, Nil)
+
+    override private[immutable] def next: ListMap[K, V] = _init
+
+    override def last: (K, V) = (key, value)
+    override def init: ListMap[K, V] = next
+
+  }
+
+  def empty[K, V]: ListMap[K, V] = EmptyListMap.asInstanceOf[ListMap[K, V]]
+
+  private object EmptyListMap extends ListMap[Any, Nothing]
+
+  def from[K, V](it: collection.IterableOnce[(K, V)]^): ListMap[K, V] =
+    it match {
+      case lm: ListMap[K, V] => lm
+      case lhm: collection.mutable.LinkedHashMap[K, V] =>
+        // by directly iterating through LinkedHashMap entries, we save creating intermediate tuples for each
+        // key-value pair
+        var current: ListMap[K, V] = empty[K, V]
+        var firstEntry = lhm._firstEntry
+        while (firstEntry ne null) {
+          current = new Node(firstEntry.key, firstEntry.value, current)
+          firstEntry = firstEntry.later
+        }
+        current
+      case _: collection.Map[K, V] | _: collection.MapView[K, V] =>
+        // when creating from a map, we need not handle duplicate keys, so we can just append each key-value to the end
+        var current: ListMap[K, V] = empty[K, V]
+        val iter = it.iterator
+        while (iter.hasNext) {
+          val (k, v) = iter.next()
+          current = new Node(k, v, current)
+        }
+        current
+
+      case _ => (newBuilder[K, V] ++= it).result()
+    }
+
+  /** Returns a new ListMap builder.
+   *
+   * The implementation safely handles additions after `result()` without calling `clear()`.
+   *
+   * @tparam K the map key type
+   * @tparam V the map value type
+   */
+  def newBuilder[K, V]: ReusableBuilder[(K, V), ListMap[K, V]] = new ListMapBuilder[K, V]
+
+  @tailrec private def foldRightInternal[K, V, Z](map: ListMap[K, V], prevValue: Z, op: ((K, V), Z) => Z): Z = {
+    if (map.isEmpty) prevValue
+    else foldRightInternal(map.init, op(map.last, prevValue), op)
+  }
+}
+
+/** Builder for ListMap.
+ * $multipleResults
+ */
+private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] {
+  private[this] var isAliased: Boolean = false
+  private[this] var underlying: ListMap[K, V] @uncheckedCaptures = ListMap.empty
+
+  override def clear(): Unit = {
+    underlying = ListMap.empty
+    isAliased = false
+  }
+
+  override def result(): ListMap[K, V] = {
+    isAliased = true
+    releaseFence()
+    underlying
+  }
+
+  override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2)
+
+  @tailrec
+  private[this] def insertValueAtKeyReturnFound(m: ListMap[K, V], key: K, value: V): Boolean = m match {
+    case n: ListMap.Node[K, V] =>
+      if (n.key == key) {
+        n._value = value
+        true
+      } else {
+        insertValueAtKeyReturnFound(n.init, key, value)
+      }
+    case _ => false
+  }
+
+  def addOne(key: K, value: V): this.type = {
+    if (isAliased) {
+      underlying = underlying.updated(key, value)
+    } else {
+      if (!insertValueAtKeyReturnFound(underlying, key, value)) {
+        underlying = new ListMap.Node(key, value, underlying)
+      }
+    }
+    this
+  }
+  override def addAll(xs: IterableOnce[(K, V)]^): this.type = {
+    if (isAliased) {
+      super.addAll(xs)
+    } else if (underlying.nonEmpty) {
+      xs match {
+        case m: collection.Map[K, V] =>
+          // if it is a map, then its keys will not collide with themselves.
+          // therefore we only need to check the already-existing elements for collisions.
+          // No need to check the entire list
+
+          val iter = m.iterator
+          var newUnderlying = underlying
+          while (iter.hasNext) {
+            val next = iter.next()
+            if (!insertValueAtKeyReturnFound(underlying, next._1, next._2)) {
+              newUnderlying = new ListMap.Node[K, V](next._1, next._2, newUnderlying)
+            }
+          }
+          underlying = newUnderlying
+          this
+
+        case _ =>
+          super.addAll(xs)
+      }
+    } else xs match {
+      case lhm: collection.mutable.LinkedHashMap[K, V] =>
+        // special-casing LinkedHashMap avoids creating an Iterator and tuples for each key-value pair
+        var firstEntry = lhm._firstEntry
+        while (firstEntry ne null) {
+          underlying = new ListMap.Node(firstEntry.key, firstEntry.value, underlying)
+          firstEntry = firstEntry.later
+        }
+        this
+
+      case _: collection.Map[K, V] | _: collection.MapView[K, V] =>
+        val iter = xs.iterator
+        while (iter.hasNext) {
+          val (k, v) = iter.next()
+          underlying = new ListMap.Node(k, v, underlying)
+        }
+
+        this
+      case _ =>
+        super.addAll(xs)
+    }
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/ListSet.scala b/tests/pos-special/stdlib/collection/immutable/ListSet.scala
new file mode 100644
index 000000000000..719abd78e1e6
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/ListSet.scala
@@ -0,0 +1,140 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import mutable.{Builder, ImmutableBuilder}
+import scala.annotation.tailrec
+import scala.collection.generic.DefaultSerializable
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/**
+ * This class implements immutable sets using a list-based data structure. List set iterators and
+ * traversal methods visit elements in the order they were first inserted.
+ *
+ * Elements are stored internally in reversed insertion order, which means the newest element is at
+ * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and
+ * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which
+ * makes this collection suitable only for a small number of elements.
+ *
+ * Instances of `ListSet` represent empty sets; they can be either created by calling the
+ * constructor directly, or by applying the function `ListSet.empty`.
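+ *
+ * For example (an illustrative sketch, not part of the original documentation):
+ * {{{
+ *   val s = ListSet(1, 2, 3)
+ *   s + 2    // still ListSet(1, 2, 3): duplicates are ignored
+ *   s.toList // List(1, 2, 3), i.e. insertion order
+ * }}}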
+ * + * @tparam A the type of the elements contained in this list set + * + * @define Coll ListSet + * @define coll list set + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class ListSet[A] + extends AbstractSet[A] + with StrictOptimizedSetOps[A, ListSet, ListSet[A]] + with IterableFactoryDefaults[A, ListSet] + with DefaultSerializable { + + override protected[this] def className: String = "ListSet" + + override def size: Int = 0 + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + + def contains(elem: A): Boolean = false + + def incl(elem: A): ListSet[A] = new Node(elem) + def excl(elem: A): ListSet[A] = this + + def iterator: scala.collection.Iterator[A] = { + var curr: ListSet[A] = this + var res: List[A] = Nil + while (!curr.isEmpty) { + res = curr.elem :: res + curr = curr.next + } + res.iterator + } + + protected def elem: A = throw new NoSuchElementException("elem of empty set") + protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set") + + override def iterableFactory: IterableFactory[ListSet] = ListSet + + /** + * Represents an entry in the `ListSet`. + */ + protected class Node(override protected val elem: A) extends ListSet[A] { + + override def size = sizeInternal(this, 0) + override def knownSize: Int = -1 + @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int = + if (n.isEmpty) acc + else sizeInternal(n.next, acc + 1) + + override def isEmpty: Boolean = false + + override def contains(e: A): Boolean = containsInternal(this, e) + + @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean = + !n.isEmpty && (n.elem == e || containsInternal(n.next, e)) + + override def incl(e: A): ListSet[A] = if (contains(e)) this else new Node(e) + + override def excl(e: A): ListSet[A] = removeInternal(e, this, Nil) + + @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = + if (cur.isEmpty) acc.last + else if (k == cur.elem) acc.foldLeft(cur.next)((t, h) => new t.Node(h.elem)) + else removeInternal(k, cur.next, cur :: acc) + + override protected def next: ListSet[A] = ListSet.this + + override def last: A = elem + + override def init: ListSet[A] = next + } +} + +/** + * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list set with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. 
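+ *
+ * A small builder sketch for illustration (not part of the original documentation):
+ * {{{
+ *   val b = ListSet.newBuilder[Int]
+ *   b += 1; b += 2; b += 1
+ *   b.result() // ListSet(1, 2): duplicates collapse, and each insertion costs O(n)
+ * }}}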
+ * + * @define Coll ListSet + * @define coll list set + */ +@SerialVersionUID(3L) +object ListSet extends IterableFactory[ListSet] { + + def from[E](it: scala.collection.IterableOnce[E]^): ListSet[E] = + it match { + case ls: ListSet[E] => ls + case _ if it.knownSize == 0 => empty[E] + case _ => (newBuilder[E] ++= it).result() + } + + private object EmptyListSet extends ListSet[Any] { + override def knownSize: Int = 0 + } + private[collection] def emptyInstance: ListSet[Any] = EmptyListSet + + def empty[A]: ListSet[A] = EmptyListSet.asInstanceOf[ListSet[A]] + + def newBuilder[A]: Builder[A, ListSet[A]] = + new ImmutableBuilder[A, ListSet[A]](empty) { + def addOne(elem: A): this.type = { elems = elems + elem; this } + } +} From 2cb34e54f15ae01bcfc11fef5a89d848c3fa2c49 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 10:53:40 +0100 Subject: [PATCH 105/216] Add immutable Map to stdlib --- .../stdlib/collection/immutable/HashMap.scala | 2 +- .../stdlib/collection/immutable/Map.scala | 694 ++++++++++++++++++ 2 files changed, 695 insertions(+), 1 deletion(-) create mode 100644 tests/pos-special/stdlib/collection/immutable/Map.scala diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala index d2f144baa934..c364924db3a3 100644 --- a/tests/pos-special/stdlib/collection/immutable/HashMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/HashMap.scala @@ -386,7 +386,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: else new HashMap(newRootNode) } - override def removedAll(keys: IterableOnce[K]): HashMap[K, V] = { + override def removedAll(keys: IterableOnce[K]^): HashMap[K, V] = { if (isEmpty) { this } else { diff --git a/tests/pos-special/stdlib/collection/immutable/Map.scala b/tests/pos-special/stdlib/collection/immutable/Map.scala new file mode 100644 index 000000000000..6daad829bf55 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Map.scala @@ -0,0 +1,694 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.Map.Map4 +import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** Base type of immutable Maps */ +trait Map[K, +V] + extends Iterable[(K, V)] + with collection.Map[K, V] + with MapOps[K, V, Map, Map[K, V]] + with MapFactoryDefaults[K, V, Map, Iterable] { + + override def mapFactory: scala.collection.MapFactory[Map] = Map + + override final def toMap[K2, V2](implicit ev: (K, V) <:< (K2, V2)): Map[K2, V2] = this.asInstanceOf[Map[K2, V2]] + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
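+   *
+   * For example (an illustrative sketch, not from the original documentation):
+   * {{{
+   *   val m = Map("a" -> 1).withDefault(_.length)
+   *   m("abc")     // 3, computed by the default function
+   *   m.get("abc") // None: `get` is not affected by the default
+   * }}}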
+ * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = new Map.WithDefault[K, V1](this, _ => d) +} + +/** Base trait of immutable Maps implementations + * + * @define coll immutable map + * @define Coll `immutable.Map` + */ +trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends IterableOps[(K, V), Iterable, C] + with collection.MapOps[K, V, CC, C] { + + protected def coll: C with CC[K, V] + + /** Removes a key from this map, returning a new map. + * + * @param key the key to be removed + * @return a new map without a binding for ''key'' + */ + def removed(key: K): C + + /** Alias for `removed` */ + @`inline` final def - (key: K): C = removed(key) + + @deprecated("Use -- with an explicit collection", "2.13.0") + def - (key1: K, key2: K, keys: K*): C = removed(key1).removed(key2).removedAll(keys) + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * $willForceEvaluation + * + * @param keys the collection containing the removed elements. + * @return a new $coll that contains all elements of the current $coll + * except one less occurrence of each of the elements of `elems`. + */ + def removedAll(keys: IterableOnce[K]^): C = keys.iterator.foldLeft[C](coll)(_ - _) + + /** Alias for `removedAll` */ + @`inline` final override def -- (keys: IterableOnce[K]^): C = removedAll(keys) + + /** Creates a new map obtained by updating this map with a given key/value pair. + * @param key the key + * @param value the value + * @tparam V1 the type of the added value + * @return A new map with the new key/value mapping added to this map. + */ + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return A new map with the updated mapping with the key + */ + def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K,V1] = { + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } + } + + /** + * Alias for `updated` + * + * @param kv the key/value pair. + * @tparam V1 the type of the value in the key/value pair. 
+ * @return A new map with the new binding added to this map. + */ + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + + /** This function transforms all the values of mappings contained + * in this map with function `f`. + * + * @param f A function over keys and values + * @return the updated map + */ + def transform[W](f: (K, V) => W): CC[K, W] = map { case (k, v) => (k, f(k, v)) } + + override def keySet: Set[K] = new ImmutableKeySet + + /** The implementation class of the set returned by `keySet` */ + protected class ImmutableKeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable { + def incl(elem: K): Set[K] = if (this(elem)) this else empty ++ this + elem + def excl(elem: K): Set[K] = if (this(elem)) empty ++ this - elem else this + } + +} + +trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends MapOps[K, V, CC, C] + with collection.StrictOptimizedMapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] { + + override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]^): CC[K, V1] = { + var result: CC[K, V1] = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} + + +/** + * $factoryInfo + * @define coll immutable map + * @define Coll `immutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory[Map] { + + @SerialVersionUID(3L) + class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K -> V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + def get(key: K): Option[V] = underlying.get(key) + + override def default(key: K): V = defaultValue(key) + + override def iterableFactory: IterableFactory[Iterable] = underlying.iterableFactory + + def iterator: Iterator[(K, V)] = underlying.iterator + + override def isEmpty: Boolean = underlying.isEmpty + + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = + new WithDefault(underlying.concat(xs), defaultValue) + + def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) + } + + def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]^): Map[K, V] = + it match { + case it: Iterable[_] if it.isEmpty => empty[K, V] + case m: Map[K, V] => m + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), Map[K, V]] = new MapBuilderImpl + + @SerialVersionUID(3L) + private object EmptyMap extends AbstractMap[Any, Nothing] with Serializable { + override def size: Int = 0 + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + def get(key: 
Any): Option[Nothing] = None + override def getOrElse [V1](key: Any, default: => V1): V1 = default + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + override def keysIterator: Iterator[Any] = Iterator.empty + override def valuesIterator: Iterator[Nothing] = Iterator.empty + def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) + def removed(key: Any): Map[Any, Nothing] = this + override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]^): Map[Any, V2] = suffix match { + case m: immutable.Map[Any, V2] => m + case _ => super.concat(suffix) + } + } + + @SerialVersionUID(3L) + final class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def isEmpty: Boolean = false + override def apply(key: K): V = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = key == key1 + def get(key: K): Option[V] = + if (key == key1) Some(value1) else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 else default + def iterator: Iterator[(K, V)] = Iterator.single((key1, value1)) + override def keysIterator: Iterator[K] = Iterator.single(key1) + override def valuesIterator: Iterator[V] = Iterator.single(value1) + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map1(key1, value) + else new Map2(key1, value1, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) Map.empty else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = + if (pred((key1, value1)) != isFlipped) this else Map.empty + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + if (walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) this.asInstanceOf[Map[K, W]] + else new Map1(key1, walue1) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 1 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + final class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else default + def iterator: Iterator[(K, V)] = new Map2Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: 
Iterator[K] = new Map2Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map2Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map2Iterator[A] extends AbstractIterator[A], Pure { + private[this] var i = 0 + override def hasNext: Boolean = i < 2 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map2(key1, value, key2, value2) + else if (key == key2) new Map2(key1, value1, key2, value) + else new Map3(key1, value1, key2, value2, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map1(key2, value2) + else if (key == key2) new Map1(key1, value1) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1 = null.asInstanceOf[K] + var v1 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { {k1 = key1; v1 = value1}; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) {k1 = key2; v1 = value2}; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map2(key1, walue1, key2, walue2) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 2 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else default + def iterator: Iterator[(K, V)] = new Map3Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): 
(K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map3Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map3Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map3Iterator[A] extends AbstractIterator[A], Pure { + private[this] var i = 0 + override def hasNext: Boolean = i < 3 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map3(key1, value, key2, value2, key3, value3) + else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) + else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) + else new Map4(key1, value1, key2, value2, key3, value3, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map2(key2, value2, key3, value3) + else if (key == key2) new Map2(key1, value1, key3, value3) + else if (key == key3) new Map2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1, k2 = null.asInstanceOf[K] + var v1, v2 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} + if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 }; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => new Map2(k1, v1, k2, v2) + case 3 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val walue3 = f(key3, value3) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map3(key1, walue1, key2, walue2, key3, walue3) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 3 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key3, value3) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + final class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) + extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + + override def size: Int 
= 4 + override def knownSize: Int = 4 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else if (key == key4) Some(value4) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else default + def iterator: Iterator[(K, V)] = new Map4Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map4Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map4Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map4Iterator[A] extends AbstractIterator[A], Pure { + private[this] var i = 0 + override def hasNext: Boolean = i < 4 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case 3 => nextResult(key4, value4) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) + else HashMap.empty[K, V1].updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) || p((key4, value4)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) && p((key4, value4)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1, k2, k3 = null.asInstanceOf[K] + var v1, v2, v3 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} + if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 } else { k3 = key3; v3 = value3}; 
n += 1} + if (pred((key4, value4)) != isFlipped) { if (n == 0) { k1 = key4; v1 = value4 } else if (n == 1) { k2 = key4; v2 = value4 } else if (n == 2) { k3 = key4; v3 = value4 }; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => new Map2(k1, v1, k2, v2) + case 3 => new Map3(k1, v1, k2, v2, k3, v3) + case 4 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val walue3 = f(key3, value3) + val walue4 = f(key4, value4) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef]) && + (walue4.asInstanceOf[AnyRef] eq value4.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map4(key1, walue1, key2, walue2, key3, walue3, key4, walue4) + } + private[immutable] def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): builder.type = + builder.addOne(key1, value1).addOne(key2, value2).addOne(key3, value3).addOne(key4, value4) + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 4 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key3, value3) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key4, value4) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ +abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] + +private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { + private[this] var elems: Map[K, V] @uncheckedCaptures = Map.empty + private[this] var switchedToHashMapBuilder: Boolean = false + private[this] var hashMapBuilder: HashMapBuilder[K, V] @uncheckedCaptures = _ + + private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = + if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) + else elems.getOrElse(key, value) + + override def clear(): Unit = { + elems = Map.empty + if (hashMapBuilder != null) { + hashMapBuilder.clear() + } + switchedToHashMapBuilder = false + } + + override def result(): Map[K, V] = + if (switchedToHashMapBuilder) hashMapBuilder.result() else elems + + def addOne(key: K, value: V): this.type = { + if (switchedToHashMapBuilder) { + hashMapBuilder.addOne(key, value) + } else if (elems.size < 4) { + elems = elems.updated(key, value) + } else { + // assert(elems.size == 4) + if (elems.contains(key)) { + elems = elems.updated(key, value) + } else { + switchedToHashMapBuilder = true + if (hashMapBuilder == null) { + hashMapBuilder = new HashMapBuilder + } + elems.asInstanceOf[Map4[K, V]].buildTo(hashMapBuilder) + hashMapBuilder.addOne(key, value) + } + } + + this + } + + def addOne(elem: (K, V)) = addOne(elem._1, elem._2) + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = + if (switchedToHashMapBuilder) { + hashMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } +} From a1fc706f533bad88a5384170f872aad05944fb1a Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 10:59:59 +0100 Subject: [PATCH 106/216] Add Range classes to stdlib --- 
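A note on the counting logic in the two files added below: a range can describe more than `Int.MaxValue` values, so lengths are computed defensively. A small illustration of what this buys (expected results follow from the `numRangeElements`/`count` logic in this patch):

    val r = 0 to Int.MaxValue           // an "overfull" inclusive range
    r.last                              // 2147483647: endpoints stay accessible
    // r.length                         // would throw: more than Int.MaxValue elements
    (0 to Int.MaxValue by 2).length     // 1073741824: a coarser step makes it countable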
.../collection/immutable/NumericRange.scala | 509 +++++++++++++ .../stdlib/collection/immutable/Range.scala | 673 ++++++++++++++++++ 2 files changed, 1182 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/NumericRange.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Range.scala diff --git a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala new file mode 100644 index 000000000000..f26d9728e5ad --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala @@ -0,0 +1,509 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** `NumericRange` is a more generic version of the + * `Range` class which works with arbitrary types. + * It must be supplied with an `Integral` implementation of the + * range type. + * + * Factories for likely types include `Range.BigInt`, `Range.Long`, + * and `Range.BigDecimal`. `Range.Int` exists for completeness, but + * the `Int`-based `scala.Range` should be more performant. + * + * {{{ + * val r1 = Range(0, 100, 1) + * val veryBig = Int.MaxValue.toLong + 1 + * val r2 = Range.Long(veryBig, veryBig + 100, 1) + * assert(r1 sameElements r2.map(_ - veryBig)) + * }}} + * + * @define Coll `NumericRange` + * @define coll numeric range + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed class NumericRange[T]( + val start: T, + val end: T, + val step: T, + val isInclusive: Boolean +)(implicit + num: Integral[T] +) + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with StrictOptimizedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] + with Serializable { self => + + override def iterator: Iterator[T] = new NumericRange.NumericRangeIterator(this, num) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = { + import scala.collection.convert._ + import impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntNumericRangeStepper (this.asInstanceOf[NumericRange[Int]], 0, length) + case StepperShape.LongShape => new LongNumericRangeStepper (this.asInstanceOf[NumericRange[Long]], 0, length) + case _ => shape.parUnbox(new AnyNumericRangeStepper[T](this, 0, length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + } + s.asInstanceOf[S with EfficientSplit] + } + + + /** Note that NumericRange must be invariant so that constructs + * such as "1L to 10 by 5" do not infer the range type as AnyVal. + */ + import num._ + + // See comment in Range for why this must be lazy. 
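+  // (The pattern `x to y by z` first constructs `x to y`, which must not force
+  // an eager count that could fail; see the matching comment in Range.)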
+ override lazy val length: Int = NumericRange.count(start, end, step, isInclusive) + override lazy val isEmpty: Boolean = ( + (num.gt(start, end) && num.gt(step, num.zero)) + || (num.lt(start, end) && num.lt(step, num.zero)) + || (num.equiv(start, end) && !isInclusive) + ) + override def last: T = + if (isEmpty) Nil.head + else locationAfterN(length - 1) + override def init: NumericRange[T] = + if (isEmpty) Nil.init + else new NumericRange(start, end - step, step, isInclusive) + + override def head: T = if (isEmpty) Nil.head else start + override def tail: NumericRange[T] = + if (isEmpty) Nil.tail + else if(isInclusive) new NumericRange.Inclusive(start + step, end, step) + else new NumericRange.Exclusive(start + step, end, step) + + /** Create a new range with the start and end values of this range and + * a new `step`. + */ + def by(newStep: T): NumericRange[T] = copy(start, end, newStep) + + + /** Create a copy of this range. + */ + def copy(start: T, end: T, step: T): NumericRange[T] = + new NumericRange(start, end, step, isInclusive) + + @throws[IndexOutOfBoundsException] + def apply(idx: Int): T = { + if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${length - 1})") + else locationAfterN(idx) + } + + override def foreach[@specialized(Specializable.Unit) U](f: T => U): Unit = { + var count = 0 + var current = start + while (count < length) { + f(current) + current += step + count += 1 + } + } + + // TODO: these private methods are straight copies from Range, duplicated + // to guard against any (most likely illusory) performance drop. They should + // be eliminated one way or another. + + // Tests whether a number is within the endpoints, without testing + // whether it is a member of the sequence (i.e. when step > 1.) + private def isWithinBoundaries(elem: T) = !isEmpty && ( + (step > zero && start <= elem && elem <= last ) || + (step < zero && last <= elem && elem <= start) + ) + // Methods like apply throw exceptions on invalid n, but methods like take/drop + // are forgiving: therefore the checks are with the methods. + private def locationAfterN(n: Int): T = start + (step * fromInt(n)) + + private def crossesTheEndAfterN(n: Int): Boolean = { + // if we're sure that subtraction in the context of T won't overflow, we use this function + // to calculate the length of the range + def unsafeRangeLength(r: NumericRange[T]): T = { + val diff = num.minus(r.end, r.start) + val quotient = num.quot(diff, r.step) + val remainder = num.rem(diff, r.step) + if (!r.isInclusive && num.equiv(remainder, num.zero)) + num.max(quotient, num.zero) + else + num.max(num.plus(quotient, num.one), num.zero) + } + + // detects whether value can survive a bidirectional trip to -and then from- Int. 
+    def fitsInInteger(value: T): Boolean = num.equiv(num.fromInt(num.toInt(value)), value)
+
+    val stepIsInTheSameDirectionAsStartToEndVector =
+      (num.gt(end, start) && num.gt(step, num.zero)) || (num.lt(end, start) && num.sign(step) == -num.one)
+
+    if (num.equiv(start, end) || n <= 0 || !stepIsInTheSameDirectionAsStartToEndVector) return n >= 1
+
+    val sameSign = num.equiv(num.sign(start), num.sign(end))
+
+    if (sameSign) { // subtraction is safe
+      val len = unsafeRangeLength(this)
+      if (fitsInInteger(len)) n >= num.toInt(len) else num.gteq(num.fromInt(n), len)
+    } else {
+      // split into two ranges, in each of which subtraction is safe (around zero)
+      val stepsRemainderToZero = num.rem(start, step)
+      val walksOnZero = num.equiv(stepsRemainderToZero, num.zero)
+      val closestToZero = if (walksOnZero) -step else stepsRemainderToZero
+
+      /*
+        When splitting into two ranges, we should be super-careful about one of the sides hitting MinValue of T,
+        so we take two steps smaller than zero to ensure unsafeRangeLength won't overflow (taking one step may overflow depending on the step).
+        The same thing happens for MaxValue from zero, so we take one step further to ensure the safety of unsafeRangeLength.
+        After performing this operation, some elements remain in between and around zero;
+        their count is represented by carry.
+      */
+      val (l: NumericRange[T], r: NumericRange[T], carry: Int) =
+        if (num.lt(start, num.zero)) {
+          if (walksOnZero) {
+            val twoStepsAfterLargestNegativeNumber = num.plus(closestToZero, num.times(step, num.fromInt(2)))
+            (NumericRange(start, closestToZero, step), copy(twoStepsAfterLargestNegativeNumber, end, step), 2)
+          } else {
+            (NumericRange(start, closestToZero, step), copy(num.plus(closestToZero, step), end, step), 1)
+          }
+        } else {
+          if (walksOnZero) {
+            val twoStepsAfterZero = num.times(step, num.fromInt(2))
+            (copy(twoStepsAfterZero, end, step), NumericRange.inclusive(start, -step, step), 2)
+          } else {
+            val twoStepsAfterSmallestPositiveNumber = num.plus(closestToZero, num.times(step, num.fromInt(2)))
+            (copy(twoStepsAfterSmallestPositiveNumber, end, step), NumericRange.inclusive(start, closestToZero, step), 2)
+          }
+        }
+
+      val leftLength = unsafeRangeLength(l)
+      val rightLength = unsafeRangeLength(r)
+
+      // instead of `n >= rightLength + leftLength + carry`, which may cause addition overflow,
+      // we use `(n - leftLength - carry) >= rightLength` (in either Int or T, depending on whether the lengths fit in an Int)
+      if (fitsInInteger(leftLength) && fitsInInteger(rightLength))
+        n - num.toInt(leftLength) - carry >= num.toInt(rightLength)
+      else
+        num.gteq(num.minus(num.minus(num.fromInt(n), leftLength), num.fromInt(carry)), rightLength)
+    }
+  }
+
+  // When one drops everything. Can't ever have unchecked operations
+  // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue }
+  // will overflow. This creates an exclusive range where start == end
+  // based on the given value.
+ private def newEmptyRange(value: T) = NumericRange(value, value, step) + + override def take(n: Int): NumericRange[T] = { + if (n <= 0 || isEmpty) newEmptyRange(start) + else if (crossesTheEndAfterN(n)) this + else new NumericRange.Inclusive(start, locationAfterN(n - 1), step) + } + + override def drop(n: Int): NumericRange[T] = { + if (n <= 0 || isEmpty) this + else if (crossesTheEndAfterN(n)) newEmptyRange(end) + else copy(locationAfterN(n), end, step) + } + + override def splitAt(n: Int): (NumericRange[T], NumericRange[T]) = (take(n), drop(n)) + + override def reverse: NumericRange[T] = + if (isEmpty) this + else { + val newStep = -step + if (num.sign(newStep) == num.sign(step)) { + throw new ArithmeticException("number type is unsigned, and .reverse requires a negative step") + } else new NumericRange.Inclusive(last, start, newStep) + } + + import NumericRange.defaultOrdering + + override def min[T1 >: T](implicit ord: Ordering[T1]): T = + // We can take the fast path: + // - If the Integral of this NumericRange is also the requested Ordering + // (Integral <: Ordering). This can happen for custom Integral types. + // - The Ordering is the default Ordering of a well-known Integral type. + if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { + if (num.sign(step) > zero) head + else last + } else super.min(ord) + + override def max[T1 >: T](implicit ord: Ordering[T1]): T = + // See comment for fast path in min(). + if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { + if (num.sign(step) > zero) last + else head + } else super.max(ord) + + // a well-typed contains method. + def containsTyped(x: T): Boolean = + isWithinBoundaries(x) && (((x - start) % step) == zero) + + override def contains[A1 >: T](x: A1): Boolean = + try containsTyped(x.asInstanceOf[T]) + catch { case _: ClassCastException => false } + + override def sum[B >: T](implicit num: Numeric[B]): B = { + if (isEmpty) num.zero + else if (size == 1) head + else { + // If there is no overflow, use arithmetic series formula + // a + ... (n terms total) ... + b = n*(a+b)/2 + if ((num eq scala.math.Numeric.IntIsIntegral)|| + (num eq scala.math.Numeric.ShortIsIntegral)|| + (num eq scala.math.Numeric.ByteIsIntegral)|| + (num eq scala.math.Numeric.CharIsIntegral)) { + // We can do math with no overflow in a Long--easy + val exact = (size * ((num toLong head) + (num toInt last))) / 2 + num fromInt exact.toInt + } + else if (num eq scala.math.Numeric.LongIsIntegral) { + // Uh-oh, might be overflow, so we have to divide before we overflow. + // Either numRangeElements or (head + last) must be even, so divide the even one before multiplying + val a = head.toLong + val b = last.toLong + val ans = + if ((size & 1) == 0) (size / 2) * (a + b) + else size * { + // Sum is even, but we might overflow it, so divide in pieces and add back remainder + val ha = a/2 + val hb = b/2 + ha + hb + ((a - 2*ha) + (b - 2*hb)) / 2 + } + ans.asInstanceOf[B] + } + else if ((num eq scala.math.Numeric.BigIntIsIntegral) || + (num eq scala.math.Numeric.BigDecimalAsIfIntegral)) { + // No overflow, so we can use arithmetic series formula directly + // (not going to worry about running out of memory) + val numAsIntegral = num.asInstanceOf[Integral[B]] + import numAsIntegral._ + ((num fromInt size) * (head + last)) / (num fromInt 2) + } + else { + // User provided custom Numeric, so we cannot rely on arithmetic series formula (e.g. 
won't work on something like Z_6) + if (isEmpty) num.zero + else { + var acc = num.zero + var i = head + var idx = 0 + while(idx < length) { + acc = num.plus(acc, i) + i = i + step + idx = idx + 1 + } + acc + } + } + } + } + + override lazy val hashCode: Int = super.hashCode() + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def equals(other: Any): Boolean = other match { + case x: NumericRange[_] => + (x canEqual this) && (length == x.length) && ( + (isEmpty) || // all empty sequences are equal + (start == x.start && last == x.last) // same length and same endpoints implies equality + ) + case _ => + super.equals(other) + } + + override def toString: String = { + val empty = if (isEmpty) "empty " else "" + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + s"${empty}NumericRange $start $preposition $end$stepped" + } + + override protected[this] def className = "NumericRange" +} + +/** A companion object for numeric ranges. + * @define Coll `NumericRange` + * @define coll numeric range + */ +object NumericRange { + private def bigDecimalCheckUnderflow[T](start: T, end: T, step: T)(implicit num: Integral[T]): Unit = { + def FAIL(boundary: T, step: T): Unit = { + val msg = boundary match { + case bd: BigDecimal => s"Precision ${bd.mc.getPrecision}" + case _ => "Precision" + } + throw new IllegalArgumentException( + s"$msg inadequate to represent steps of size $step near $boundary" + ) + } + if (num.minus(num.plus(start, step), start) != step) FAIL(start, step) + if (num.minus(end, num.minus(end, step)) != step) FAIL(end, step) + } + + /** Calculates the number of elements in a range given start, end, step, and + * whether or not it is inclusive. Throws an exception if step == 0 or + * the number of elements exceeds the maximum Int. + */ + def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = { + val zero = num.zero + val upward = num.lt(start, end) + val posStep = num.gt(step, zero) + + if (step == zero) throw new IllegalArgumentException("step cannot be 0.") + else if (start == end) if (isInclusive) 1 else 0 + else if (upward != posStep) 0 + else { + /* We have to be frightfully paranoid about running out of range. + * We also can't assume that the numbers will fit in a Long. + * We will assume that if a > 0, -a can be represented, and if + * a < 0, -a+1 can be represented. We also assume that if we + * can't fit in Int, we can represent 2*Int.MaxValue+3 (at least). + * And we assume that numbers wrap rather than cap when they overflow. + */ + // Check whether we can short-circuit by deferring to Int range. + val startint = num.toInt(start) + if (start == num.fromInt(startint)) { + val endint = num.toInt(end) + if (end == num.fromInt(endint)) { + val stepint = num.toInt(step) + if (step == num.fromInt(stepint)) { + return { + if (isInclusive) Range.inclusive(startint, endint, stepint).length + else Range (startint, endint, stepint).length + } + } + } + } + // If we reach this point, deferring to Int failed. + // Numbers may be big. 
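+      // (For instance, NumericRange(Long.MinValue, Long.MaxValue, 1L << 33):
+      // end - start does not fit in a Long, hence the waypoint logic below.)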
+ if (num.isInstanceOf[Numeric.BigDecimalAsIfIntegral]) { + bigDecimalCheckUnderflow(start, end, step) // Throw exception if math is inaccurate (including no progress at all) + } + val one = num.one + val limit = num.fromInt(Int.MaxValue) + def check(t: T): T = + if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.") + else t + // If the range crosses zero, it might overflow when subtracted + val startside = num.sign(start) + val endside = num.sign(end) + num.toInt{ + if (num.gteq(num.times(startside, endside), zero)) { + // We're sure we can subtract these numbers. + // Note that we do not use .rem because of different conventions for Long and BigInt + val diff = num.minus(end, start) + val quotient = check(num.quot(diff, step)) + val remainder = num.minus(diff, num.times(quotient, step)) + if (!isInclusive && zero == remainder) quotient else check(num.plus(quotient, one)) + } + else { + // We might not even be able to subtract these numbers. + // Jump in three pieces: + // * start to -1 or 1, whichever is closer (waypointA) + // * one step, which will take us at least to 0 (ends at waypointB) + // * (except with really small numbers) + // * there to the end + val negone = num.fromInt(-1) + val startlim = if (posStep) negone else one + //Use start value if the start value is closer to zero than startlim + // * e.g. .5 is closer to zero than 1 and -.5 is closer to zero than -1 + val startdiff = { + if ((posStep && num.lt(startlim, start)) || (!posStep && num.gt(startlim, start))) + start + else + num.minus(startlim, start) + } + val startq = check(num.quot(startdiff, step)) + val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step)) + val waypointB = num.plus(waypointA, step) + check { + if (num.lt(waypointB, end) != upward) { + // No last piece + if (isInclusive && waypointB == end) num.plus(startq, num.fromInt(2)) + else num.plus(startq, one) + } + else { + // There is a last piece + val enddiff = num.minus(end,waypointB) + val endq = check(num.quot(enddiff, step)) + val last = if (endq == zero) waypointB else num.plus(waypointB, num.times(endq, step)) + // Now we have to tally up all the pieces + // 1 for the initial value + // startq steps to waypointA + // 1 step to waypointB + // endq steps to the end (one less if !isInclusive and last==end) + num.plus(startq, num.plus(endq, if (!isInclusive && last==end) one else num.fromInt(2))) + } + } + } + } + } + } + + @SerialVersionUID(3L) + class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) + extends NumericRange(start, end, step, true) { + override def copy(start: T, end: T, step: T): Inclusive[T] = + NumericRange.inclusive(start, end, step) + + def exclusive: Exclusive[T] = NumericRange(start, end, step) + } + + @SerialVersionUID(3L) + class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) + extends NumericRange(start, end, step, false) { + override def copy(start: T, end: T, step: T): Exclusive[T] = + NumericRange(start, end, step) + + def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step) + } + + def apply[T](start: T, end: T, step: T)(implicit num: Integral[T]): Exclusive[T] = + new Exclusive(start, end, step) + def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] = + new Inclusive(start, end, step) + + private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]]( + Numeric.BigIntIsIntegral -> Ordering.BigInt, + Numeric.IntIsIntegral -> Ordering.Int, + 
Numeric.ShortIsIntegral -> Ordering.Short, + Numeric.ByteIsIntegral -> Ordering.Byte, + Numeric.CharIsIntegral -> Ordering.Char, + Numeric.LongIsIntegral -> Ordering.Long, + Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal + ) + + @SerialVersionUID(3L) + private final class NumericRangeIterator[T](self: NumericRange[T], num: Integral[T]) extends AbstractIterator[T] with Serializable { + import num.mkNumericOps + + private[this] var _hasNext = !self.isEmpty + private[this] var _next: T @uncheckedCaptures = self.start + private[this] val lastElement: T = if (_hasNext) self.last else self.start + override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0 + def hasNext: Boolean = _hasNext + def next(): T = { + if (!_hasNext) Iterator.empty.next() + val value = _next + _hasNext = value != lastElement + _next = num.plus(value, self.step) + value + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Range.scala b/tests/pos-special/stdlib/collection/immutable/Range.scala new file mode 100644 index 000000000000..459591d1a9cb --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Range.scala @@ -0,0 +1,673 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.RangeStepper +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking + +/** The `Range` class represents integer values in range + * ''[start;end)'' with non-zero step value `step`. + * It's a special case of an indexed sequence. + * For example: + * + * {{{ + * val r1 = 0 until 10 + * val r2 = r1.start until r1.end by r1.step + 1 + * println(r2.length) // = 5 + * }}} + * + * Ranges that contain more than `Int.MaxValue` elements can be created, but + * these overfull ranges have only limited capabilities. Any method that + * could require a collection of over `Int.MaxValue` length to be created, or + * could be asked to index beyond `Int.MaxValue` elements will throw an + * exception. Overfull ranges can safely be reduced in size by changing + * the step size (e.g. `by 3`) or taking/dropping elements. `contains`, + * `equals`, and access to the ends of the range (`head`, `last`, `tail`, + * `init`) are also permitted on overfull ranges. + * + * @param start the start of this range. + * @param end the end of the range. For exclusive ranges, e.g. + * `Range(0,3)` or `(0 until 3)`, this is one + * step past the last one in the range. For inclusive + * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, + * it may be in the range if it is not skipped by the step size. + * To find the last element inside a non-empty range, + * use `last` instead. + * @param step the step for the range. + * + * @define coll range + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define doesNotUseBuilders + * '''Note:''' this method does not use builders to construct a new range, + * and its complexity is O(1). 
+ */ +@SerialVersionUID(3L) +sealed abstract class Range( + val start: Int, + val end: Int, + val step: Int +) + extends AbstractSeq[Int] + with IndexedSeq[Int] + with IndexedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with StrictOptimizedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with IterableFactoryDefaults[Int, IndexedSeq] + with Serializable { range => + + final override def iterator: Iterator[Int] = new RangeIterator(start, step, lastElement, isEmpty) + + override final def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { + val st = new RangeStepper(start, step, 0, length) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + private[this] def gap = end.toLong - start.toLong + private[this] def isExact = gap % step == 0 + private[this] def hasStub = isInclusive || !isExact + private[this] def longLength = gap / step + ( if (hasStub) 1 else 0 ) + + def isInclusive: Boolean + + final override val isEmpty: Boolean = ( + (start > end && step > 0) + || (start < end && step < 0) + || (start == end && !isInclusive) + ) + + private[this] val numRangeElements: Int = { + if (step == 0) throw new IllegalArgumentException("step cannot be 0.") + else if (isEmpty) 0 + else { + val len = longLength + if (len > scala.Int.MaxValue) -1 + else len.toInt + } + } + + final def length = if (numRangeElements < 0) fail() else numRangeElements + + // This field has a sensible value only for non-empty ranges + private[this] val lastElement = step match { + case 1 => if (isInclusive) end else end-1 + case -1 => if (isInclusive) end else end+1 + case _ => + val remainder = (gap % step).toInt + if (remainder != 0) end - remainder + else if (isInclusive) end + else end - step + } + + /** The last element of this range. This method will return the correct value + * even if there are too many elements to iterate over. + */ + final override def last: Int = + if (isEmpty) throw Range.emptyRangeError("last") else lastElement + final override def head: Int = + if (isEmpty) throw Range.emptyRangeError("head") else start + + /** Creates a new range containing all the elements of this range except the last one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the last one. + */ + final override def init: Range = + if (isEmpty) throw Range.emptyRangeError("init") else dropRight(1) + + /** Creates a new range containing all the elements of this range except the first one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the first one. + */ + final override def tail: Range = { + if (isEmpty) throw Range.emptyRangeError("tail") + if (numRangeElements == 1) newEmptyRange(end) + else if(isInclusive) new Range.Inclusive(start + step, end, step) + else new Range.Exclusive(start + step, end, step) + } + + override def map[B](f: Int => B): IndexedSeq[B] = { + validateMaxLength() + super.map(f) + } + + final protected def copy(start: Int = start, end: Int = end, step: Int = step, isInclusive: Boolean = isInclusive): Range = + if(isInclusive) new Range.Inclusive(start, end, step) else new Range.Exclusive(start, end, step) + + /** Create a new range with the `start` and `end` values of this range and + * a new `step`. 
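+ *
+ * For example, `(1 to 10) by 3` keeps `start = 1` and `end = 10` and
+ * contains the elements 1, 4, 7 and 10.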
+ * + * @return a new range with a different step + */ + final def by(step: Int): Range = copy(start, end, step) + + // Check cannot be evaluated eagerly because we have a pattern where + // ranges are constructed like: "x to y by z" The "x to y" piece + // should not trigger an exception. So the calculation is delayed, + // which means it will not fail fast for those cases where failing was + // correct. + private[this] def validateMaxLength(): Unit = { + if (numRangeElements < 0) + fail() + } + private[this] def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step) + private[this] def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.") + + @throws[IndexOutOfBoundsException] + final def apply(idx: Int): Int = { + validateMaxLength() + if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${numRangeElements-1})") + else start + (step * idx) + } + + /*@`inline`*/ final override def foreach[@specialized(Unit) U](f: Int => U): Unit = { + // Implementation chosen on the basis of favorable microbenchmarks + // Note--initialization catches step == 0 so we don't need to here + if (!isEmpty) { + var i = start + while (true) { + f(i) + if (i == lastElement) return + i += step + } + } + } + + override final def indexOf[@specialized(Int) B >: Int](elem: B, from: Int = 0): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos >= from) pos else -1 + case _ => super.indexOf(elem, from) + } + + override final def lastIndexOf[@specialized(Int) B >: Int](elem: B, end: Int = length - 1): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos <= end) pos else -1 + case _ => super.lastIndexOf(elem, end) + } + + private[this] def posOf(i: Int): Int = + if (contains(i)) (i - start) / step else -1 + + override def sameElements[B >: Int](that: IterableOnce[B]^): Boolean = that match { + case other: Range => + (this.length : @annotation.switch) match { + case 0 => other.isEmpty + case 1 => other.length == 1 && this.start == other.start + case n => other.length == n && ( + (this.start == other.start) + && (this.step == other.step) + ) + } + case _ => super.sameElements(that) + } + + /** Creates a new range containing the first `n` elements of this range. + * + * @param n the number of elements to take. + * @return a new range consisting of `n` first elements. + */ + final override def take(n: Int): Range = + if (n <= 0 || isEmpty) newEmptyRange(start) + else if (n >= numRangeElements && numRangeElements >= 0) this + else { + // May have more than Int.MaxValue elements in range (numRangeElements < 0) + // but the logic is the same either way: take the first n + new Range.Inclusive(start, locationAfterN(n - 1), step) + } + + /** Creates a new range containing all the elements of this range except the first `n` elements. + * + * @param n the number of elements to drop. + * @return a new range consisting of all the elements of this range except `n` first elements. + */ + final override def drop(n: Int): Range = + if (n <= 0 || isEmpty) this + else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end) + else { + // May have more than Int.MaxValue elements (numRangeElements < 0) + // but the logic is the same either way: go forwards n steps, keep the rest + copy(locationAfterN(n), end, step) + } + + /** Creates a new range consisting of the last `n` elements of the range. 
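+ *
+ * For example, `(1 to 10).takeRight(3)` contains the elements 8, 9 and 10.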
+ * + * $doesNotUseBuilders + */ + final override def takeRight(n: Int): Range = { + if (n <= 0) newEmptyRange(start) + else if (numRangeElements >= 0) drop(numRangeElements - n) + else { + // Need to handle over-full range separately + val y = last + val x = y - step.toLong*(n-1) + if ((step > 0 && x < start) || (step < 0 && x > start)) this + else Range.inclusive(x.toInt, y, step) + } + } + + /** Creates a new range consisting of the initial `length - n` elements of the range. + * + * $doesNotUseBuilders + */ + final override def dropRight(n: Int): Range = { + if (n <= 0) this + else if (numRangeElements >= 0) take(numRangeElements - n) + else { + // Need to handle over-full range separately + val y = last - step.toInt*n + if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start) + else Range.inclusive(start, y.toInt, step) + } + } + + // Advance from the start while we meet the given test + private[this] def argTakeWhile(p: Int => Boolean): Long = { + if (isEmpty) start + else { + var current = start + val stop = last + while (current != stop && p(current)) current += step + if (current != stop || !p(current)) current + else current.toLong + step + } + } + + final override def takeWhile(p: Int => Boolean): Range = { + val stop = argTakeWhile(p) + if (stop==start) newEmptyRange(start) + else { + val x = (stop - step).toInt + if (x == last) this + else Range.inclusive(start, x, step) + } + } + + final override def dropWhile(p: Int => Boolean): Range = { + val stop = argTakeWhile(p) + if (stop == start) this + else { + val x = (stop - step).toInt + if (x == last) newEmptyRange(last) + else Range.inclusive(x + step, last, step) + } + } + + final override def span(p: Int => Boolean): (Range, Range) = { + val border = argTakeWhile(p) + if (border == start) (newEmptyRange(start), this) + else { + val x = (border - step).toInt + if (x == last) (this, newEmptyRange(last)) + else (Range.inclusive(start, x, step), Range.inclusive(x+step, last, step)) + } + } + + /** Creates a new range containing the elements starting at `from` up to but not including `until`. + * + * $doesNotUseBuilders + * + * @param from the element at which to start + * @param until the element at which to end (not included in the range) + * @return a new range consisting of a contiguous interval of values in the old range + */ + final override def slice(from: Int, until: Int): Range = + if (from <= 0) take(until) + else if (until >= numRangeElements && numRangeElements >= 0) drop(from) + else { + val fromValue = locationAfterN(from) + if (from >= until) newEmptyRange(fromValue) + else Range.inclusive(fromValue, locationAfterN(until-1), step) + } + + // Overridden only to refine the return type + final override def splitAt(n: Int): (Range, Range) = (take(n), drop(n)) + + // Methods like apply throw exceptions on invalid n, but methods like take/drop + // are forgiving: therefore the checks are with the methods. + private[this] def locationAfterN(n: Int) = start + (step * n) + + // When one drops everything. Can't ever have unchecked operations + // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } + // will overflow. This creates an exclusive range where start == end + // based on the given value. + private[this] def newEmptyRange(value: Int) = new Range.Exclusive(value, value, step) + + /** Returns the reverse of this range. + */ + final override def reverse: Range = + if (isEmpty) this + else new Range.Inclusive(last, start, -step) + + /** Make range inclusive. 
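+ * For example, `(1 until 4).inclusive` contains 1, 2, 3 and 4.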
+ */ + final def inclusive: Range = + if (isInclusive) this + else new Range.Inclusive(start, end, step) + + final def contains(x: Int): Boolean = { + if (x == end && !isInclusive) false + else if (step > 0) { + if (x < start || x > end) false + else (step == 1) || (Integer.remainderUnsigned(x - start, step) == 0) + } + else { + if (x < end || x > start) false + else (step == -1) || (Integer.remainderUnsigned(start - x, -step) == 0) + } + } + /* Seq#contains has a type parameter so the optimised contains above doesn't override it */ + override final def contains[B >: Int](elem: B): Boolean = elem match { + case i: Int => this.contains(i) + case _ => super.contains(elem) + } + + final override def sum[B >: Int](implicit num: Numeric[B]): Int = { + if (num eq scala.math.Numeric.IntIsIntegral) { + // this is normal integer range with usual addition. arithmetic series formula can be used + if (isEmpty) 0 + else if (size == 1) head + else ((size * (head.toLong + last)) / 2).toInt + } else { + // user provided custom Numeric, we cannot rely on arithmetic series formula + if (isEmpty) num.toInt(num.zero) + else { + var acc = num.zero + var i = head + while (true) { + acc = num.plus(acc, i) + if (i == lastElement) return num.toInt(acc) + i = i + step + } + 0 // Never hit this--just to satisfy compiler since it doesn't know while(true) has type Nothing + } + } + } + + final override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) head + else last + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) last + else head + } else super.min(ord) + + final override def max[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) last + else head + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) head + else last + } else super.max(ord) + + override def tails: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length + override def next() = { + if (hasNext) { + val res = Range.this.drop(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } + + override def inits: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length + override def next() = { + if (hasNext) { + val res = Range.this.dropRight(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + final override def equals(other: Any): Boolean = other match { + case x: Range => + // Note: this must succeed for overfull ranges (length > Int.MaxValue) + if (isEmpty) x.isEmpty // empty sequences are equal + else // this is non-empty... 
+ x.nonEmpty && start == x.start && { // ...so other must contain something and have same start + val l0 = last + (l0 == x.last && ( // And same end + start == l0 || step == x.step // And either the same step, or not take any steps + )) + } + case _ => + super.equals(other) + } + + final override def hashCode: Int = + if(length >= 2) MurmurHash3.rangeHash(start, step, lastElement) + else super.hashCode + + final override def toString: String = { + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else "" + s"${prefix}Range $start $preposition $end$stepped" + } + + override protected[this] def className = "Range" + + override def distinct: Range = this + + override def grouped(size: Int): Iterator[Range] = { + require(size >= 1, f"size=$size%d, but size must be positive") + if (isEmpty) { + Iterator.empty + } else { + val s = size + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = Range.this.length > i + override def next() = + if (hasNext) { + val x = Range.this.slice(i, i + s) + i += s + x + } else { + Iterator.empty.next() + } + } + } + } + + override def sorted[B >: Int](implicit ord: Ordering[B]): IndexedSeq[Int] = + if (ord eq Ordering.Int) { + if (step > 0) { + this + } else { + reverse + } + } else { + super.sorted(ord) + } +} + +/** + * Companion object for ranges. + * @define Coll `Range` + * @define coll range + */ +object Range { + + /** Counts the number of range elements. + * precondition: step != 0 + * If the size of the range exceeds Int.MaxValue, the + * result will be negative. + */ + def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = { + if (step == 0) + throw new IllegalArgumentException("step cannot be 0.") + + val isEmpty = + if (start == end) !isInclusive + else if (start < end) step < 0 + else step > 0 + + if (isEmpty) 0 + else { + // Counts with Longs so we can recognize too-large ranges. + val gap: Long = end.toLong - start.toLong + val jumps: Long = gap / step + // Whether the size of this range is one larger than the + // number of full-sized jumps. + val hasStub = isInclusive || (gap % step != 0) + val result: Long = jumps + ( if (hasStub) 1 else 0 ) + + if (result > scala.Int.MaxValue) -1 + else result.toInt + } + } + def count(start: Int, end: Int, step: Int): Int = + count(start, end, step, isInclusive = false) + + /** Make a range from `start` until `end` (exclusive) with given step value. + * @note step != 0 + */ + def apply(start: Int, end: Int, step: Int): Range.Exclusive = new Range.Exclusive(start, end, step) + + /** Make a range from `start` until `end` (exclusive) with step value 1. + */ + def apply(start: Int, end: Int): Range.Exclusive = new Range.Exclusive(start, end, 1) + + /** Make an inclusive range from `start` to `end` with given step value. + * @note step != 0 + */ + def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Range.Inclusive(start, end, step) + + /** Make an inclusive range from `start` to `end` with step value 1. 
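+ * For example, `Range.inclusive(1, 3)` contains exactly 1, 2 and 3.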
+ */ + def inclusive(start: Int, end: Int): Range.Inclusive = new Range.Inclusive(start, end, 1) + + @SerialVersionUID(3L) + final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = true + } + + @SerialVersionUID(3L) + final class Exclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = false + } + + // BigInt and Long are straightforward generic ranges. + object BigInt { + def apply(start: BigInt, end: BigInt, step: BigInt): NumericRange.Exclusive[BigInt] = NumericRange(start, end, step) + def inclusive(start: BigInt, end: BigInt, step: BigInt): NumericRange.Inclusive[BigInt] = NumericRange.inclusive(start, end, step) + } + + object Long { + def apply(start: Long, end: Long, step: Long): NumericRange.Exclusive[Long] = NumericRange(start, end, step) + def inclusive(start: Long, end: Long, step: Long): NumericRange.Inclusive[Long] = NumericRange.inclusive(start, end, step) + } + + // BigDecimal uses an alternative implementation of Numeric in which + // it pretends to be Integral[T] instead of Fractional[T]. See Numeric for + // details. The intention is for it to throw an exception anytime + // imprecision or surprises might result from anything, although this may + // not yet be fully implemented. + object BigDecimal { + implicit val bigDecAsIntegral: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral + + def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = + NumericRange(start, end, step) + def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Inclusive[BigDecimal] = + NumericRange.inclusive(start, end, step) + } + + // As there is no appealing default step size for not-really-integral ranges, + // we offer a partially constructed object. + class Partial[T, U](private val f: T -> U) extends AnyVal { + def by(x: T): U = f(x) + override def toString = "Range requires step" + } + + // Illustrating genericity with Int Range, which should have the same behavior + // as the original Range class. However we leave the original Range + // indefinitely, for performance and because the compiler seems to bootstrap + // off it and won't do so with our parameterized version without modifications. 
object Int {
+    def apply(start: Int, end: Int, step: Int): NumericRange.Exclusive[Int] = NumericRange(start, end, step)
+    def inclusive(start: Int, end: Int, step: Int): NumericRange.Inclusive[Int] = NumericRange.inclusive(start, end, step)
+  }
+
+  private def emptyRangeError(what: String): Throwable =
+    new NoSuchElementException(what + " on empty Range")
+}
+
+/**
+ * @param lastElement The last element included in the Range
+ * @param initiallyEmpty Whether the Range was initially empty or not
+ */
+@SerialVersionUID(3L)
+private class RangeIterator(
+  start: Int,
+  step: Int,
+  lastElement: Int,
+  initiallyEmpty: Boolean
+) extends AbstractIterator[Int] with Serializable {
+  private[this] var _hasNext: Boolean = !initiallyEmpty
+  private[this] var _next: Int = start
+  override def knownSize: Int = if (_hasNext) (lastElement - _next) / step + 1 else 0
+  def hasNext: Boolean = _hasNext
+  @throws[NoSuchElementException]
+  def next(): Int = {
+    if (!_hasNext) Iterator.empty.next()
+    val value = _next
+    _hasNext = value != lastElement
+    _next = value + step
+    value
+  }
+
+  override def drop(n: Int): Iterator[Int] = {
+    if (n > 0) {
+      val longPos = _next.toLong + step * n
+      if (step > 0) {
+        _next = Math.min(lastElement, longPos).toInt
+        _hasNext = longPos <= lastElement
+      }
+      else if (step < 0) {
+        _next = Math.max(lastElement, longPos).toInt
+        _hasNext = longPos >= lastElement
+      }
+    }
+    this
+  }
+}

From 5817d400c12c96c8147b2feb1bd57127803e1809 Mon Sep 17 00:00:00 2001
From: odersky
Date: Thu, 2 Nov 2023 11:03:38 +0100
Subject: [PATCH 107/216] Add immutable Queue to stdlib

---
 .../stdlib/collection/immutable/Queue.scala | 218 ++++++++++++++++++
 1 file changed, 218 insertions(+)
 create mode 100644 tests/pos-special/stdlib/collection/immutable/Queue.scala

diff --git a/tests/pos-special/stdlib/collection/immutable/Queue.scala b/tests/pos-special/stdlib/collection/immutable/Queue.scala
new file mode 100644
index 000000000000..929c79ce588a
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/Queue.scala
@@ -0,0 +1,218 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package immutable
+
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable.{Builder, ListBuffer}
+import language.experimental.captureChecking
+
+/** `Queue` objects implement data structures that allow elements to be
+ * inserted and retrieved in a first-in-first-out (FIFO) manner.
+ *
+ * `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements.
+ * Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the
+ * queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''.
+ *
+ * Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)`, except when a pivot is
+ * required, in which case a cost of `O(n)` is incurred, where `n` is the number of elements in the queue.
+ * When this happens, `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`.
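+ *
+ * For example (values follow from the two-list representation described above):
+ * {{{
+ * val q = Queue(1, 2)        // out = List(1, 2), in = Nil
+ * val q2 = q.enqueue(3)      // O(1): 3 is consed onto the in list
+ * val (x, rest) = q2.dequeue // x == 1; no pivot needed while out is non-empty
+ * }}}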
+ * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] + * section on `Immutable Queues` for more information. + * + * @define Coll `immutable.Queue` + * @define coll immutable queue + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ + +sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) + extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, Queue, Queue[A]] + with StrictOptimizedLinearSeqOps[A, Queue, Queue[A]] + with StrictOptimizedSeqOps[A, Queue, Queue[A]] + with IterableFactoryDefaults[A, Queue] + with DefaultSerializable { + + override def iterableFactory: SeqFactory[Queue] = Queue + + /** Returns the `n`-th element of this queue. + * The first element is at position `0`. + * + * @param n index of the element to return + * @return the element at position `n` in this queue. + * @throws NoSuchElementException if the queue is too short. + */ + override def apply(n: Int): A = { + def indexOutOfRange(): Nothing = throw new IndexOutOfBoundsException(n.toString) + + var index = 0 + var curr = out + + while (index < n && curr.nonEmpty) { + index += 1 + curr = curr.tail + } + + if (index == n) { + if (curr.nonEmpty) curr.head + else if (in.nonEmpty) in.last + else indexOutOfRange() + } else { + val indexFromBack = n - index + val inLength = in.length + if (indexFromBack >= inLength) indexOutOfRange() + else in(inLength - indexFromBack - 1) + } + } + + /** Returns the elements in the list as an iterator + */ + override def iterator: Iterator[A] = out.iterator.concat(in.reverse) + + /** Checks if the queue is empty. + * + * @return true, iff there is no element in the queue. + */ + override def isEmpty: Boolean = in.isEmpty && out.isEmpty + + override def head: A = + if (out.nonEmpty) out.head + else if (in.nonEmpty) in.last + else throw new NoSuchElementException("head on empty queue") + + override def tail: Queue[A] = + if (out.nonEmpty) new Queue(in, out.tail) + else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) + else throw new NoSuchElementException("tail on empty queue") + + override def last: A = + if (in.nonEmpty) in.head + else if (out.nonEmpty) out.last + else throw new NoSuchElementException("last on empty queue") + + /* This is made to avoid inefficient implementation of iterator. */ + override def forall(p: A => Boolean): Boolean = + in.forall(p) && out.forall(p) + + /* This is made to avoid inefficient implementation of iterator. */ + override def exists(p: A => Boolean): Boolean = + in.exists(p) || out.exists(p) + + override protected[this] def className = "Queue" + + /** Returns the length of the queue. */ + override def length: Int = in.length + out.length + + override def prepended[B >: A](elem: B): Queue[B] = new Queue(in, elem :: out) + + override def appended[B >: A](elem: B): Queue[B] = enqueue(elem) + + override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]^): Queue[B] = { + val newIn = that match { + case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) + case that: List[B] => that reverse_::: this.in + case _ => + var result: List[B] = this.in + val iter = that.iterator + while (iter.hasNext) { + result = iter.next() :: result + } + result + } + if (newIn eq this.in) this else new Queue[B](newIn, this.out) + } + + /** Creates a new queue with element added at the end + * of the old queue. 
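+ *
+ * For example, `Queue(1, 2).enqueue(3)` prints as `Queue(1, 2, 3)`.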
+ * + * @param elem the element to insert + */ + def enqueue[B >: A](elem: B): Queue[B] = new Queue(elem :: in, out) + + /** Creates a new queue with all elements provided by an `Iterable` object + * added at the end of the old queue. + * + * The elements are appended in the order they are given out by the + * iterator. + * + * @param iter an iterable object + */ + @deprecated("Use `enqueueAll` instead of `enqueue` to enqueue a collection of elements", "2.13.0") + @`inline` final def enqueue[B >: A](iter: scala.collection.Iterable[B]) = enqueueAll(iter) + + /** Creates a new queue with all elements provided by an `Iterable` object + * added at the end of the old queue. + * + * The elements are appended in the order they are given out by the + * iterator. + * + * @param iter an iterable object + */ + def enqueueAll[B >: A](iter: scala.collection.Iterable[B]): Queue[B] = appendedAll(iter) + + /** Returns a tuple with the first element in the queue, + * and a new queue with this element removed. + * + * @throws NoSuchElementException + * @return the first element of the queue. + */ + def dequeue: (A, Queue[A]) = out match { + case Nil if !in.isEmpty => val rev = in.reverse ; (rev.head, new Queue(Nil, rev.tail)) + case x :: xs => (x, new Queue(in, xs)) + case _ => throw new NoSuchElementException("dequeue on empty queue") + } + + /** Optionally retrieves the first element and a queue of the remaining elements. + * + * @return A tuple of the first element of the queue, and a new queue with this element removed. + * If the queue is empty, `None` is returned. + */ + def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue) + + /** Returns the first element in the queue, or throws an error if there + * is no element contained in the queue. + * + * @throws NoSuchElementException + * @return the first element. + */ + def front: A = head + + /** Returns a string representation of this queue. + */ + override def toString(): String = mkString("Queue(", ", ", ")") +} + +/** $factoryInfo + * @define Coll `immutable.Queue` + * @define coll immutable queue + */ +@SerialVersionUID(3L) +object Queue extends StrictOptimizedSeqFactory[Queue] { + def newBuilder[sealed A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) + + def from[A](source: IterableOnce[A]^): Queue[A] = source match { + case q: Queue[A] => q + case _ => + val list = List.from(source) + if (list.isEmpty) empty + else new Queue(Nil, list) + } + + def empty[A]: Queue[A] = EmptyQueue + override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList) + + private object EmptyQueue extends Queue[Nothing](Nil, Nil) { } +} From c16afe52321a72b385fa5d434570135560f614a7 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 11:43:09 +0100 Subject: [PATCH 108/216] Add immutable RedBlackTree to stdlib --- .../collection/immutable/RedBlackTree.scala | 1234 +++++++++++++++++ 1 file changed, 1234 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala diff --git a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala new file mode 100644 index 000000000000..5fbc927d7a21 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala @@ -0,0 +1,1234 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.meta.{getter, setter}
+import scala.annotation.tailrec
+import scala.runtime.Statics.releaseFence
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** An object containing the RedBlack tree implementation used by `TreeMap` and `TreeSet`.
+ *
+ * Implementation note: since efficiency is important for data structures, this implementation
+ * uses `null` to represent empty trees. This also means pattern matching cannot
+ * easily be used. The `RedBlackTree` object tries to hide these
+ * optimizations behind a reasonably clean API.
+ */
+private[collection] object RedBlackTree {
+
+  def isEmpty(tree: Tree[_, _]): Boolean = tree eq null
+
+  def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null
+  def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match {
+    case null => None
+    case tree => Some(tree.value)
+  }
+
+  @tailrec
+  def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+    val cmp = ordering.compare(x, tree.key)
+    if (cmp < 0) lookup(tree.left, x)
+    else if (cmp > 0) lookup(tree.right, x)
+    else tree
+  }
+  private[immutable] abstract class Helper[A](implicit val ordering: Ordering[A]) {
+    def beforePublish[B](tree: Tree[A, B]): Tree[A, B] = {
+      if (tree eq null) tree
+      else if (tree.isMutable) {
+        val res = tree.mutableBlack.makeImmutable
+        releaseFence()
+        res
+      } else tree.black
+    }
+    /** Create a new balanced tree where `newLeft` replaces `tree.left`.
+     * tree and newLeft are never null */
+    protected[this] final def mutableBalanceLeft[A1, B, B1 >: B](tree: Tree[A1, B], newLeft: Tree[A1, B1]): Tree[A1, B1] = {
+      // Parameter trees
+      //      tree          |          newLeft
+      //  --  KV  R         |  nl.L  nl.KV  nl.R
+      //                    |        nl.R.L  nl.R.KV  nl.R.R
+      //Note - unlike the immutable trees we can't consider tree.left eq newLeft
+      //as the balance operations may mutate the same object
+      //but that check was mostly to avoid the object creation
+      if (newLeft.isRed) {
+        val newLeft_left = newLeft.left
+        val newLeft_right = newLeft.right
+        if (isRedTree(newLeft_left)) {
+          //            RED
+          //  black(nl.L)  nl.KV  black
+          //               nl.R  KV  R
+          val resultLeft = newLeft_left.mutableBlack
+          val resultRight = tree.mutableBlackWithLeft(newLeft_right)
+
+          newLeft.mutableWithLeftRight(resultLeft, resultRight)
+        } else if (isRedTree(newLeft_right)) {
+          //            RED
+          //  black  nl.R.KV  black
+          //  nl.L  nl.KV  nl.R.L    nl.R.R  KV  R
+
+          val newLeft_right_right = newLeft_right.right
+
+          val resultLeft = newLeft.mutableBlackWithRight(newLeft_right.left)
+          val resultRight = tree.mutableBlackWithLeft(newLeft_right_right)
+
+          newLeft_right.mutableWithLeftRight(resultLeft, resultRight)
+        } else {
+          //          tree
+          //  newLeft  KV  R
+          tree.mutableWithLeft(newLeft)
+        }
+      } else {
+        //          tree
+        //  newLeft  KV  R
+        tree.mutableWithLeft(newLeft)
+      }
+    }
+    /** Create a new balanced tree where `newRight` replaces `tree.right`.
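+     * This is the mirror image of `mutableBalanceLeft` above.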
+ * tree and newRight are never null */ + protected[this] final def mutableBalanceRight[A1, B, B1 >: B](tree: Tree[A1, B], newRight: Tree[A1, B1]): Tree[A1, B1] = { + // Parameter trees + // tree | newRight + // L KV -- | nr.L nr.KV nr.R + // | nr.L.L nr.L.KV nr.L.R + //Note - unlike the immutable trees we can't consider tree.right eq newRight + //as the balance operations may mutate the same object + //but that check was mostly to avoid the object creation + if (newRight.isRed) { + val newRight_left = newRight.left + if (isRedTree(newRight_left)) { + // RED + // black nr.L.KV black + // L KV nr.L.L nr.L.R nr.KV nr.R + + val resultLeft = tree.mutableBlackWithRight(newRight_left.left) + val resultRight = newRight.mutableBlackWithLeft(newRight_left.right) + + newRight_left.mutableWithLeftRight(resultLeft, resultRight) + + } else { + val newRight_right = newRight.right + if (isRedTree(newRight_right)) { + // RED + // black nr.KV black(nr.R) + // L KV nr.L + + val resultLeft = tree.mutableBlackWithRight(newRight_left) + val resultRight = newRight_right.mutableBlack + + newRight.mutableWithLeftRight(resultLeft, resultRight) + } else { + // tree + // L KV newRight + tree.mutableWithRight(newRight) + } + } + } else { + // tree + // L KV newRight + tree.mutableWithRight(newRight) + } + } + } + private[immutable] class SetHelper[A](implicit ordering: Ordering[A]) extends Helper[A] { + protected[this] final def mutableUpd(tree: Tree[A, Any], k: A): Tree[A, Any] = + if (tree eq null) { + mutableRedTree(k, (), null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + tree + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + mutableBalanceLeft(tree, mutableUpd(tree.left, k)) + else if (cmp > 0) + mutableBalanceRight(tree, mutableUpd(tree.right, k)) + else tree + } + } + private[immutable] class MapHelper[A, B](implicit ordering: Ordering[A]) extends Helper[A] { + protected[this] final def mutableUpd[B1 >: B](tree: Tree[A, B], k: A, v: B1): Tree[A, B1] = + if (tree eq null) { + mutableRedTree(k, v, null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + tree.mutableWithV(v) + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + mutableBalanceLeft(tree, mutableUpd(tree.left, k, v)) + else if (cmp > 0) + mutableBalanceRight(tree, mutableUpd(tree.right, k, v)) + else tree.mutableWithV(v) + } + } + + def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count + def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) + def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) + def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { + case (Some(from), Some(until)) => this.range(tree, from, until) + case (Some(from), None) => this.from(tree, from) + case (None, Some(until)) => this.until(tree, until) + case (None, None) => tree + } + def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) + def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from)) + def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) + def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) + + def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n)) + def take[A: Ordering, B](tree: 
Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n)) + def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) + + def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty tree") + var result = tree + while (result.left ne null) result = result.left + result + } + def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty tree") + var result = tree + while (result.right ne null) result = result.right + result + } + + def tail[A, B](tree: Tree[A, B]): Tree[A, B] = { + def _tail(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) throw new NoSuchElementException("empty tree") + else { + val tl = tree.left + if (tl eq null) tree.right + else if (tl.isBlack) balLeft(tree, _tail(tl), tree.right) + else tree.redWithLeft(_tail(tree.left)) + } + blacken(_tail(tree)) + } + + def init[A, B](tree: Tree[A, B]): Tree[A, B] = { + def _init(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) throw new NoSuchElementException("empty tree") + else { + val tr = tree.right + if (tr eq null) tree.left + else if (tr.isBlack) balRight(tree, tree.left, _init(tr)) + else tree.redWithRight(_init(tr)) + } + blacken(_init(tree)) + } + + /** + * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node. + */ + def minAfter[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp == 0) tree + else if (cmp < 0) { + val l = minAfter(tree.left, x) + if (l != null) l else tree + } else minAfter(tree.right, x) + } + + /** + * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node. 
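+   * For example (an illustrative sketch), in a tree holding keys 1, 3 and 5,
+   * `maxBefore(tree, 4)` returns the node with key 3, while `maxBefore(tree, 1)`
+   * returns `null`.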
+ */ + def maxBefore[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp <= 0) maxBefore(tree.left, x) + else { + val r = maxBefore(tree.right, x) + if (r != null) r else tree + } + } + + def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) + + def keysEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameKeys(new EqualsIterator(b)) + } + def valuesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameValues(new EqualsIterator(b)) + } + def entriesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameEntries(new EqualsIterator(b)) + } + + private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = { + if (tree.left ne null) _foreach(tree.left, f) + f((tree.key, tree.value)) + if (tree.right ne null) _foreach(tree.right, f) + } + + def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) + + private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { + if (tree.left ne null) _foreachKey(tree.left, f) + f((tree.key)) + if (tree.right ne null) _foreachKey(tree.right, f) + } + + def foreachEntry[A, B, U](tree:Tree[A,B], f: (A, B) => U):Unit = if (tree ne null) _foreachEntry(tree,f) + + private[this] def _foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { + if (tree.left ne null) _foreachEntry(tree.left, f) + f(tree.key, tree.value) + if (tree.right ne null) _foreachEntry(tree.right, f) + } + + def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) + def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) + def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) + + @tailrec + def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { + val count = this.count(tree.left) + if (n < count) nth(tree.left, n) + else if (n > count) nth(tree.right, n - count - 1) + else tree + } + + def isBlack(tree: Tree[_, _]) = (tree eq null) || tree.isBlack + + @`inline` private[this] def isRedTree(tree: Tree[_, _]) = (tree ne null) && tree.isRed + @`inline` private[this] def isBlackTree(tree: Tree[_, _]) = (tree ne null) && tree.isBlack + + private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black + + // Blacken if the tree is red and has a red child. This is necessary when using methods such as `upd` or `updNth` + // for building subtrees. Use `blacken` instead when building top-level trees. 
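+  // For instance, a red node that still has a red child would re-introduce a
+  // red-red violation when used as a subtree root, so it is blackened here; a
+  // red node whose children are both black is left unchanged. (Explanatory
+  // note, not part of the original source.)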
+ private[this] def maybeBlacken[A, B](t: Tree[A, B]): Tree[A, B] = + if(isBlack(t)) t else if(isRedTree(t.left) || isRedTree(t.right)) t.black else t + + private[this] def mkTree[A, B](isBlack: Boolean, key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = { + val sizeAndColour = sizeOf(left) + sizeOf(right) + 1 | (if(isBlack) initialBlackCount else initialRedCount) + new Tree(key, value.asInstanceOf[AnyRef], left, right, sizeAndColour) + } + + /** Create a new balanced tree where `newLeft` replaces `tree.left`. */ + private[this] def balanceLeft[A, B1](tree: Tree[A, B1], newLeft: Tree[A, B1]): Tree[A, B1] = { + // Parameter trees + // tree | newLeft + // -- KV R | nl.L nl.KV nl.R + // | nl.R.L nl.R.KV nl.R.R + if (tree.left eq newLeft) tree + else { + if (newLeft.isRed) { + val newLeft_left = newLeft.left + val newLeft_right = newLeft.right + if (isRedTree(newLeft_left)) { + // RED + // black(nl.L) nl.KV black + // nl.R KV R + val resultLeft = newLeft_left.black + val resultRight = tree.blackWithLeft(newLeft_right) + + newLeft.withLeftRight(resultLeft, resultRight) + } else if (isRedTree(newLeft_right)) { + // RED + // black nl.R.KV black + // nl.L nl.KV nl.R.L nl.R.R KV R + val newLeft_right_right = newLeft_right.right + + val resultLeft = newLeft.blackWithRight(newLeft_right.left) + val resultRight = tree.blackWithLeft(newLeft_right_right) + + newLeft_right.withLeftRight(resultLeft, resultRight) + } else { + // tree + // newLeft KV R + tree.withLeft(newLeft) + } + } else { + // tree + // newLeft KV R + tree.withLeft(newLeft) + } + } + } + /** Create a new balanced tree where `newRight` replaces `tree.right`. */ + private[this] def balanceRight[A, B1](tree: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + // Parameter trees + // tree | newRight + // L KV -- | nr.L nr.KV nr.R + // | nr.L.L nr.L.KV nr.L.R + if (tree.right eq newRight) tree + else { + if (newRight.isRed) { + val newRight_left = newRight.left + if (isRedTree(newRight_left)) { + // RED + // black nr.L.KV black + // L KV nr.L.L nr.L.R nr.KV nr.R + val resultLeft = tree.blackWithRight(newRight_left.left) + val resultRight = newRight.blackWithLeft(newRight_left.right) + + newRight_left.withLeftRight(resultLeft, resultRight) + } else { + val newRight_right = newRight.right + if (isRedTree(newRight_right)) { + // RED + // black nr.KV black(nr.R) + // L KV nr.L + val resultLeft = tree.blackWithRight(newRight_left) + val resultRight = newRight_right.black + + newRight.withLeftRight(resultLeft, resultRight) + } else { + // tree + // L KV newRight + tree.withRight(newRight) + } + } + } else { + // tree + // L KV newRight + tree.withRight(newRight) + } + } + } + + private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { + RedTree(k, v, null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + if (overwrite) + tree.withV(v) + else tree + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + balanceLeft(tree, upd(tree.left, k, v, overwrite)) + else if (cmp > 0) + balanceRight(tree, upd(tree.right, k, v, overwrite)) + else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.value.asInstanceOf[AnyRef])) + tree.withV(v) + else tree + } + private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) { + RedTree(k, v, null, null) + } else { + val rank = count(tree.left) + 1 + if (idx < rank) + balanceLeft(tree, updNth(tree.left, idx, k, v)) + else 
if (idx > rank) + balanceRight(tree, updNth(tree.right, idx - rank, k, v)) + else tree + } + + private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) + val newLeft = doFrom(tree.left, from) + if (newLeft eq tree.left) tree + else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false) + else join(newLeft, tree.key, tree.value, tree.right) + } + private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(to, tree.key)) return doTo(tree.left, to) + val newRight = doTo(tree.right, to) + if (newRight eq tree.right) tree + else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) + else join (tree.left, tree.key, tree.value, newRight) + } + private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) + val newRight = doUntil(tree.right, until) + if (newRight eq tree.right) tree + else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) + else join(tree.left, tree.key, tree.value, newRight) + } + + private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until) + if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until) + val newLeft = doFrom(tree.left, from) + val newRight = doUntil(tree.right, until) + if ((newLeft eq tree.left) && (newRight eq tree.right)) tree + else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false) + else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false) + else join(newLeft, tree.key, tree.value, newRight) + } + + private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = + if((tree eq null) || (n <= 0)) tree + else if(n >= tree.count) null + else { + val l = count(tree.left) + if(n > l) doDrop(tree.right, n-l-1) + else if(n == l) join(null, tree.key, tree.value, tree.right) + else join(doDrop(tree.left, n), tree.key, tree.value, tree.right) + } + + private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = + if((tree eq null) || (n <= 0)) null + else if(n >= tree.count) tree + else { + val l = count(tree.left) + if(n <= l) doTake(tree.left, n) + else if(n == l+1) maybeBlacken(updNth(tree.left, n, tree.key, tree.value)) + else join(tree.left, tree.key, tree.value, doTake(tree.right, n-l-1)) + } + + private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = + if((tree eq null) || (from >= until) || (from >= tree.count) || (until <= 0)) null + else if((from <= 0) && (until >= tree.count)) tree + else { + val l = count(tree.left) + if(until <= l) doSlice(tree.left, from, until) + else if(from > l) doSlice(tree.right, from-l-1, until-l-1) + else join(doDrop(tree.left, from), tree.key, tree.value, doTake(tree.right, until-l-1)) + } + + /* + * Forcing direct fields access using the @`inline` annotation helps speed up + * various operations (especially smallest/greatest and update/delete). + * + * Unfortunately the direct field access is not guaranteed to work (but + * works on the current implementation of the Scala compiler). 
+   *
+   * An alternative is to implement these classes using plain old Java code...
+   *
+   * Mutability
+   * This implementation encodes both mutable and immutable trees.
+   * Mutable trees are never exposed to the user code but we get significant reductions in both CPU and allocations
+   * by maintaining a mutable tree during internal operations, e.g. in a builder building a Tree, and in other bulk
+   * APIs such as filter or ++
+   *
+   * Mutable trees are only used within the confines of such a bulk operation and are not shared
+   * Mutable trees may transition to become immutable by calling beforePublish
+   * Mutable trees may have child nodes (left and right) which are immutable Trees (this promotes structural sharing)
+   *
+   * Immutable trees may only have child nodes (left and right) which are immutable Trees, and as such
+   * the entire transitive subtree of an immutable tree is immutable
+   *
+   * Colour, mutability and size encoding
+   * The colour of the Tree, its mutability and its size are all encoded in the _count field
+   * The colour is encoded in the top bit (31) of the _count. This allows a mutable tree to change colour without
+   * additional allocation
+   * The mutable trees always have bits 0 .. 30 (inclusive) set to 0
+   * The immutable trees always have bits 0 .. 30 containing the size of the transitive subtree
+   *
+   * Naming
+   * All of the methods that can yield a mutable result have "mutable" in their name, and generally there
+   * is another similarly named method which doesn't. This is to aid safety and to reduce the cognitive load when
+   * reviewing changes. e.g.
+   *   def upd(...) will update an immutable Tree, producing an immutable Tree
+   *   def mutableUpd(...) will update a mutable or immutable Tree and may return a mutable or immutable Tree
+   * a method that has mutable in its name may return an immutable tree if the operation can reuse the existing tree
+   *
+   */
+  private[immutable] final class Tree[A, +B](
+    @(`inline` @getter @setter) private var _key: A,
+    @(`inline` @getter @setter) private var _value: AnyRef,
+    @(`inline` @getter @setter) private var _left: Tree[A, _],
+    @(`inline` @getter @setter) private var _right: Tree[A, _],
+    @(`inline` @getter @setter) private var _count: Int)
+  {
+    @`inline` private[RedBlackTree] def isMutable: Boolean = (_count & colourMask) == 0
+    // read only APIs
+    @`inline` private[RedBlackTree] final def count = {
+      //devTimeAssert((_count & 0x7FFFFFFF) != 0)
+      _count & colourMask
+    }
+    //retain the colour, and mark as mutable
+    @`inline` private def mutableRetainingColour = _count & colourBit
+
+    //inlined here to avoid outer object null checks
+    @`inline` private[RedBlackTree] final def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count
+    @`inline` private[immutable] final def key = _key
+    @`inline` private[immutable] final def value = _value.asInstanceOf[B]
+    @`inline` private[immutable] final def left = _left.asInstanceOf[Tree[A, B]]
+    @`inline` private[immutable] final def right = _right.asInstanceOf[Tree[A, B]]
+    //Note - only used in tests outside RedBlackTree
+    @`inline` private[immutable] final def isBlack = _count < 0
+    //Note - only used in tests outside RedBlackTree
+    @`inline` private[immutable] final def isRed = _count >= 0
+
+    override def toString: String = s"${if(isRed) "RedTree" else "BlackTree"}($key, $value, $left, $right)"
+
+    //mutable APIs
+    private[RedBlackTree] def makeImmutable: Tree[A, B] = {
+      def makeImmutableImpl() = {
+        if (isMutable) {
+          var size = 1
+          if (_left ne null) {
+            _left.makeImmutable
+            size += _left.count
+          }
+          
if (_right ne null) { + _right.makeImmutable + size += _right.count + } + _count |= size //retains colour + } + this + } + makeImmutableImpl() + this + } + + private[RedBlackTree] def mutableBlack: Tree[A, B] = { + if (isBlack) this + else if (isMutable) { + _count = initialBlackCount + this + } + else new Tree(_key, _value, _left, _right, initialBlackCount) + } +// private[RedBlackTree] def mutableRed: Tree[A, B] = { +// if (isRed) this +// else if (mutable) { +// _count = initialRedCount +// this +// } +// else new Tree(_key, _value, _left, _right, initialRedCount) +// } + + private[RedBlackTree] def mutableWithV[B1 >: B](newValue: B1): Tree[A, B1] = { + if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this + else if (isMutable) { + _value = newValue.asInstanceOf[AnyRef] + this + } else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour) + } + + private[RedBlackTree] def mutableWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + if (_left eq newLeft) this + else if (isMutable) { + _left = newLeft + this + } else new Tree(_key, _value, newLeft, _right, mutableRetainingColour) + } + private[RedBlackTree] def mutableWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + if (_right eq newRight) this + else if (isMutable) { + _right = newRight + this + } else new Tree(_key, _value, _left, newRight, mutableRetainingColour) + } + private[RedBlackTree] def mutableWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + if ((_left eq newLeft) && (_right eq newRight)) this + else if (isMutable) { + _left = newLeft + _right = newRight + this + } else new Tree(_key, _value, newLeft, newRight, mutableRetainingColour) + } + private[RedBlackTree] def mutableBlackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + if ((_left eq newLeft) && isBlack) this + else if (isMutable) { + _count = initialBlackCount + _left = newLeft + this + } else new Tree(_key, _value, newLeft, _right, initialBlackCount) + } + private[RedBlackTree] def mutableBlackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + if ((_right eq newRight) && isBlack) this + else if (isMutable) { + _count = initialBlackCount + _right = newRight + this + } else new Tree(_key, _value, _left, newRight, initialBlackCount) + } + + private[RedBlackTree] def black: Tree[A, B] = { + //assertNotMutable(this) + if (isBlack) this + else new Tree(_key, _value, _left, _right, _count ^ colourBit) + } + private[RedBlackTree] def red: Tree[A, B] = { + //assertNotMutable(this) + if (isRed) this + else new Tree(_key, _value, _left, _right, _count ^ colourBit) + } + private[RedBlackTree] def withKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = { + //assertNotMutable(this) + if ((newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) && + (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef])) this + else new Tree(newKey, newValue.asInstanceOf[AnyRef], _left, _right, _count) + } + private[RedBlackTree] def withV[B1 >: B](newValue: B1): Tree[A, B1] = { + //assertNotMutable(this) + if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this + else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, _count) + } + + private[RedBlackTree] def withLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + if (newLeft eq _left) this + else { + val size = sizeOf(newLeft) + sizeOf(_right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, (_count & colourBit) | size) 
+      }
+    }
+    private[RedBlackTree] def withRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newRight)
+      if (newRight eq _right) this
+      else {
+        val size = sizeOf(_left) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, (_count & colourBit) | size)
+      }
+    }
+    private[RedBlackTree] def blackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      if ((newLeft eq _left) && isBlack) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(_right) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialBlackCount | size)
+      }
+    }
+    private[RedBlackTree] def redWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      if ((newLeft eq _left) && isRed) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(_right) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialRedCount | size)
+      }
+    }
+    private[RedBlackTree] def blackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newRight)
+      if ((newRight eq _right) && isBlack) this
+      else {
+        val size = sizeOf(_left) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialBlackCount | size)
+      }
+    }
+    private[RedBlackTree] def redWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newRight)
+      if ((newRight eq _right) && isRed) this
+      else {
+        val size = sizeOf(_left) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialRedCount | size)
+      }
+    }
+    private[RedBlackTree] def withLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      //assertNotMutable(newRight)
+      if ((newLeft eq _left) && (newRight eq _right)) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, (_count & colourBit) | size)
+      }
+    }
+    private[RedBlackTree] def redWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      //assertNotMutable(newRight)
+      if ((newLeft eq _left) && (newRight eq _right) && isRed) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialRedCount | size)
+      }
+    }
+    private[RedBlackTree] def blackWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      //assertNotMutable(newRight)
+      if ((newLeft eq _left) && (newRight eq _right) && isBlack) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialBlackCount | size)
+      }
+    }
+  }
+  //see #Tree docs "Colour, mutability and size encoding"
+  //we make these final vals because the optimiser inlines them, without reference to the enclosing module
+  private[RedBlackTree] final val colourBit = 0x80000000
+  //really it's ~colourBit but that doesn't get inlined
+  private[RedBlackTree] final val colourMask = colourBit - 1
+  private[RedBlackTree] final val initialBlackCount = colourBit
+  private[RedBlackTree] final val initialRedCount = 0
+
+  @`inline` private[RedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B])
= new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) + @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) + + /** create a new immutable red tree. + * left and right may be null + */ + private[immutable] def RedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { + //assertNotMutable(left) + //assertNotMutable(right) + val size = sizeOf(left) + sizeOf(right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], left, right, initialRedCount | size) + } + private[immutable] def BlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { + //assertNotMutable(left) + //assertNotMutable(right) + val size = sizeOf(left) + sizeOf(right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], left, right, initialBlackCount | size) + } + @`inline` private def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count + //immutable APIs + //assertions - uncomment decls and callers when changing functionality + // private def devTimeAssert(assertion: Boolean) = { + // //uncomment this during development of the functionality + // assert(assertion) + // } + // private def assertNotMutable(t:Tree[_,_]) = { + // devTimeAssert ((t eq null) || t.count > 0) + // } + private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(protected implicit val ordering: Ordering[A]) extends AbstractIterator[R] { + protected[this] def nextResult(tree: Tree[A, B]): R + + override def hasNext: Boolean = lookahead ne null + + @throws[NoSuchElementException] + override def next(): R = { + val tree = lookahead + if(tree ne null) { + lookahead = findLeftMostOrPopOnEmpty(goRight(tree)) + nextResult(tree) + } else Iterator.empty.next() + } + + @tailrec + protected final def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) popNext() + else if (tree.left eq null) tree + else findLeftMostOrPopOnEmpty(goLeft(tree)) + + @`inline` private[this] def pushNext(tree: Tree[A, B]): Unit = { + stackOfNexts(index) = tree + index += 1 + } + @`inline` protected final def popNext(): Tree[A, B] = if (index == 0) null else { + index -= 1 + stackOfNexts(index) + } + + protected[this] val stackOfNexts = if (root eq null) null else { + /* + * According to "Ralf Hinze. Constructing red-black trees" [https://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] + * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. + * + * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) + * + * Although we don't store the deepest nodes in the path during iteration, + * we potentially do so in `startFrom`. + */ + val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 + new Array[Tree[A, B] @uncheckedCaptures](maximumHeight) + } + private[this] var index = 0 + protected var lookahead: Tree[A, B] @uncheckedCaptures = + if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) + + /** + * Find the leftmost subtree whose key is equal to the given key, or if no such thing, + * the leftmost subtree with the key that would be "next" after it according + * to the ordering. Along the way build up the iterator's path stack so that "next" + * functionality works. 
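+     *
+     * For example (an illustrative sketch), in a tree holding keys 1, 3 and 5,
+     * starting from key 3 or from key 2 both position the iterator at the
+     * node with key 3.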
+ */ + private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { + @tailrec def find(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) popNext() + else find( + if (ordering.lteq(key, tree.key)) goLeft(tree) + else goRight(tree) + ) + find(root) + } + + @`inline` private[this] def goLeft(tree: Tree[A, B]) = { + pushNext(tree) + tree.left + } + + @`inline` protected final def goRight(tree: Tree[A, B]) = tree.right + } + + private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B]) extends TreeIterator[A, B, Unit](tree, None) { + override def nextResult(tree: Tree[A, B]) = ??? + + def sameKeys[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = (this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || + ordering.equiv(this.lookahead.key, that.lookahead.key) + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + def sameValues[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = this.lookahead.value == that.lookahead.value + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + def sameEntries[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = ((this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || + ordering.equiv(this.lookahead.key, that.lookahead.key)) && this.lookahead.value == that.lookahead.value + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + } + private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { + override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) + } + + private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) { + override def nextResult(tree: Tree[A, B]) = tree.key + } + + private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) { + override def nextResult(tree: Tree[A, B]) = tree.value + } + + /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ + def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Tree[A, Null] = size match { + case 0 => null + case 1 => mkTree(level != maxUsedDepth || level == 1, xs.next(), 
null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val x = xs.next() + val right = f(level+1, size-1-leftSize) + BlackTree(x, null, left, right) + } + f(1, size) + } + + /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ + def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Tree[A, B] = size match { + case 0 => null + case 1 => + val (k, v) = xs.next() + mkTree(level != maxUsedDepth || level == 1, k, v, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val (k, v) = xs.next() + val right = f(level+1, size-1-leftSize) + BlackTree(k, v, left, right) + } + f(1, size) + } + + def transform[A, B, C](t: Tree[A, B], f: (A, B) => C): Tree[A, C] = + if(t eq null) null + else { + val k = t.key + val v = t.value + val l = t.left + val r = t.right + val l2 = transform(l, f) + val v2 = f(k, v) + val r2 = transform(r, f) + if((v2.asInstanceOf[AnyRef] eq v.asInstanceOf[AnyRef]) + && (l2 eq l) + && (r2 eq r)) t.asInstanceOf[Tree[A, C]] + else mkTree(t.isBlack, k, v2, l2, r2) + } + + def filterEntries[A, B](t: Tree[A, B], f: (A, B) => Boolean): Tree[A, B] = if(t eq null) null else { + def fk(t: Tree[A, B]): Tree[A, B] = { + val k = t.key + val v = t.value + val l = t.left + val r = t.right + val l2 = if(l eq null) null else fk(l) + val keep = f(k, v) + val r2 = if(r eq null) null else fk(r) + if(!keep) join2(l2, r2) + else if((l2 eq l) && (r2 eq r)) t + else join(l2, k, v, r2) + } + blacken(fk(t)) + } + + private[this] val null2 = (null, null) + + def partitionEntries[A, B](t: Tree[A, B], p: (A, B) => Boolean): (Tree[A, B], Tree[A, B]) = if(t eq null) (null, null) else { + if (t eq null) null2 + else { + object partitioner { + var tmpk, tmpd = null: Tree[A, B] // shared vars to avoid returning tuples from fk + def fk(t: Tree[A, B]): Unit = { + val k = t.key + val v = t.value + val l = t.left + val r = t.right + var l2k, l2d, r2k, r2d = null: Tree[A, B] + if (l ne null) { + fk(l) + l2k = tmpk + l2d = tmpd + } + val keep = p(k, v) + if (r ne null) { + fk(r) + r2k = tmpk + r2d = tmpd + } + val jk = + if (!keep) join2(l2k, r2k) + else if ((l2k eq l) && (r2k eq r)) t + else join(l2k, k, v, r2k) + val jd = + if (keep) join2(l2d, r2d) + else if ((l2d eq l) && (r2d eq r)) t + else join(l2d, k, v, r2d) + tmpk = jk + tmpd = jd + } + } + + partitioner.fk(t) + (blacken(partitioner.tmpk), blacken(partitioner.tmpd)) + } + } + + // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees + // Constructing Red-Black Trees, Ralf Hinze: [[https://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]] + // Red-Black Trees in a Functional Setting, Chris Okasaki: [[https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf]] */ + + private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) { + val newLeft = del(tree.left, k) + if (newLeft eq tree.left) tree + else if (isBlackTree(tree.left)) balLeft(tree, newLeft, tree.right) + else tree.redWithLeft(newLeft) + } else if (cmp > 0) { + val newRight = del(tree.right, k) + if (newRight eq tree.right) tree + else if (isBlackTree(tree.right)) balRight(tree, tree.left, newRight) + else tree.redWithRight(newRight) + } else append(tree.left, tree.right) + } + 
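+  // Okasaki-style deletion rebalancing (explanatory note, not from the
+  // original source): when a child passed in is red and itself has a red
+  // child, the nodes are rotated so that the two reds become the black
+  // children of a red root; otherwise the node is simply re-made black.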
+ private[this] def balance[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tl)) { + if (isRedTree(tr)) tree.redWithLeftRight(tl.black, tr.black) + else if (isRedTree(tl.left)) tl.withLeftRight(tl.left.black, tree.blackWithLeftRight(tl.right, tr)) + else if (isRedTree(tl.right)) tl.right.withLeftRight(tl.blackWithRight(tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) + else tree.blackWithLeftRight(tl, tr) + } else if (isRedTree(tr)) { + if (isRedTree(tr.right)) tr.withLeftRight(tree.blackWithLeftRight(tl, tr.left), tr.right.black) + else if (isRedTree(tr.left)) tr.left.withLeftRight(tree.blackWithLeftRight(tl, tr.left.left), tr.blackWithLeftRight(tr.left.right, tr.right)) + else tree.blackWithLeftRight(tl, tr) + } else tree.blackWithLeftRight(tl, tr) + + private[this] def balLeft[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tl)) tree.redWithLeftRight(tl.black, tr) + else if (isBlackTree(tr)) balance(tree, tl, tr.red) + else if (isRedTree(tr) && isBlackTree(tr.left)) + tr.left.redWithLeftRight(tree.blackWithLeftRight(tl, tr.left.left), balance(tr, tr.left.right, tr.right.red)) + else sys.error("Defect: invariance violation") + + private[this] def balRight[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tr)) tree.redWithLeftRight(tl, tr.black) + else if (isBlackTree(tl)) balance(tree, tl.red, tr) + else if (isRedTree(tl) && isBlackTree(tl.right)) + tl.right.redWithLeftRight(balance(tl, tl.left.red, tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) + else sys.error("Defect: invariance violation") + + /** `append` is similar to `join2` but requires that both subtrees have the same black height */ + private[this] def append[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = { + if (tl eq null) tr + else if (tr eq null) tl + else if (tl.isRed) { + if (tr.isRed) { + //tl is red, tr is red + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) + else tl.withRight(tr.withLeft(bc)) + } else { + //tl is red, tr is black + tl.withRight(append(tl.right, tr)) + } + } else { + if (tr.isBlack) { + //tl is black tr is black + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) + else balLeft(tl, tl.left, tr.withLeft(bc)) + } else { + //tl is black tr is red + tr.withLeft(append(tl, tr.left)) + } + } + } + + + // Bulk operations based on "Just Join for Parallel Ordered Sets" (https://www.cs.cmu.edu/~guyb/papers/BFS16.pdf) + // We don't store the black height in the tree so we pass it down into the join methods and derive the black height + // of child nodes from it. Where possible the black height is used directly instead of deriving the rank from it. + // Our trees are supposed to have a black root so we always blacken as the last step of union/intersect/difference. 
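+  //
+  // Illustrative note (not from the original source): on duplicate keys the
+  // right operand wins, e.g. the union of {1 -> "a", 2 -> "b"} with {2 -> "B"}
+  // contains 1 -> "a" and 2 -> "B", because `_union` splits the left tree at
+  // `t2.key` and joins using `t2.value`.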
+ + def union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_union(t1, t2)) + + def intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_intersect(t1, t2)) + + def difference[A, B](t1: Tree[A, B], t2: Tree[A, _])(implicit ordering: Ordering[A]): Tree[A, B] = + blacken(_difference(t1, t2.asInstanceOf[Tree[A, B]])) + + /** Compute the rank from a tree and its black height */ + @`inline` private[this] def rank(t: Tree[_, _], bh: Int): Int = { + if(t eq null) 0 + else if(t.isBlack) 2*(bh-1) + else 2*bh-1 + } + + private[this] def joinRight[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], bhtl: Int, rtr: Int): Tree[A, B] = { + val rtl = rank(tl, bhtl) + if(rtl == (rtr/2)*2) RedTree(k, v, tl, tr) + else { + val tlBlack = isBlackTree(tl) + val bhtlr = if(tlBlack) bhtl-1 else bhtl + val ttr = joinRight(tl.right, k, v, tr, bhtlr, rtr) + if(tlBlack && isRedTree(ttr) && isRedTree(ttr.right)) + RedTree(ttr.key, ttr.value, + BlackTree(tl.key, tl.value, tl.left, ttr.left), + ttr.right.black) + else mkTree(tlBlack, tl.key, tl.value, tl.left, ttr) + } + } + + private[this] def joinLeft[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], rtl: Int, bhtr: Int): Tree[A, B] = { + val rtr = rank(tr, bhtr) + if(rtr == (rtl/2)*2) RedTree(k, v, tl, tr) + else { + val trBlack = isBlackTree(tr) + val bhtrl = if(trBlack) bhtr-1 else bhtr + val ttl = joinLeft(tl, k, v, tr.left, rtl, bhtrl) + if(trBlack && isRedTree(ttl) && isRedTree(ttl.left)) + RedTree(ttl.key, ttl.value, + ttl.left.black, + BlackTree(tr.key, tr.value, ttl.right, tr.right)) + else mkTree(trBlack, tr.key, tr.value, ttl, tr.right) + } + } + + private[this] def join[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B]): Tree[A, B] = { + @tailrec def h(t: Tree[_, _], i: Int): Int = + if(t eq null) i+1 else h(t.left, if(t.isBlack) i+1 else i) + val bhtl = h(tl, 0) + val bhtr = h(tr, 0) + if(bhtl > bhtr) { + val tt = joinRight(tl, k, v, tr, bhtl, rank(tr, bhtr)) + if(isRedTree(tt) && isRedTree(tt.right)) tt.black + else tt + } else if(bhtr > bhtl) { + val tt = joinLeft(tl, k, v, tr, rank(tl, bhtl), bhtr) + if(isRedTree(tt) && isRedTree(tt.left)) tt.black + else tt + } else mkTree(isRedTree(tl) || isRedTree(tr), k, v, tl, tr) + } + + private[this] def split[A, B](t: Tree[A, B], k2: A)(implicit ordering: Ordering[A]): (Tree[A, B], Tree[A, B], Tree[A, B], A) = + if(t eq null) (null, null, null, k2) + else { + val cmp = ordering.compare(k2, t.key) + if(cmp == 0) (t.left, t, t.right, t.key) + else if(cmp < 0) { + val (ll, b, lr, k1) = split(t.left, k2) + (ll, b, join(lr, t.key, t.value, t.right), k1) + } else { + val (rl, b, rr, k1) = split(t.right, k2) + (join(t.left, t.key, t.value, rl), b, rr, k1) + } + } + + private[this] def splitLast[A, B](t: Tree[A, B]): (Tree[A, B], A, B) = + if(t.right eq null) (t.left, t.key, t.value) + else { + val (tt, kk, vv) = splitLast(t.right) + (join(t.left, t.key, t.value, tt), kk, vv) + } + + private[this] def join2[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if(tl eq null) tr + else if(tr eq null) tl + else { + val (ttl, k, v) = splitLast(tl) + join(ttl, k, v, tr) + } + + private[this] def _union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + if((t1 eq null) || (t1 eq t2)) t2 + else if(t2 eq null) t1 + else { + val (l1, _, r1, k1) = split(t1, t2.key) + val tl = _union(l1, t2.left) + val tr = _union(r1, t2.right) + join(tl, k1, t2.value, tr) + } + + private[this] def _intersect[A, B](t1: 
Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + if((t1 eq null) || (t2 eq null)) null + else if (t1 eq t2) t1 + else { + val (l1, b, r1, k1) = split(t1, t2.key) + val tl = _intersect(l1, t2.left) + val tr = _intersect(r1, t2.right) + if(b ne null) join(tl, k1, t2.value, tr) + else join2(tl, tr) + } + + private[this] def _difference[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + if((t1 eq null) || (t2 eq null)) t1 + else if (t1 eq t2) null + else { + val (l1, _, r1, k1) = split(t1, t2.key) + val tl = _difference(l1, t2.left) + val tr = _difference(r1, t2.right) + join2(tl, tr) + } +} From d526e10754018f3fb25cf4798e864f9caaf2d05d Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 11:46:11 +0100 Subject: [PATCH 109/216] Add immutable SeqMap to stdlib --- .../stdlib/collection/immutable/SeqMap.scala | 278 ++++++++++++++++++ 1 file changed, 278 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/SeqMap.scala diff --git a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala new file mode 100644 index 000000000000..6c955fd52fc2 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala @@ -0,0 +1,278 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** A base trait for ordered, immutable maps. + * + * Note that the [[equals]] method for [[SeqMap]] compares key-value pairs + * without regard to ordering. + * + * All behavior is defined in terms of the abstract methods in `SeqMap`. + * It is sufficient for concrete subclasses to implement those methods. + * Methods that return a new map, in particular [[removed]] and [[updated]], must preserve ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. 
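+ *
+ * For instance (an illustrative sketch):
+ * {{{
+ *   val m = SeqMap(3 -> "c", 1 -> "a") + (2 -> "b")
+ *   m.keys.toList   // List(3, 1, 2): insertion order, not key order
+ * }}}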
+ * + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ + +trait SeqMap[K, +V] + extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + + +object SeqMap extends MapFactory[SeqMap] { + def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]^): SeqMap[K, V] = + it match { + case sm: SeqMap[K, V] => sm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl + + @SerialVersionUID(3L) + private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable { + override def size: Int = 0 + override def knownSize: Int = 0 + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + def get(key: Any): Option[Nothing] = None + override def getOrElse [V1](key: Any, default: => V1): V1 = default + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + def updated [V1] (key: Any, value: V1): SeqMap[Any, V1] = new SeqMap1(key, value) + def removed(key: Any): SeqMap[Any, Nothing] = this + } + + @SerialVersionUID(3L) + private final class SeqMap1[K, +V](key1: K, value1: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = key == key1 + def get(key: K): Option[V] = + if (key == key1) Some(value1) else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 else default + def iterator = Iterator.single((key1, value1)) + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap1(key1, value) + else new SeqMap2(key1, value1, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) SeqMap.empty else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + } + } + + @SerialVersionUID(3L) + private final class SeqMap2[K, +V](key1: K, value1: V, key2: K, value2: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else default + def iterator = ((key1, value1) :: (key2, value2) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap2(key1, value, key2, value2) + else if (key == key2) new SeqMap2(key1, value1, key2, value) + else new SeqMap3(key1, value1, key2, value2, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap1(key2, value2) + else if (key == key2) new SeqMap1(key1, value1) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + 
f(key2, value2) + } + } + + @SerialVersionUID(3L) + private class SeqMap3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap3(key1, value, key2, value2, key3, value3) + else if (key == key2) new SeqMap3(key1, value1, key2, value, key3, value3) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key3, value) + else new SeqMap4(key1, value1, key2, value2, key3, value3, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap2(key2, value2, key3, value3) + else if (key == key2) new SeqMap2(key1, value1, key3, value3) + else if (key == key3) new SeqMap2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + } + } + + @SerialVersionUID(3L) + private final class SeqMap4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 4 + override def knownSize: Int = 4 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else if (key == key4) Some(value4) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: (key4, value4) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new SeqMap4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) new SeqMap4(key1, value1, key2, value2, key3, value3, key4, value) + else { + // Directly create the elements for performance reasons + val fields = Vector(key1, key2, key3, key4, key) + val underlying: Map[K, (Int, V1)] = + HashMap( + (key1, (0, value1)), + (key2, (1, value2)), + (key3, (2, value3)), + (key4, (3, value4)), + (key, (4, value)) + ) + new VectorMap(fields, underlying) + } + def removed(key: 
K): SeqMap[K, V] = + if (key == key1) new SeqMap3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new SeqMap3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + f(key4, value4) + } + + private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type = + builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4)) + } + + private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { + private[this] var elems: SeqMap[K, V] @uncheckedCaptures = SeqMap.empty + private[this] var switchedToVectorMapBuilder: Boolean = false + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] @uncheckedCaptures = _ + + override def clear(): Unit = { + elems = SeqMap.empty + if (vectorMapBuilder != null) { + vectorMapBuilder.clear() + } + switchedToVectorMapBuilder = false + } + + override def result(): SeqMap[K, V] = + if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems + + def addOne(elem: (K, V)) = { + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem._1)) { + elems = elems + elem // will not increase the size of the map + } else { + switchedToVectorMapBuilder = true + if (vectorMapBuilder == null) { + vectorMapBuilder = new VectorMapBuilder + } + elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) + vectorMapBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } + } +} From 26dde726255cef3d899cabe7508b1aa207d904ef Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 11:48:52 +0100 Subject: [PATCH 110/216] Add immutable SortedMap to stdlib --- .../collection/immutable/SortedMap.scala | 178 ++++++++++++++++++ 1 file changed, 178 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/SortedMap.scala diff --git a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala new file mode 100644 index 000000000000..9587502fd908 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala @@ -0,0 +1,178 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder +import language.experimental.captureChecking + +/** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys. 
+ * + * Allows for range queries to be performed on its keys, and implementations must guarantee that traversal happens in + * sorted order, according to the map's [[scala.math.Ordering]]. + * + * @example {{{ + * import scala.collection.immutable.SortedMap + * + * // Make a SortedMap via the companion object factory + * val weekdays = SortedMap( + * 2 -> "Monday", + * 3 -> "Tuesday", + * 4 -> "Wednesday", + * 5 -> "Thursday", + * 6 -> "Friday" + * ) + * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) + * + * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") + * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * + * val day3 = days.get(3) // Some("Tuesday") + * + * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) + * + * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) + * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) + * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * }}} + * + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + */ +trait SortedMap[K, +V] + extends Map[K, V] + with collection.SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this + + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault[V1 >: V](d: K -> V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
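+ *
+ * A minimal sketch of the intended behavior (the values are illustrative):
+ * {{{
+ *   val m = SortedMap(1 -> "a").withDefaultValue("?")
+ *   m(2) // "?": `apply` falls back to the default
+ *   m.get(2) // None: `get` is unaffected by the default
+ * }}}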
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue[V1 >: V](d: V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d) +} + +trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] with collection.SortedMapOps[K, V, CC, C] { self => + + protected def coll: C with CC[K, V] + + def unsorted: Map[K, V] + + override def keySet: SortedSet[K] = new ImmutableKeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet { + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = self.rangeImpl(from, until) + new map.ImmutableKeySortedSet + } + def incl(elem: K): SortedSet[K] = fromSpecific(this).incl(elem) + def excl(elem: K): SortedSet[K] = fromSpecific(this).excl(elem) + } + + // We override these methods to fix their return type (which would be `Map` otherwise) + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + @`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K, V1] = { + // Implementation has been copied from `MapOps` + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } + } + override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })(ordering) +} + +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with collection.StrictOptimizedSortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = { + var result: CC[K, V2] = coll + val it = xs.iterator + while (it.hasNext) result = result + it.next() + result + } +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { + + override def from[K: Ordering, V](it: IterableOnce[(K, V)]^): SortedMap[K, V] = it match { + case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm + case _ => super.from(it) + } + + final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K -> V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable { + + implicit def ordering: Ordering[K] = underlying.ordering + + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory + + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + + override def updated[V1 >: V](key: K, value: V1): 
WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = + new WithDefault( underlying.concat(xs) , defaultValue) + + override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) + } +} From a08460ddfe2faab8e3c86776857334e3a63b37be Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 11:57:10 +0100 Subject: [PATCH 111/216] Add immutable StrictOptimizedSeqOps to stdlib --- .../collection/StrictOptimizedSeqOps.scala | 2 +- .../stdlib/collection/immutable/Seq.scala | 2 +- .../immutable/StrictOptimizedSeqOps.scala | 82 +++++++++++++++++++ 3 files changed, 84 insertions(+), 2 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala index 7a5c58bf2abf..bfea9eda8bd3 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -19,7 +19,7 @@ import scala.annotation.unchecked.uncheckedCaptures * to take advantage of strict builders. */ trait StrictOptimizedSeqOps [+A, +CC[_], +C] - extends AnyRef + extends Any with SeqOps[A, CC, C] with StrictOptimizedIterableOps[A, CC, C] { diff --git a/tests/pos-special/stdlib/collection/immutable/Seq.scala b/tests/pos-special/stdlib/collection/immutable/Seq.scala index 5184cadaccae..d575c3aaf14a 100644 --- a/tests/pos-special/stdlib/collection/immutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/immutable/Seq.scala @@ -30,7 +30,7 @@ trait Seq[+A] extends Iterable[A] * @define coll immutable sequence * @define Coll `immutable.Seq` */ -trait SeqOps[+A, +CC[_], +C] extends AnyRef with collection.SeqOps[A, CC, C] +trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C] /** * $factoryInfo diff --git a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala new file mode 100644 index 000000000000..b1e4622971fb --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala @@ -0,0 +1,82 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures + +/** + * Trait that overrides operations to take advantage of strict builders. 
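+ *
+ * For instance, `distinctBy` below builds its result eagerly and returns the
+ * collection itself when no element was dropped. A minimal sketch:
+ * {{{
+ *   Vector(1, 2, 3).distinctBy(_ % 2) // Vector(1, 2): 3 maps to the already-seen key 1
+ * }}}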
+ */ +trait StrictOptimizedSeqOps[+A, +CC[_], +C] + extends Any + with SeqOps[A, CC, C] + with collection.StrictOptimizedSeqOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def distinctBy[B](f: A -> B): C = { + if (lengthCompare(1) <= 0) coll + else { + val builder = newSpecificBuilder + val seen = mutable.HashSet.empty[B @uncheckedCaptures] + val it = this.iterator + var different = false + while (it.hasNext) { + val next = it.next() + if (seen.add(f(next))) builder += next else different = true + } + if (different) builder.result() else coll + } + } + + override def updated[B >: A](index: Int, elem: B): CC[B] = { + if (index < 0) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${if (knownSize>=0) knownSize else "unknown"})") + val b = iterableFactory.newBuilder[B] + if (knownSize >= 0) { + b.sizeHint(size) + } + var i = 0 + val it = iterator + while (i < index && it.hasNext) { + b += it.next() + i += 1 + } + if (!it.hasNext) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${i-1})") + b += elem + it.next() + while (it.hasNext) b += it.next() + b.result() + } + + override def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = { + val b = iterableFactory.newBuilder[B] + var i = 0 + val it = iterator + while (i < from && it.hasNext) { + b += it.next() + i += 1 + } + b ++= other + i = replaced + while (i > 0 && it.hasNext) { + it.next() + i -= 1 + } + while (it.hasNext) b += it.next() + b.result() + } + + override def sorted[B >: A](implicit ord: Ordering[B]): C = super.sorted(ord) + +} From c66c833eb9b4b7a8677d7a437b94e7e395b8989a Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 12:05:57 +0100 Subject: [PATCH 112/216] Add Vector and VectorMap to stdlib --- .../stdlib/collection/immutable/Vector.scala | 2476 +++++++++++++++++ .../collection/immutable/VectorMap.scala | 277 ++ 2 files changed, 2753 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/Vector.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/VectorMap.scala diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala new file mode 100644 index 000000000000..1bde30406fd9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Vector.scala @@ -0,0 +1,2476 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package immutable + +import java.lang.Math.{abs, max => mmax, min => mmin} +import java.util.Arrays.{copyOf, copyOfRange} +import java.util.{Arrays, Spliterator} + +import scala.annotation.switch +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.VectorInline._ +import scala.collection.immutable.VectorStatics._ +import scala.collection.mutable.ReusableBuilder +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + + +/** $factoryInfo + * @define Coll `Vector` + * @define coll vector + */ +@SerialVersionUID(3L) +object Vector extends StrictOptimizedSeqFactory[Vector] { + + def empty[A]: Vector[A] = Vector0 + + def from[E](it: collection.IterableOnce[E]^): Vector[E] = + it match { + case v: Vector[E] => v + case _ => + val knownSize = it.knownSize + if (knownSize == 0) empty[E] + else if (knownSize > 0 && knownSize <= WIDTH) { + val a1: Arr1 = it match { + case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] => + as.unsafeArray.asInstanceOf[Arr1] + case it: Iterable[E] => + val a1 = new Arr1(knownSize) + it.copyToArray(a1.asInstanceOf[Array[Any]]) + a1 + case _ => + val a1 = new Arr1(knownSize) + it.iterator.copyToArray(a1.asInstanceOf[Array[Any]]) + a1.asInstanceOf[Arr1] + } + new Vector1[E](a1) + } else { + (newBuilder ++= it).result() + } + } + + def newBuilder[A]: ReusableBuilder[A, Vector[A]] = new VectorBuilder[A] + + /** Create a Vector with the same element at each index. + * + * Unlike `fill`, which takes a by-name argument for the value and can thereby + * compute different values for each index, this method guarantees that all + * elements are identical. This allows sparse allocation in O(log n) time and space. + */ + private[collection] def fillSparse[A](n: Int)(elem: A): Vector[A] = { + //TODO Make public; this method is private for now because it is not forward binary compatible + if(n <= 0) Vector0 + else { + val b = new VectorBuilder[A] + b.initSparse(n, elem) + b.result() + } + } + + private val defaultApplyPreferredMaxLength: Int = + try System.getProperty("scala.collection.immutable.Vector.defaultApplyPreferredMaxLength", + "250").toInt + catch { + case _: SecurityException => 250 + } + + private val emptyIterator = new NewVectorIterator(Vector0, 0, 0) +} + + +/** Vector is a general-purpose, immutable data structure. It provides random access and updates + * in O(log n) time, as well as very fast append/prepend/tail/init (amortized O(1), worst case O(log n)). + * Because vectors strike a good balance between fast random selections and fast random functional updates, + * they are currently the default implementation of immutable indexed sequences. + * + * Vectors are implemented by radix-balanced finger trees of width 32. There is a separate subclass + * for each level (0 to 6, with 0 being the empty vector and 6 a tree with a maximum width of 64 at the + * top level). + * + * Tree balancing: + * - Only the first dimension of an array may have a size < WIDTH + * - In a `data` (central) array the first dimension may be up to WIDTH-2 long, in `prefix1` and `suffix1` up + * to WIDTH, and in other `prefix` and `suffix` arrays up to WIDTH-1 + * - `prefix1` and `suffix1` are never empty + * - Balancing does not cross the main data array (i.e. prepending never touches the suffix and appending never touches + * the prefix). 
The level is increased/decreased when the affected side plus main data is already full/empty + * - All arrays are left-aligned and truncated + * + * In addition to the data slices (`prefix1`, `prefix2`, ..., `dataN`, ..., `suffix2`, `suffix1`) we store a running + * count of elements after each prefix for more efficient indexing without having to dereference all prefix arrays. + */ +sealed abstract class Vector[+A] private[immutable] (private[immutable] final val prefix1: Arr1) + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, Vector, Vector[A]] + with StrictOptimizedSeqOps[A, Vector, Vector[A]] + with IterableFactoryDefaults[A, Vector] + with DefaultSerializable { + + override def iterableFactory: SeqFactory[Vector] = Vector + + override final def length: Int = + if(this.isInstanceOf[BigVector[_]]) this.asInstanceOf[BigVector[_]].length0 + else prefix1.length + + override final def iterator: Iterator[A] = + if(this.isInstanceOf[Vector0.type]) Vector.emptyIterator + else new NewVectorIterator(this, length, vectorSliceCount) + + override final protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Vector[A] = { + var i = 0 + val len = prefix1.length + while (i != len) { + if (pred(prefix1(i).asInstanceOf[A]) == isFlipped) { + // each 1 bit indicates that index passes the filter. + // all indices < i are also assumed to pass the filter + var bitmap = 0 + var j = i + 1 + while (j < len) { + if (pred(prefix1(j).asInstanceOf[A]) != isFlipped) { + bitmap |= (1 << j) + } + j += 1 + } + val newLen = i + java.lang.Integer.bitCount(bitmap) + + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + var k = 0 + while(k < i) { + b.addOne(prefix1(k).asInstanceOf[A]) + k += 1 + } + k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + b.addOne(prefix1(k).asInstanceOf[A]) + i += 1 + } + k += 1 + } + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + return b.result() + } else { + if (newLen == 0) return Vector0 + val newData = new Array[AnyRef](newLen) + System.arraycopy(prefix1, 0, newData, 0, i) + var k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + newData(i) = prefix1(k) + i += 1 + } + k += 1 + } + return new Vector1[A](newData) + } + } + i += 1 + } + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + b.initFrom(prefix1) + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + b.result() + } else this + } + + // Dummy overrides to refine result types for binary compatibility: + override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem) + override def appended[B >: A](elem: B): Vector[B] = super.appended(elem) + override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem) + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): Vector[B] = { + val k = prefix.knownSize + if (k == 0) this + else if (k < 0) super.prependedAll(prefix) + else prependedAll0(prefix, k) + } + + override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): Vector[B] = { + val k = suffix.knownSize + if (k == 0) this + else if (k < 0) super.appendedAll(suffix) + else appendedAll0(suffix, k) + } + + protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + // k >= 0, k = prefix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) { + var v: 
Vector[B] = this + val it = IndexedSeq.from(prefix).reverseIterator + while (it.hasNext) v = it.next() +: v + v + } else if (this.size < (k >>> Log2ConcatFaster) && prefix.isInstanceOf[Vector[_]]) { + var v = prefix.asInstanceOf[Vector[B]] + val it = this.iterator + while (it.hasNext) v = v :+ it.next() + v + } else if (k < this.size - AlignToFaster) { + new VectorBuilder[B].alignTo(k, this).addAll(prefix).addAll(this).result() + } else super.prependedAll(prefix) + } + + protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + // k >= 0, k = suffix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit) { + var v: Vector[B] = this + suffix match { + case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) + case _ => suffix.iterator.foreach(x => v = v.appended(x)) + } + v + } else if (this.size < (k >>> Log2ConcatFaster) && suffix.isInstanceOf[Vector[_]]) { + var v = suffix.asInstanceOf[Vector[B]] + val ri = this.reverseIterator + while (ri.hasNext) v = v.prepended(ri.next()) + v + } else if (this.size < k - AlignToFaster && suffix.isInstanceOf[Vector[_]]) { + val v = suffix.asInstanceOf[Vector[B]] + new VectorBuilder[B].alignTo(this.size, v).addAll(this).addAll(v).result() + } else new VectorBuilder[B].initFrom(this).addAll(suffix).result() + } + + override def className = "Vector" + + @inline override final def take(n: Int): Vector[A] = slice(0, n) + @inline override final def drop(n: Int): Vector[A] = slice(n, length) + @inline override final def takeRight(n: Int): Vector[A] = slice(length - mmax(n, 0), length) + @inline override final def dropRight(n: Int): Vector[A] = slice(0, length - mmax(n, 0)) + override def tail: Vector[A] = slice(1, length) + override def init: Vector[A] = slice(0, length-1) + + /** Like slice but parameters must be 0 <= lo < hi < length */ + protected[this] def slice0(lo: Int, hi: Int): Vector[A] + + /** Number of slices */ + protected[immutable] def vectorSliceCount: Int + /** Slice at index */ + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] + /** Length of all slices up to and including index */ + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int + + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) + + override def toVector: Vector[A] = this + + override protected def applyPreferredMaxLength: Int = Vector.defaultApplyPreferredMaxLength + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + val s = shape.shape match { + case StepperShape.IntShape => new IntVectorStepper(iterator.asInstanceOf[NewVectorIterator[Int]]) + case StepperShape.LongShape => new LongVectorStepper(iterator.asInstanceOf[NewVectorIterator[Long]]) + case StepperShape.DoubleShape => new DoubleVectorStepper(iterator.asInstanceOf[NewVectorIterator[Double]]) + case _ => shape.parUnbox(new AnyVectorStepper[A](iterator.asInstanceOf[NewVectorIterator[A]])) + } + s.asInstanceOf[S with EfficientSplit] + } + + protected[this] def ioob(index: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${length-1})") + + override final def head: A = + if (prefix1.length == 0) throw new NoSuchElementException("empty.head") + else prefix1(0).asInstanceOf[A] + + override final def last: A = { + if(this.isInstanceOf[BigVector[_]]) { + val suffix = this.asInstanceOf[BigVector[_]].suffix1 + if(suffix.length == 
0) throw new NoSuchElementException("empty.tail") + else suffix(suffix.length-1) + } else prefix1(prefix1.length-1) + }.asInstanceOf[A] + + override final def foreach[U](f: A => U): Unit = { + val c = vectorSliceCount + var i = 0 + while (i < c) { + foreachRec(vectorSliceDim(c, i) - 1, vectorSlice(i), f) + i += 1 + } + } + + // The following definitions are needed for binary compatibility with ParVector + private[collection] def startIndex: Int = 0 + private[collection] def endIndex: Int = length + private[collection] def initIterator[B >: A](s: VectorIterator[B]): Unit = + s.it = iterator.asInstanceOf[NewVectorIterator[B]] +} + + +/** This class only exists because we cannot override `slice` in `Vector` in a binary-compatible way */ +private sealed abstract class VectorImpl[+A](_prefix1: Arr1) extends Vector[A](_prefix1) { + + override final def slice(from: Int, until: Int): Vector[A] = { + val lo = mmax(from, 0) + val hi = mmin(until, length) + if (hi <= lo) Vector0 + else if (hi - lo == length) this + else slice0(lo, hi) + } +} + + +/** Vector with suffix and length fields; all Vector subclasses except Vector1 extend this */ +private sealed abstract class BigVector[+A](_prefix1: Arr1, private[immutable] val suffix1: Arr1, private[immutable] val length0: Int) extends VectorImpl[A](_prefix1) { + + protected[immutable] final def foreachRest[U](f: A => U): Unit = { + val c = vectorSliceCount + var i = 1 + while(i < c) { + foreachRec(vectorSliceDim(c, i)-1, vectorSlice(i), f) + i += 1 + } + } +} + + +/** Empty vector */ +private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) { + + def apply(index: Int): Nothing = throw ioob(index) + + override def updated[B >: Nothing](index: Int, elem: B): Vector[B] = throw ioob(index) + + override def appended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) + + override def prepended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) + + override def map[B](f: Nothing => B): Vector[B] = this + + override def tail: Vector[Nothing] = throw new UnsupportedOperationException("empty.tail") + + override def init: Vector[Nothing] = throw new UnsupportedOperationException("empty.init") + + protected[this] def slice0(lo: Int, hi: Int): Vector[Nothing] = this + + protected[immutable] def vectorSliceCount: Int = 0 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = null + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = 0 + + override def equals(o: Any): Boolean = { + if(this eq o.asInstanceOf[AnyRef]) true + else o match { + case that: Vector[_] => false + case o => super.equals(o) + } + } + + override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + Vector.from(prefix) + + override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = + Vector.from(suffix) + + override protected[this] def ioob(index: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (empty vector)") +} + +/** Flat ArraySeq-like structure */ +private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) { + + @inline def apply(index: Int): A = { + if(index >= 0 && index < prefix1.length) + prefix1(index).asInstanceOf[A] + else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < prefix1.length) + new Vector1(copyUpdate(prefix1, index, elem)) + else throw ioob(index) + } + + override def appended[B >: 
A](elem: B): Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyAppend1(prefix1, elem)) + else new Vector2(prefix1, WIDTH, empty2, wrap1(elem), WIDTH+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyPrepend1(elem, prefix1)) + else new Vector2(wrap1(elem), 1, empty2, prefix1, len1+1) + } + + override def map[B](f: A => B): Vector[B] = new Vector1(mapElems1(prefix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = + new Vector1(copyOfRange(prefix1, lo, hi)) + + override def tail: Vector[A] = + if(prefix1.length == 1) Vector0 + else new Vector1(copyTail(prefix1)) + + override def init: Vector[A] = + if(prefix1.length == 1) Vector0 + else new Vector1(copyInit(prefix1)) + + protected[immutable] def vectorSliceCount: Int = 1 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1 + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case data1b => new Vector1(data1b) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val data1b = append1IfSpace(prefix1, suffix) + if(data1b ne null) new Vector1(data1b) + else super.appendedAll0(suffix, k) + } +} + + +/** 2-dimensional radix-balanced finger tree */ +private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val data2: Arr2, + _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + data2: Arr2 = data2, + suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector2(prefix1, len1, data2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1 + if(io >= 0) { + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) data2(i2)(i1) + else suffix1(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1) { + val io = index - len1 + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) copy(data2 = copyUpdate(data2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(data2.length < WIDTH-2) copy(data2 = copyAppend(data2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else new Vector3(prefix1, len1, data2, WIDTH*(WIDTH-2) + len1, empty3, wrap2(suffix1), wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, length0 = length0+1) + else if(data2.length < WIDTH-2) copy(wrap1(elem), 1, copyPrepend(prefix1, data2), length0 = length0+1) + else new Vector3(wrap1(elem), 1, wrap2(prefix1), len1+1, empty3, data2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), data2 = mapElems(2, data2, f), suffix1 = 
mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, data2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 3 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => data2 + case 2 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => length0 - suffix1.length + case 2 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 3-dimensional radix-balanced finger tree */ +private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val data3: Arr3, + private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + data3: Arr3 = data3, + suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector3(prefix1, len1, prefix2, len12, data3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12 + if(io >= 0) { + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i3 < data3.length) data3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12) { + val io = index - len12 + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i3 < data3.length ) copy(data3 = copyUpdate(data3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = 
copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(data3.length < WIDTH-2) copy(data3 = copyAppend(data3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector4(prefix1, len1, prefix2, len12, data3, (WIDTH-2)*WIDTH2 + len12, empty4, wrap3(copyAppend(suffix2, suffix1)), empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(prefix1 = copyPrepend1(elem, prefix1), len1 = len1+1, len12 = len12+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = copyPrepend(prefix1, prefix2), len12 = len12+1, length0 = length0+1) + else if(data3.length < WIDTH-2) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = empty2, len12 = 1, data3 = copyPrepend(copyPrepend(prefix1, prefix2), data3), length0 = length0+1) + else new Vector4(wrap1(elem), 1, empty2, 1, wrap3(copyPrepend(prefix1, prefix2)), len12+1, empty4, data3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), + data3 = mapElems(3, data3, f), + suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, data3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(prefix1 = copyTail(prefix1), len1 = len1-1, len12 = len12-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 5 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => data3 + case 3 => suffix2 + case 4 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len12 + data3.length*WIDTH2 + case 3 => length0 - suffix1.length + case 4 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 4-dimensional radix-balanced finger tree */ +private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val data4: Arr4, + private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: 
Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + data4: Arr4 = data4, + suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len123 + if(io >= 0) { + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i4 < data4.length) data4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len123) { + val io = index - len123 + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i4 < data4.length ) copy(data4 = copyUpdate(data4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data4.length < WIDTH-2) copy(data4 = copyAppend(data4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, data4, (WIDTH-2)*WIDTH3 + len123, empty5, wrap4(copyAppend(suffix3, copyAppend(suffix2, suffix1))), empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, length0 = length0+1) + else if(data4.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), data4), length0 = length0+1) + else new Vector5(wrap1(elem), 1, empty2, 1, empty3, 1, wrap4(copyPrepend(copyPrepend(prefix1, prefix2), prefix3)), len123+1, empty5, data4, suffix3, suffix2, suffix1, 
length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), + data4 = mapElems(4, data4, f), + suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, data4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 7 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => data4 + case 4 => suffix3 + case 5 => suffix2 + case 6 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len123 + data4.length*WIDTH3 + case 4 => len123 + data4.length*WIDTH3 + suffix3.length*WIDTH2 + case 5 => length0 - suffix1.length + case 6 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 5-dimensional radix-balanced finger tree */ +private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val data5: Arr5, + private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + data5: Arr5 = data5, + suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1234 + if(io >= 0) { + val i5 = io >>> BITS4 + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + 
val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i5 < data5.length) data5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1234) { + val io = index - len1234 + val i5 = io >>> BITS4 + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i5 < data5.length ) copy(data5 = copyUpdate(data5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data5.length < WIDTH-2) copy(data5 = copyAppend(data5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, (WIDTH-2)*WIDTH4 + len1234, empty6, wrap5(copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), empty4, empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, length0 = 
length0+1) + else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, length0 = length0+1) + else if(data5.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), data5), length0 = length0+1) + else new Vector6(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, wrap5(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4)), len1234+1, empty6, data5, suffix4, suffix3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), + data5 = mapElems(5, data5, f), + suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, data5) + b.consider(4, suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 9 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => data5 + case 5 => suffix4 + case 6 => suffix3 + case 7 => suffix2 + case 8 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len1234 + data5.length*WIDTH4 + case 5 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + case 6 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 7 => length0 - suffix1.length + case 8 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 6-dimensional radix-balanced finger tree */ +private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val prefix5: Arr5, 
private[immutable] val len12345: Int, + private[immutable] val data6: Arr6, + private[immutable] val suffix5: Arr5, private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + prefix5: Arr5 = prefix5, len12345: Int = len12345, + data6: Arr6 = data6, + suffix5: Arr5 = suffix5, suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12345 + if(io >= 0) { + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i6 < data6.length) data6(i6)(i5)(i4)(i3)(i2)(i1) + else if(i5 < suffix5.length) suffix5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1234) { + val io = index - len1234 + prefix5(io >>> BITS4)((io >>> BITS3) & MASK)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12345) { + val io = index - len12345 + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i6 < data6.length ) copy(data6 = copyUpdate(data6, i6, i5, i4, i3, i2, i1, elem)) + else if(i5 < suffix5.length) copy(suffix5 = copyUpdate(suffix5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len1234) { + val io = index - len1234 + copy(prefix5 = copyUpdate(prefix5, io >>> BITS4, (io >>> BITS3) & MASK, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else 
{ + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1 ) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1 ) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1 ) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix5.length < WIDTH-1 ) copy(suffix5 = copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(data6 = copyAppend(data6, copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))))), suffix5 = empty5, suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12345 < WIDTH5 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), len12345+1, length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, empty5, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), data6), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), prefix5 = mapElems(5, prefix5, f), + data6 = mapElems(6, data6, f), + suffix5 = mapElems(5, suffix5, f), suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, prefix5) + b.consider(6, data6) + b.consider(5, suffix5) + b.consider(4, suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = 
len1234-1, len12345 = len12345-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 11 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => prefix5 + case 5 => data6 + case 6 => suffix5 + case 7 => suffix4 + case 8 => suffix3 + case 9 => suffix2 + case 10 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len12345 + case 5 => len12345 + data6.length*WIDTH5 + case 6 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + case 7 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + case 8 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 9 => length0 - suffix1.length + case 10 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + len12345 = len12345 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** Helper class for vector slicing. It is initialized with the validated start and end index, + * then the vector slices are added in succession with `consider`. No matter what the dimension + * of the originating vector is or where the cut is performed, this always results in a + * structure with the highest-dimensional data in the middle and fingers of decreasing dimension + * at both ends, which can be turned into a new vector with very little rebalancing. 
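+ *
+ * As an illustrative sketch (mirroring how the `slice0` methods above drive this
+ * builder): slicing a 3-dimensional vector considers its five slices in order and
+ * keeps only the parts that overlap the requested range:
+ * {{{
+ *   val b = new VectorSliceBuilder(lo, hi)
+ *   b.consider(1, prefix1); b.consider(2, prefix2)
+ *   b.consider(3, data3)
+ *   b.consider(2, suffix2); b.consider(1, suffix1)
+ *   b.result()
+ * }}}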
+ */ +private final class VectorSliceBuilder(lo: Int, hi: Int) { + //println(s"***** VectorSliceBuilder($lo, $hi)") + + private[this] val slices = new Array[Array[AnyRef]](11) + private[this] var len, pos, maxDim = 0 + + @inline private[this] def prefixIdx(n: Int) = n-1 + @inline private[this] def suffixIdx(n: Int) = 11-n + + def consider[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** consider($n, /${a.length})") + val count = a.length * (1 << (BITS*(n-1))) + val lo0 = mmax(lo-pos, 0) + val hi0 = mmin(hi-pos, count) + if(hi0 > lo0) { + addSlice(n, a, lo0, hi0) + len += (hi0 - lo0) + } + pos += count + } + + private[this] def addSlice[T <: AnyRef](n: Int, a: Array[T], lo: Int, hi: Int): Unit = { + //println(s"***** addSlice($n, /${a.length}, $lo, $hi)") + if(n == 1) { + add(1, copyOrUse(a, lo, hi)) + } else { + val bitsN = BITS * (n-1) + val widthN = 1 << bitsN + val loN = lo >>> bitsN + val hiN = hi >>> bitsN + val loRest = lo & (widthN - 1) + val hiRest = hi & (widthN - 1) + //println(s"***** bitsN=$bitsN, loN=$loN, hiN=$hiN, loRest=$loRest, hiRest=$hiRest") + if(loRest == 0) { + if(hiRest == 0) { + add(n, copyOrUse(a, loN, hiN)) + } else { + if(hiN > loN) add(n, copyOrUse(a, loN, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) + } + } else { + if(hiN == loN) { + addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, hiRest) + } else { + addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, widthN) + if(hiRest == 0) { + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) + } else { + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) + } + } + } + } + } + + private[this] def add[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** add($n, /${a.length})") + val idx = + if(n <= maxDim) suffixIdx(n) + else { maxDim = n; prefixIdx(n) } + slices(idx) = a.asInstanceOf[Array[AnyRef]] + } + + def result[A](): Vector[A] = { + //println(s"***** result: $len, $maxDim") + if(len <= 32) { + if(len == 0) Vector0 + else { + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + //println(s"***** prefix1: ${if(prefix1 == null) "null" else prefix1.mkString("[", ",", "]")}, suffix1: ${if(suffix1 == null) "null" else suffix1.mkString("[", ",", "]")}") + val a: Arr1 = + if(prefix1 ne null) { + if(suffix1 ne null) concatArrays(prefix1, suffix1) + else prefix1 + } else if(suffix1 ne null) suffix1 + else { + val prefix2 = slices(prefixIdx(2)).asInstanceOf[Arr2] + if(prefix2 ne null) prefix2(0) + else { + val suffix2 = slices(suffixIdx(2)).asInstanceOf[Arr2] + suffix2(0) + } + } + new Vector1(a) + } + } else { + balancePrefix(1) + balanceSuffix(1) + var resultDim = maxDim + if(resultDim < 6) { + val pre = slices(prefixIdx(maxDim)) + val suf = slices(suffixIdx(maxDim)) + if((pre ne null) && (suf ne null)) { + // The highest-dimensional data consists of two slices: concatenate if they fit into the main data array, + // otherwise increase the dimension + if(pre.length + suf.length <= WIDTH-2) { + slices(prefixIdx(maxDim)) = concatArrays(pre, suf) + slices(suffixIdx(maxDim)) = null + } else resultDim += 1 + } else { + // A single highest-dimensional slice could have length WIDTH-1 if it came from a prefix or suffix but we + // only allow WIDTH-2 for the main data, so increase the dimension in this case + val one = if(pre ne null) pre else suf + if(one.length > WIDTH-2) resultDim += 1 + } + } + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + val len1 = 
prefix1.length + val res = (resultDim: @switch) match { + case 2 => + val data2 = dataOr(2, empty2) + new Vector2[A](prefix1, len1, data2, suffix1, len) + case 3 => + val prefix2 = prefixOr(2, empty2) + val data3 = dataOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + new Vector3[A](prefix1, len1, prefix2, len12, data3, suffix2, suffix1, len) + case 4 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val data4 = dataOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + new Vector4[A](prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, len) + case 5 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val data5 = dataOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + new Vector5[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, len) + case 6 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val prefix5 = prefixOr(5, empty5) + val data6 = dataOr(6, empty6) + val suffix5 = suffixOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + val len12345 = len1234 + (prefix5.length * WIDTH4) + new Vector6[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, len) + } + res + } + } + + @inline private[this] def prefixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] else a + } + + @inline private[this] def suffixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } + + @inline private[this] def dataOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] + else { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } + } + + /** Ensure prefix is not empty */ + private[this] def balancePrefix(n: Int): Unit = { + if(slices(prefixIdx(n)) eq null) { + if(n == maxDim) { + slices(prefixIdx(n)) = slices(suffixIdx(n)) + slices(suffixIdx(n)) = null + } else { + balancePrefix(n+1) + val preN1 = slices(prefixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(preN1 ne null) + slices(prefixIdx(n)) = preN1(0) + if(preN1.length == 1) { + slices(prefixIdx(n+1)) = null + if((maxDim == n+1) && (slices(suffixIdx(n+1)) eq null)) maxDim = n + } else { + slices(prefixIdx(n+1)) = copyOfRange(preN1, 1, preN1.length).asInstanceOf[Array[AnyRef]] + } + } + } + } + + /** Ensure suffix is not empty */ + private[this] def balanceSuffix(n: Int): Unit = { + if(slices(suffixIdx(n)) eq null) { + if(n == maxDim) { + slices(suffixIdx(n)) = slices(prefixIdx(n)) + slices(prefixIdx(n)) = null + } else { + balanceSuffix(n+1) + val sufN1 = 
slices(suffixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(sufN1 ne null, s"n=$n, maxDim=$maxDim, slices=${slices.mkString(",")}") + slices(suffixIdx(n)) = sufN1(sufN1.length-1) + if(sufN1.length == 1) { + slices(suffixIdx(n+1)) = null + if((maxDim == n+1) && (slices(prefixIdx(n+1)) eq null)) maxDim = n + } else { + slices(suffixIdx(n+1)) = copyOfRange(sufN1, 0, sufN1.length-1).asInstanceOf[Array[AnyRef]] + } + } + } + } + + override def toString: String = + s"VectorSliceBuilder(lo=$lo, hi=$hi, len=$len, pos=$pos, maxDim=$maxDim)" + + private[immutable] def getSlices: Array[Array[AnyRef]] = slices +} + + +final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { + + private[this] var a6: Arr6 = _ + private[this] var a5: Arr5 = _ + private[this] var a4: Arr4 = _ + private[this] var a3: Arr3 = _ + private[this] var a2: Arr2 = _ + private[this] var a1: Arr1 = new Arr1(WIDTH) + private[this] var len1, lenRest, offset = 0 + private[this] var prefixIsRightAligned = false + private[this] var depth = 1 + + @inline private[this] final def setLen(i: Int): Unit = { + len1 = i & MASK + lenRest = i - len1 + } + + override def knownSize: Int = len1 + lenRest - offset + + @inline def size: Int = knownSize + @inline def isEmpty: Boolean = knownSize == 0 + @inline def nonEmpty: Boolean = knownSize != 0 + + def clear(): Unit = { + a6 = null + a5 = null + a4 = null + a3 = null + a2 = null + a1 = new Arr1(WIDTH) + len1 = 0 + lenRest = 0 + offset = 0 + prefixIsRightAligned = false + depth = 1 + } + + private[immutable] def initSparse(size: Int, elem: A): Unit = { + setLen(size) + Arrays.fill(a1, elem) + if(size > WIDTH) { + a2 = new Array(WIDTH) + Arrays.fill(a2.asInstanceOf[Array[AnyRef]], a1) + if(size > WIDTH2) { + a3 = new Array(WIDTH) + Arrays.fill(a3.asInstanceOf[Array[AnyRef]], a2) + if(size > WIDTH3) { + a4 = new Array(WIDTH) + Arrays.fill(a4.asInstanceOf[Array[AnyRef]], a3) + if(size > WIDTH4) { + a5 = new Array(WIDTH) + Arrays.fill(a5.asInstanceOf[Array[AnyRef]], a4) + if(size > WIDTH5) { + a6 = new Array(LASTWIDTH) + Arrays.fill(a6.asInstanceOf[Array[AnyRef]], a5) + depth = 6 + } else depth = 5 + } else depth = 4 + } else depth = 3 + } else depth = 2 + } else depth = 1 + } + + private[immutable] def initFrom(prefix1: Arr1): Unit = { + depth = 1 + setLen(prefix1.length) + a1 = copyOrUse(prefix1, 0, WIDTH) + if(len1 == 0 && lenRest > 0) { + // force advance() on next addition: + len1 = WIDTH + lenRest -= WIDTH + } + } + + private[immutable] def initFrom(v: Vector[_]): this.type = { + (v.vectorSliceCount: @switch) match { + case 0 => + case 1 => + val v1 = v.asInstanceOf[Vector1[_]] + depth = 1 + setLen(v1.prefix1.length) + a1 = copyOrUse(v1.prefix1, 0, WIDTH) + case 3 => + val v2 = v.asInstanceOf[Vector2[_]] + val d2 = v2.data2 + a1 = copyOrUse(v2.suffix1, 0, WIDTH) + depth = 2 + offset = WIDTH - v2.len1 + setLen(v2.length0 + offset) + a2 = new Arr2(WIDTH) + a2(0) = v2.prefix1 + System.arraycopy(d2, 0, a2, 1, d2.length) + a2(d2.length+1) = a1 + case 5 => + val v3 = v.asInstanceOf[Vector3[_]] + val d3 = v3.data3 + val s2 = v3.suffix2 + a1 = copyOrUse(v3.suffix1, 0, WIDTH) + depth = 3 + offset = WIDTH2 - v3.len12 + setLen(v3.length0 + offset) + a3 = new Arr3(WIDTH) + a3(0) = copyPrepend(v3.prefix1, v3.prefix2) + System.arraycopy(d3, 0, a3, 1, d3.length) + a2 = copyOf(s2, WIDTH) + a3(d3.length+1) = a2 + a2(s2.length) = a1 + case 7 => + val v4 = v.asInstanceOf[Vector4[_]] + val d4 = v4.data4 + val s3 = v4.suffix3 + val s2 = v4.suffix2 + a1 = copyOrUse(v4.suffix1, 0, WIDTH) + depth = 4 
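+        // `offset` counts virtual leading slots assumed before the actual prefix, which
+        // keeps len1/lenRest aligned to WIDTH boundaries; knownSize subtracts it again.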
+ offset = WIDTH3 - v4.len123 + setLen(v4.length0 + offset) + a4 = new Arr4(WIDTH) + a4(0) = copyPrepend(copyPrepend(v4.prefix1, v4.prefix2), v4.prefix3) + System.arraycopy(d4, 0, a4, 1, d4.length) + a3 = copyOf(s3, WIDTH) + a2 = copyOf(s2, WIDTH) + a4(d4.length+1) = a3 + a3(s3.length) = a2 + a2(s2.length) = a1 + case 9 => + val v5 = v.asInstanceOf[Vector5[_]] + val d5 = v5.data5 + val s4 = v5.suffix4 + val s3 = v5.suffix3 + val s2 = v5.suffix2 + a1 = copyOrUse(v5.suffix1, 0, WIDTH) + depth = 5 + offset = WIDTH4 - v5.len1234 + setLen(v5.length0 + offset) + a5 = new Arr5(WIDTH) + a5(0) = copyPrepend(copyPrepend(copyPrepend(v5.prefix1, v5.prefix2), v5.prefix3), v5.prefix4) + System.arraycopy(d5, 0, a5, 1, d5.length) + a4 = copyOf(s4, WIDTH) + a3 = copyOf(s3, WIDTH) + a2 = copyOf(s2, WIDTH) + a5(d5.length+1) = a4 + a4(s4.length) = a3 + a3(s3.length) = a2 + a2(s2.length) = a1 + case 11 => + val v6 = v.asInstanceOf[Vector6[_]] + val d6 = v6.data6 + val s5 = v6.suffix5 + val s4 = v6.suffix4 + val s3 = v6.suffix3 + val s2 = v6.suffix2 + a1 = copyOrUse(v6.suffix1, 0, WIDTH) + depth = 6 + offset = WIDTH5 - v6.len12345 + setLen(v6.length0 + offset) + a6 = new Arr6(LASTWIDTH) + a6(0) = copyPrepend(copyPrepend(copyPrepend(copyPrepend(v6.prefix1, v6.prefix2), v6.prefix3), v6.prefix4), v6.prefix5) + System.arraycopy(d6, 0, a6, 1, d6.length) + a5 = copyOf(s5, WIDTH) + a4 = copyOf(s4, WIDTH) + a3 = copyOf(s3, WIDTH) + a2 = copyOf(s2, WIDTH) + a6(d6.length+1) = a5 + a5(s5.length) = a4 + a4(s4.length) = a3 + a3(s3.length) = a2 + a2(s2.length) = a1 + } + if(len1 == 0 && lenRest > 0) { + // force advance() on next addition: + len1 = WIDTH + lenRest -= WIDTH + } + this + } + + //TODO Make public; this method is only private for binary compatibility + private[collection] def alignTo(before: Int, bigVector: Vector[A]): this.type = { + if (len1 != 0 || lenRest != 0) + throw new UnsupportedOperationException("A non-empty VectorBuilder cannot be aligned retrospectively. Please call .reset() or use a new VectorBuilder.") + val (prefixLength, maxPrefixLength) = bigVector match { + case Vector0 => (0, 1) + case v1: Vector1[_] => (0, 1) + case v2: Vector2[_] => (v2.len1, WIDTH) + case v3: Vector3[_] => (v3.len12, WIDTH2) + case v4: Vector4[_] => (v4.len123, WIDTH3) + case v5: Vector5[_] => (v5.len1234, WIDTH4) + case v6: Vector6[_] => (v6.len12345, WIDTH5) + } + if (maxPrefixLength == 1) return this // does not really make sense to align for <= 32 element-vector + val overallPrefixLength = (before + prefixLength) % maxPrefixLength + offset = (maxPrefixLength - overallPrefixLength) % maxPrefixLength + // pretend there are already `offset` elements added + advanceN(offset & ~MASK) + len1 = offset & MASK + prefixIsRightAligned = true + this + } + + /** + * Removes `offset` leading `null`s in the prefix. + * This is needed after calling `alignTo` and subsequent additions, + * directly before the result is used for creating a new Vector. + * Note that the outermost array keeps its length to keep the + * Builder re-usable. + * + * example: + * a2 = Array(null, ..., null, Array(null, .., null, 0, 1, .., x), Array(x+1, .., x+32), ...) + * becomes + * a2 = Array(Array(0, 1, .., x), Array(x+1, .., x+32), ..., ?, ..., ?) 
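+ *
+ * The trailing `?` slots above are stale entries left behind by the shift; they are
+ * no longer part of the logical contents once `offset` (and with it `lenRest`) has
+ * been reduced. For intuition, the per-level shift indices come from radix arithmetic
+ * on `offset` (illustrative values, with BITS = 5 and WIDTH = 32):
+ * {{{
+ *   val offset = 1000                  // a hypothetical depth-2 offset
+ *   val i2 = (offset >>> BITS) & MASK  // 31
+ *   val i1 = offset & MASK             // 8;  31*32 + 8 == 1000
+ * }}}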
+ */ + private[this] def leftAlignPrefix(): Unit = { + @inline def shrinkOffsetIfToLarge(width: Int): Unit = { + val newOffset = offset % width + lenRest -= offset - newOffset + offset = newOffset + } + var a: Array[AnyRef] = null // the array we modify + var aParent: Array[AnyRef] = null // a's parent, so aParent(0) == a + if (depth >= 6) { + a = a6.asInstanceOf[Array[AnyRef]] + val i = offset >>> BITS5 + if (i > 0) System.arraycopy(a, i, a, 0, LASTWIDTH - i) + shrinkOffsetIfToLarge(WIDTH5) + if ((lenRest >>> BITS5) == 0) depth = 5 + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 5) { + if (a == null) a = a5.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS4) & MASK + if (depth == 5) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a5 = a.asInstanceOf[Arr5] + shrinkOffsetIfToLarge(WIDTH4) + if ((lenRest >>> BITS4) == 0) depth = 4 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 4) { + if (a == null) a = a4.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS3) & MASK + if (depth == 4) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a4 = a.asInstanceOf[Arr4] + shrinkOffsetIfToLarge(WIDTH3) + if ((lenRest >>> BITS3) == 0) depth = 3 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 3) { + if (a == null) a = a3.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS2) & MASK + if (depth == 3) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a3 = a.asInstanceOf[Arr3] + shrinkOffsetIfToLarge(WIDTH2) + if ((lenRest >>> BITS2) == 0) depth = 2 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 2) { + if (a == null) a = a2.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS) & MASK + if (depth == 2) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a2 = a.asInstanceOf[Arr2] + shrinkOffsetIfToLarge(WIDTH) + if ((lenRest >>> BITS) == 0) depth = 1 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 1) { + if (a == null) a = a1.asInstanceOf[Array[AnyRef]] + val i = offset & MASK + if (depth == 1) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a1 = a.asInstanceOf[Arr1] + len1 -= offset + offset = 0 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + } + prefixIsRightAligned = false + } + + def addOne(elem: A): this.type = { + if(len1 == WIDTH) advance() + a1(len1) = elem.asInstanceOf[AnyRef] + len1 += 1 + this + } + + private[this] def addArr1(data: Arr1): Unit = { + val dl = data.length + if(dl > 0) { + if(len1 == WIDTH) advance() + val copy1 = mmin(WIDTH-len1, dl) + val copy2 = dl - copy1 + System.arraycopy(data, 0, a1, len1, copy1) + len1 += copy1 + if(copy2 > 0) { + advance() + System.arraycopy(data, copy1, a1, 0, copy2) + len1 += copy2 + } + } + } + + private[this] def addArrN(slice: Array[AnyRef], dim: Int): Unit = { +// assert(dim >= 2) +// assert(lenRest % WIDTH == 0) +// assert(len1 == 0 || len1 == WIDTH) + if (slice.isEmpty) return + if (len1 == WIDTH) advance() + val sl = slice.length + (dim: @switch) match { + case 2 => + // lenRest is always a multiple of WIDTH + val copy1 = mmin(((WIDTH2 - lenRest) >>> BITS) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS) & MASK + System.arraycopy(slice, 0, a2, 
destPos, copy1) + advanceN(WIDTH * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a2, 0, copy2) + advanceN(WIDTH * copy2) + } + case 3 => + if (lenRest % WIDTH2 != 0) { + // lenRest is not multiple of WIDTH2, so this slice does not align, need to try lower dimension + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 2)) + return + } + val copy1 = mmin(((WIDTH3 - lenRest) >>> BITS2) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS2) & MASK + System.arraycopy(slice, 0, a3, destPos, copy1) + advanceN(WIDTH2 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a3, 0, copy2) + advanceN(WIDTH2 * copy2) + } + case 4 => + if (lenRest % WIDTH3 != 0) { + // lenRest is not multiple of WIDTH3, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 3)) + return + } + val copy1 = mmin(((WIDTH4 - lenRest) >>> BITS3) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS3) & MASK + System.arraycopy(slice, 0, a4, destPos, copy1) + advanceN(WIDTH3 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a4, 0, copy2) + advanceN(WIDTH3 * copy2) + } + case 5 => + if (lenRest % WIDTH4 != 0) { + // lenRest is not multiple of WIDTH4, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 4)) + return + } + val copy1 = mmin(((WIDTH5 - lenRest) >>> BITS4) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS4) & MASK + System.arraycopy(slice, 0, a5, destPos, copy1) + advanceN(WIDTH4 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a5, 0, copy2) + advanceN(WIDTH4 * copy2) + } + case 6 => // note width is now LASTWIDTH + if (lenRest % WIDTH5 != 0) { + // lenRest is not multiple of WIDTH5, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 5)) + return + } + val copy1 = sl + // there is no copy2 because there can't be another a6 to copy to + val destPos = lenRest >>> BITS5 + if (destPos + copy1 > LASTWIDTH) + throw new IllegalArgumentException("exceeding 2^31 elements") + System.arraycopy(slice, 0, a6, destPos, copy1) + advanceN(WIDTH5 * copy1) + } + } + + private[this] def addVector(xs: Vector[A]): this.type = { + val sliceCount = xs.vectorSliceCount + var sliceIdx = 0 + while(sliceIdx < sliceCount) { + val slice = xs.vectorSlice(sliceIdx) + vectorSliceDim(sliceCount, sliceIdx) match { + case 1 => addArr1(slice.asInstanceOf[Arr1]) + case n if len1 == WIDTH || len1 == 0 => + addArrN(slice.asInstanceOf[Array[AnyRef]], n) + case n => foreachRec(n-2, slice, addArr1) + } + sliceIdx += 1 + } + this + } + + override def addAll(xs: IterableOnce[A]^): this.type = xs match { + case v: Vector[_] => + if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v) + else addVector(v.asInstanceOf[Vector[A]]) + case _ => + super.addAll(xs) + } + + private[this] def advance(): Unit = { + val idx = lenRest + WIDTH + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advanceN(n: Int): Unit = if (n > 0) { + // assert(n % 32 == 0) + val idx = lenRest + n + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advance1(idx: Int, xor: Int): Unit = { + if (xor <= 0) { // level = 6 or something very unexpected happened + throw new IllegalArgumentException(s"advance1($idx, $xor): a1=$a1, a2=$a2, a3=$a3, a4=$a4, a5=$a5, a6=$a6, depth=$depth") + } else if 
(xor < WIDTH2) { // level = 1 + if (depth <= 1) { a2 = new Array(WIDTH); a2(0) = a1; depth = 2 } + a1 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + } else if (xor < WIDTH3) { // level = 2 + if (depth <= 2) { a3 = new Array(WIDTH); a3(0) = a2; depth = 3 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + } else if (xor < WIDTH4) { // level = 3 + if (depth <= 3) { a4 = new Array(WIDTH); a4(0) = a3; depth = 4 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + } else if (xor < WIDTH5) { // level = 4 + if (depth <= 4) { a5 = new Array(WIDTH); a5(0) = a4; depth = 5 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + } else { // level = 5 + if (depth <= 5) { a6 = new Array(LASTWIDTH); a6(0) = a5; depth = 6 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + a5 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + a6(idx >>> BITS5) = a5 + } + } + + def result(): Vector[A] = { + if (prefixIsRightAligned) leftAlignPrefix() + val len = len1 + lenRest + val realLen = len - offset + if(realLen == 0) Vector.empty + else if(len < 0) throw new IndexOutOfBoundsException(s"Vector cannot have negative size $len") + else if(len <= WIDTH) { + new Vector1(copyIfDifferentSize(a1, realLen)) + } else if(len <= WIDTH2) { + val i1 = (len-1) & MASK + val i2 = (len-1) >>> BITS + val data = copyOfRange(a2, 1, i2) + val prefix1 = a2(0) + val suffix1 = copyIfDifferentSize(a2(i2), i1+1) + new Vector2(prefix1, WIDTH-offset, data, suffix1, realLen) + } else if(len <= WIDTH3) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) + val data = copyOfRange(a3, 1, i3) + val prefix2 = copyTail(a3(0)) + val prefix1 = a3(0)(0) + val suffix2 = copyOf(a3(i3), i2) + val suffix1 = copyIfDifferentSize(a3(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + new Vector3(prefix1, len1, prefix2, len12, data, suffix2, suffix1, realLen) + } else if(len <= WIDTH4) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) + val data = copyOfRange(a4, 1, i4) + val prefix3 = copyTail(a4(0)) + val prefix2 = copyTail(a4(0)(0)) + val prefix1 = a4(0)(0)(0) + val suffix3 = copyOf(a4(i4), i3) + val suffix2 = copyOf(a4(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a4(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data, suffix3, suffix2, suffix1, realLen) + } else if(len <= WIDTH5) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) + val data = copyOfRange(a5, 1, i5) + val prefix4 = copyTail(a5(0)) + val prefix3 = copyTail(a5(0)(0)) + val prefix2 = copyTail(a5(0)(0)(0)) + val prefix1 = a5(0)(0)(0)(0) + val suffix4 = copyOf(a5(i5), i4) + val suffix3 = copyOf(a5(i5)(i4), i3) + val suffix2 = copyOf(a5(i5)(i4)(i3), 
i2) + val suffix1 = copyIfDifferentSize(a5(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data, suffix4, suffix3, suffix2, suffix1, realLen) + } else { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) & MASK + val i6 = ((len-1) >>> BITS5) + val data = copyOfRange(a6, 1, i6) + val prefix5 = copyTail(a6(0)) + val prefix4 = copyTail(a6(0)(0)) + val prefix3 = copyTail(a6(0)(0)(0)) + val prefix2 = copyTail(a6(0)(0)(0)(0)) + val prefix1 = a6(0)(0)(0)(0)(0) + val suffix5 = copyOf(a6(i6), i5) + val suffix4 = copyOf(a6(i6)(i5), i4) + val suffix3 = copyOf(a6(i6)(i5)(i4), i3) + val suffix2 = copyOf(a6(i6)(i5)(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a6(i6)(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + val len12345 = len1234 + prefix5.length*WIDTH4 + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data, suffix5, suffix4, suffix3, suffix2, suffix1, realLen) + } + } + + override def toString: String = + s"VectorBuilder(len1=$len1, lenRest=$lenRest, offset=$offset, depth=$depth)" + + private[immutable] def getData: Array[Array[_]] = Array[Array[AnyRef]]( + a1, a2.asInstanceOf[Array[AnyRef]], a3.asInstanceOf[Array[AnyRef]], a4.asInstanceOf[Array[AnyRef]], + a5.asInstanceOf[Array[AnyRef]], a6.asInstanceOf[Array[AnyRef]] + ).asInstanceOf[Array[Array[_]]] +} + + +/** Compile-time definitions for Vector. No references to this object should appear in bytecode. 
*/ +private[immutable] object VectorInline { + // compile-time numeric constants + final val BITS = 5 + final val WIDTH = 1 << BITS + final val MASK = WIDTH - 1 + final val BITS2 = BITS * 2 + final val WIDTH2 = 1 << BITS2 + final val BITS3 = BITS * 3 + final val WIDTH3 = 1 << BITS3 + final val BITS4 = BITS * 4 + final val WIDTH4 = 1 << BITS4 + final val BITS5 = BITS * 5 + final val WIDTH5 = 1 << BITS5 + final val LASTWIDTH = WIDTH << 1 // 1 extra bit in the last level to go up to Int.MaxValue (2^31-1) instead of 2^30: + final val Log2ConcatFaster = 5 + final val AlignToFaster = 64 + + type Arr1 = Array[AnyRef] + type Arr2 = Array[Array[AnyRef]] + type Arr3 = Array[Array[Array[AnyRef]]] + type Arr4 = Array[Array[Array[Array[AnyRef]]]] + type Arr5 = Array[Array[Array[Array[Array[AnyRef]]]]] + type Arr6 = Array[Array[Array[Array[Array[Array[AnyRef]]]]]] + + /** Dimension of the slice at index */ + @inline def vectorSliceDim(count: Int, idx: Int): Int = { + val c = count/2 + c+1-abs(idx-c) + } + + @inline def copyOrUse[T <: AnyRef](a: Array[T], start: Int, end: Int): Array[T] = + if(start == 0 && end == a.length) a else copyOfRange[T](a, start, end) + + @inline final def copyTail[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 1, a.length) + + @inline final def copyInit[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 0, a.length-1) + + @inline final def copyIfDifferentSize[T <: AnyRef](a: Array[T], len: Int): Array[T] = + if(a.length == len) a else copyOf[T](a, len) + + @inline final def wrap1(x: Any ): Arr1 = { val a = new Arr1(1); a(0) = x.asInstanceOf[AnyRef]; a } + @inline final def wrap2(x: Arr1): Arr2 = { val a = new Arr2(1); a(0) = x; a } + @inline final def wrap3(x: Arr2): Arr3 = { val a = new Arr3(1); a(0) = x; a } + @inline final def wrap4(x: Arr3): Arr4 = { val a = new Arr4(1); a(0) = x; a } + @inline final def wrap5(x: Arr4): Arr5 = { val a = new Arr5(1); a(0) = x; a } + + @inline final def copyUpdate(a1: Arr1, idx1: Int, elem: Any): Arr1 = { + val a1c = a1.clone() + a1c(idx1) = elem.asInstanceOf[AnyRef] + a1c + } + + @inline final def copyUpdate(a2: Arr2, idx2: Int, idx1: Int, elem: Any): Arr2 = { + val a2c = a2.clone() + a2c(idx2) = copyUpdate(a2c(idx2), idx1, elem) + a2c + } + + @inline final def copyUpdate(a3: Arr3, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr3 = { + val a3c = a3.clone() + a3c(idx3) = copyUpdate(a3c(idx3), idx2, idx1, elem) + a3c + } + + @inline final def copyUpdate(a4: Arr4, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr4 = { + val a4c = a4.clone() + a4c(idx4) = copyUpdate(a4c(idx4), idx3, idx2, idx1, elem) + a4c + } + + @inline final def copyUpdate(a5: Arr5, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr5 = { + val a5c = a5.clone() + a5c(idx5) = copyUpdate(a5c(idx5), idx4, idx3, idx2, idx1, elem) + a5c + } + + @inline final def copyUpdate(a6: Arr6, idx6: Int, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr6 = { + val a6c = a6.clone() + a6c(idx6) = copyUpdate(a6c(idx6), idx5, idx4, idx3, idx2, idx1, elem) + a6c + } + + @inline final def concatArrays[T <: AnyRef](a: Array[T], b: Array[T]): Array[T] = { + val dest = copyOf[T](a, a.length+b.length) + System.arraycopy(b, 0, dest, a.length, b.length) + dest + } +} + + +/** Helper methods and constants for Vector. 
*/ +private object VectorStatics { + + final def copyAppend1(a: Arr1, elem: Any): Arr1 = { + val alen = a.length + val ac = new Arr1(alen+1) + System.arraycopy(a, 0, ac, 0, alen) + ac(alen) = elem.asInstanceOf[AnyRef] + ac + } + + final def copyAppend[T <: AnyRef](a: Array[T], elem: T): Array[T] = { + val ac = copyOf(a, a.length+1) + ac(ac.length-1) = elem + ac + } + + final def copyPrepend1(elem: Any, a: Arr1): Arr1 = { + val ac = new Arr1(a.length+1) + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem.asInstanceOf[AnyRef] + ac + } + + final def copyPrepend[T <: AnyRef](elem: T, a: Array[T]): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length+1).asInstanceOf[Array[T]] + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem + ac + } + + final val empty1: Arr1 = new Array(0) + final val empty2: Arr2 = new Array(0) + final val empty3: Arr3 = new Array(0) + final val empty4: Arr4 = new Array(0) + final val empty5: Arr5 = new Array(0) + final val empty6: Arr6 = new Array(0) + + final def foreachRec[T <: AnyRef, A, U](level: Int, a: Array[T], f: A => U): Unit = { + var i = 0 + val len = a.length + if(level == 0) { + while(i < len) { + f(a(i).asInstanceOf[A]) + i += 1 + } + } else { + val l = level-1 + while(i < len) { + foreachRec(l, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + } + } + + final def mapElems1[A, B](a: Arr1, f: A => B): Arr1 = { + var i = 0 + while(i < a.length) { + val v1 = a(i).asInstanceOf[AnyRef] + val v2 = f(v1.asInstanceOf[A]).asInstanceOf[AnyRef] + if(v1 ne v2) + return mapElems1Rest(a, f, i, v2) + i += 1 + } + a + } + + final def mapElems1Rest[A, B](a: Arr1, f: A => B, at: Int, v2: AnyRef): Arr1 = { + val ac = new Arr1(a.length) + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = f(a(i).asInstanceOf[A]).asInstanceOf[AnyRef] + i += 1 + } + ac + } + + final def mapElems[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B): Array[T] = { + if(n == 1) + mapElems1[A, B](a.asInstanceOf[Arr1], f).asInstanceOf[Array[T]] + else { + var i = 0 + while(i < a.length) { + val v1 = a(i) + val v2 = mapElems(n-1, v1.asInstanceOf[Array[AnyRef]], f) + if(v1 ne v2) + return mapElemsRest(n, a, f, i, v2) + i += 1 + } + a + } + } + + final def mapElemsRest[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B, at: Int, v2: AnyRef): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length).asInstanceOf[Array[AnyRef]] + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = mapElems(n-1, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + ac.asInstanceOf[Array[T]] + } + + final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { + case it: Iterable[_] => + if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyPrepend(it.head.asInstanceOf[AnyRef], prefix1) + case s => + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + it.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + prefix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-prefix1.length) { + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + it.iterator.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + prefix1b + } else null + } + + final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { + case it: 
Iterable[_] => + if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyAppend(suffix1, it.head.asInstanceOf[AnyRef]) + case s => + val suffix1b = copyOf(suffix1, suffix1.length + s) + it.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + suffix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-suffix1.length) { + val suffix1b = copyOf(suffix1, suffix1.length + s) + it.iterator.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + suffix1b + } else null + } +} + + +private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLength: Int, private[this] val sliceCount: Int) extends Iterator[A] with java.lang.Cloneable { + + private[this] var a1: Arr1 = v.prefix1 + private[this] var a2: Arr2 = _ + private[this] var a3: Arr3 = _ + private[this] var a4: Arr4 = _ + private[this] var a5: Arr5 = _ + private[this] var a6: Arr6 = _ + private[this] var a1len = a1.length + private[this] var i1 = 0 // current index in a1 + private[this] var oldPos = 0 + private[this] var len1 = totalLength // remaining length relative to a1 + + private[this] var sliceIdx = 0 + private[this] var sliceDim = 1 + private[this] var sliceStart = 0 // absolute position + private[this] var sliceEnd = a1len // absolute position + + //override def toString: String = + // s"NewVectorIterator(v=$v, totalLength=$totalLength, sliceCount=$sliceCount): a1len=$a1len, len1=$len1, i1=$i1, sliceEnd=$sliceEnd" + + @inline override def knownSize = len1 - i1 + + @inline def hasNext: Boolean = len1 > i1 + + def next(): A = { + if(i1 == a1len) advance() + val r = a1(i1) + i1 += 1 + r.asInstanceOf[A] + } + + private[this] def advanceSlice(): Unit = { + if(!hasNext) Iterator.empty.next() + sliceIdx += 1 + var slice: Array[_ <: AnyRef] = v.vectorSlice(sliceIdx) + while(slice.length == 0) { + sliceIdx += 1 + slice = v.vectorSlice(sliceIdx) + } + sliceStart = sliceEnd + sliceDim = vectorSliceDim(sliceCount, sliceIdx) + (sliceDim: @switch) match { + case 1 => a1 = slice.asInstanceOf[Arr1] + case 2 => a2 = slice.asInstanceOf[Arr2] + case 3 => a3 = slice.asInstanceOf[Arr3] + case 4 => a4 = slice.asInstanceOf[Arr4] + case 5 => a5 = slice.asInstanceOf[Arr5] + case 6 => a6 = slice.asInstanceOf[Arr6] + } + sliceEnd = sliceStart + slice.length * (1 << (BITS*(sliceDim-1))) + if(sliceEnd > totalLength) sliceEnd = totalLength + if(sliceDim > 1) oldPos = (1 << (BITS*sliceDim))-1 + } + + private[this] def advance(): Unit = { + val pos = i1-len1+totalLength + if(pos == sliceEnd) advanceSlice() + if(sliceDim > 1) { + val io = pos - sliceStart + val xor = oldPos ^ io + advanceA(io, xor) + oldPos = io + } + len1 -= i1 + a1len = mmin(a1.length, len1) + i1 = 0 + } + + private[this] def advanceA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2(0) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3(0) + a1 = a2(0) + } else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) + } else { + a5 = a6(io >>> BITS5) + a4 = a5(0) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) + } + } + + private[this] def setA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } 
else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else { + a5 = a6(io >>> BITS5) + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } + } + + override def drop(n: Int): Iterator[A] = { + if(n > 0) { + val oldpos = i1-len1+totalLength + val newpos = mmin(oldpos + n, totalLength) + if(newpos == totalLength) { + i1 = 0 + len1 = 0 + a1len = 0 + } else { + while(newpos >= sliceEnd) advanceSlice() + val io = newpos - sliceStart + if(sliceDim > 1) { + val xor = oldPos ^ io + setA(io, xor) + oldPos = io + } + a1len = a1.length + i1 = io & MASK + len1 = i1 + (totalLength-newpos) + if(a1len > len1) a1len = len1 + } + } + this + } + + override def take(n: Int): Iterator[A] = { + if(n < knownSize) { + val trunc = knownSize - mmax(0, n) + totalLength -= trunc + len1 -= trunc + if(len1 < a1len) a1len = len1 + if(totalLength < sliceEnd) sliceEnd = totalLength + } + this + } + + override def slice(from: Int, until: Int): Iterator[A] = { + val _until = + if(from > 0) { + drop(from) + until - from + } else until + take(_until) + } + + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { + val xsLen = xs.length + val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len) + var copied = 0 + val isBoxed = xs.isInstanceOf[Array[AnyRef]] + while(copied < total) { + if(i1 == a1len) advance() + val count = mmin(total-copied, a1.length-i1) + if(isBoxed) System.arraycopy(a1, i1, xs, start+copied, count) + else Array.copy(a1, i1, xs, start+copied, count) + i1 += count + copied += count + } + total + } + + override def toVector: Vector[A] = + v.slice(i1-len1+totalLength, totalLength) + + protected[immutable] def split(at: Int): NewVectorIterator[A] = { + val it2 = clone().asInstanceOf[NewVectorIterator[A]] + it2.take(at) + drop(at) + it2 + } +} + + +private abstract class VectorStepperBase[A, Sub >: Null <: Stepper[A], Semi <: Sub](it: NewVectorIterator[A]) + extends Stepper[A] with EfficientSplit { + + protected[this] def build(it: NewVectorIterator[A]): Semi + + final def hasStep: Boolean = it.hasNext + + final def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED + + final def estimateSize: Long = it.knownSize + + def trySplit(): Sub = { + val len = it.knownSize + if(len > 1) build(it.split(len >>> 1)) + else null + } + + override final def iterator: Iterator[A] = it +} + +private class AnyVectorStepper[A](it: NewVectorIterator[A]) + extends VectorStepperBase[A, AnyStepper[A], AnyVectorStepper[A]](it) with AnyStepper[A] { + protected[this] def build(it: NewVectorIterator[A]) = new AnyVectorStepper(it) + def nextStep(): A = it.next() +} + +private class DoubleVectorStepper(it: NewVectorIterator[Double]) + extends VectorStepperBase[Double, DoubleStepper, DoubleVectorStepper](it) with DoubleStepper { + protected[this] def build(it: NewVectorIterator[Double]) = new DoubleVectorStepper(it) + def nextStep(): Double = it.next() +} + +private class IntVectorStepper(it: NewVectorIterator[Int]) + extends VectorStepperBase[Int, IntStepper, IntVectorStepper](it) with IntStepper { + protected[this] def build(it: NewVectorIterator[Int]) = new IntVectorStepper(it) + def nextStep(): Int = it.next() +} + +private class LongVectorStepper(it: NewVectorIterator[Long]) + extends VectorStepperBase[Long, LongStepper, LongVectorStepper](it) with LongStepper { + 
protected[this] def build(it: NewVectorIterator[Long]) = new LongVectorStepper(it) + def nextStep(): Long = it.next() +} + + +// The following definitions are needed for binary compatibility with ParVector +private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] { + private[immutable] var it: NewVectorIterator[A @uncheckedVariance @uncheckedCaptures] = _ + def hasNext: Boolean = it.hasNext + def next(): A = it.next() + private[collection] def remainingElementCount: Int = it.size + private[collection] def remainingVector: Vector[A] = it.toVector +} diff --git a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala new file mode 100644 index 000000000000..0860a0b47f28 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala @@ -0,0 +1,277 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** This class implements immutable maps using a vector/map-based data structure, which preserves insertion order. + * + * Unlike `ListMap`, `VectorMap` has amortized effectively constant lookup at the expense + * of using extra memory and generally lower performance for other operations + * + * @tparam K the type of the keys contained in this vector map. + * @tparam V the type of the values associated with the keys in this vector map. 
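+ *
+ * @example {{{
+ *   // illustrative: iteration follows insertion order, also after removals
+ *   val m = VectorMap("b" -> 2, "a" -> 1, "c" -> 3)
+ *   m.toList           // List((b,2), (a,1), (c,3))
+ *   (m - "a").toList   // List((b,2), (c,3))
+ * }}}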
+ * + * @define coll immutable vector map + * @define Coll `immutable.VectorMap` + */ +final class VectorMap[K, +V] private ( + private[immutable] val fields: Vector[Any], + private[immutable] val underlying: Map[K, (Int, V)], dropped: Int) + extends AbstractMap[K, V] + with SeqMap[K, V] + with StrictOptimizedMapOps[K, V, VectorMap, VectorMap[K, V]] + with MapFactoryDefaults[K, V, VectorMap, Iterable] { + + import VectorMap._ + + override protected[this] def className: String = "VectorMap" + + private[immutable] def this(fields: Vector[K], underlying: Map[K, (Int, V)]) = { + this(fields, underlying, 0) + } + + override val size = underlying.size + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + def updated[V1 >: V](key: K, value: V1): VectorMap[K, V1] = { + underlying.get(key) match { + case Some((slot, _)) => + new VectorMap(fields, underlying.updated[(Int, V1)](key, (slot, value)), dropped) + case None => + new VectorMap(fields :+ key, underlying.updated[(Int, V1)](key, (fields.length + dropped, value)), dropped) + } + } + + override def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = + new Map.WithDefault(this, d) + + override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = + new Map.WithDefault[K, V1](this, _ => d) + + def get(key: K): Option[V] = underlying.get(key) match { + case Some(v) => Some(v._2) + case None => None + } + + @tailrec + private def nextValidField(slot: Int): (Int, K) = { + if (slot >= fields.size) (-1, null.asInstanceOf[K]) + else fields(slot) match { + case Tombstone(distance) => + nextValidField(slot + distance) + case k => + (slot, k.asInstanceOf[K]) + } + } + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val fieldsLength = fields.length + private[this] var slot = -1 + private[this] var key: K = null.asInstanceOf[K] + + private[this] def advance(): Unit = { + val nextSlot = slot + 1 + if (nextSlot >= fieldsLength) { + slot = fieldsLength + key = null.asInstanceOf[K] + } else { + nextValidField(nextSlot) match { + case (-1, _) => + slot = fieldsLength + key = null.asInstanceOf[K] + case (s, k) => + slot = s + key = k + } + } + } + + advance() + + override def hasNext: Boolean = slot < fieldsLength + + override def next(): (K, V) = { + if (!hasNext) throw new NoSuchElementException("next called on depleted iterator") + val result = (key, underlying(key)._2) + advance() + result + } + } + + // No-Op overrides to allow for more efficient steppers in a minor release. + // Refining the return type to `S with EfficientSplit` is binary compatible. 
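+  // A binary-compatible future version could therefore tighten the signatures to
+  // something like (sketch):
+  //   override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = ...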
+ + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S = super.stepper(shape) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = super.keyStepper(shape) + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = super.valueStepper(shape) + + + def removed(key: K): VectorMap[K, V] = { + if (isEmpty) empty + else { + var fs = fields + val sz = fs.size + underlying.get(key) match { + case Some(_) if size == 1 => empty + case Some((slot, _)) => + val s = slot - dropped + + // Calculate next of kin + val next = + if (s < sz - 1) fs(s + 1) match { + case Tombstone(d) => s + d + 1 + case _ => s + 1 + } else s + 1 + + fs = fs.updated(s, Tombstone(next - s)) + + // Calculate first index of preceding tombstone sequence + val first = + if (s > 0) { + fs(s - 1) match { + case Tombstone(d) if d < 0 => if (s + d >= 0) s + d else 0 + case Tombstone(d) if d == 1 => s - 1 + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case _ => s + } + }else s + fs = fs.updated(first, Tombstone(next - first)) + + // Calculate last index of succeeding tombstone sequence + val last = next - 1 + if (last != first) { + fs = fs.updated(last, Tombstone(first - 1 - last)) + } + new VectorMap(fs, underlying - key, dropped) + case _ => + this + } + } + } + + override def mapFactory: MapFactory[VectorMap] = VectorMap + + override def contains(key: K): Boolean = underlying.contains(key) + + override def head: (K, V) = iterator.next() + + override def last: (K, V) = { + if (isEmpty) throw new UnsupportedOperationException("empty.last") + val lastSlot = fields.length - 1 + val last = fields.last match { + case Tombstone(d) if d < 0 => fields(lastSlot + d).asInstanceOf[K] + case Tombstone(d) if d == 1 => fields(lastSlot - 1).asInstanceOf[K] + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case k => k.asInstanceOf[K] + } + (last, underlying(last)._2) + } + + override def lastOption: Option[(K, V)] = { + if (isEmpty) None + else Some(last) + } + + override def tail: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.tail") + val (slot, key) = nextValidField(0) + new VectorMap(fields.drop(slot + 1), underlying - key, dropped + slot + 1) + } + + override def init: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.init") + val lastSlot = fields.size - 1 + val (slot, key) = fields.last match { + case Tombstone(d) if d < 0 => (lastSlot + d, fields(lastSlot + d).asInstanceOf[K]) + case Tombstone(d) if d == 1 => (lastSlot - 1, fields(lastSlot - 1).asInstanceOf[K]) + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case k => (lastSlot, k.asInstanceOf[K]) + } + new VectorMap(fields.dropRight(fields.size - slot), underlying - key, dropped) + } + + override def keys: Vector[K] = keysIterator.toVector + + override def values: Iterable[V] = new Iterable[V] with IterableFactoryDefaults[V, Iterable] { + override def iterator: Iterator[V] = keysIterator.map(underlying(_)._2) + } +} + +object VectorMap extends MapFactory[VectorMap] { + //Class to mark deleted slots in 'fields'. + //When one or more consecutive slots are deleted, the 'distance' of the first 'Tombstone' + // represents the distance to the location of the next undeleted slot (or the last slot in 'fields' +1 if it does not exist). 
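+  // (For example, if slots 2, 3 and 4 are deleted and slot 5 is live, then
+  //  fields(2) == Tombstone(3), pointing at slot 5.)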
+ //When two or more consecutive slots are deleted, the 'distance' of the trailing 'Tombstone' + // represents the distance to the location of the previous undeleted slot ( or -1 if it does not exist) multiplied by -1. + //For other deleted slots, it simply indicates that they have been deleted. + private[VectorMap] final case class Tombstone(distance: Int) + + private[this] final val EmptyMap: VectorMap[Nothing, Nothing] = + new VectorMap[Nothing, Nothing](Vector.empty[Nothing], HashMap.empty[Nothing, (Int, Nothing)]) + + def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]^): VectorMap[K, V] = + it match { + case vm: VectorMap[K, V] => vm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: mutable.Builder[(K, V), VectorMap[K, V]] = new VectorMapBuilder[K, V] +} + +private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { + private[this] val vectorBuilder = new VectorBuilder[K] + private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] + private[this] var aliased: VectorMap[K, V] @uncheckedCaptures = _ // OK since VectorMapBuilder is private + + override def clear(): Unit = { + vectorBuilder.clear() + mapBuilder.clear() + aliased = null + } + + override def result(): VectorMap[K, V] = { + if (aliased eq null) { + aliased = new VectorMap(vectorBuilder.result(), mapBuilder.result()) + } + aliased + } + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + mapBuilder.getOrElse(key, null) match { + case (slot, _) => + mapBuilder.addOne(key, (slot, value)) + case null => + val vectorSize = vectorBuilder.size + vectorBuilder.addOne(key) + mapBuilder.addOne(key, (vectorSize, value)) + } + } + this + } + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) +} From eea6bca2861eee47b68e02fe17b6d09508a6ba57 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 17:33:44 +0100 Subject: [PATCH 113/216] Add immutable TreeSet and TreeMap to stdlib --- .../stdlib/collection/BuildFrom.scala | 2 +- .../stdlib/collection/immutable/TreeMap.scala | 372 ++++++++++++++++++ .../stdlib/collection/immutable/TreeSet.scala | 297 ++++++++++++++ .../collection/immutable/WrappedString.scala | 142 +++++++ .../stdlib/collection/immutable/package.scala | 29 ++ 5 files changed, 841 insertions(+), 1 deletion(-) create mode 100644 tests/pos-special/stdlib/collection/immutable/TreeMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/WrappedString.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/package.scala diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala index 15a8acf2e6ef..0a3cc199d4dc 100644 --- a/tests/pos-special/stdlib/collection/BuildFrom.scala +++ b/tests/pos-special/stdlib/collection/BuildFrom.scala @@ -31,7 +31,7 @@ trait BuildFrom[-From, -A, +C] extends Any { self => def fromSpecific(from: From)(it: IterableOnce[A]^): C // !!! this is wrong, we need two versions of fromSpecific; one mapping // to C^{it} when C is an Iterable, and one mapping to C when C is a Seq, Map, or Set. - // But that requires a lareg scale refactoring of BuildFrom. The unsafeAssumePure + // But that requires a large scale refactoring of BuildFrom. 
The unsafeAssumePure
   // calls in this file are needed to sweep that problem under the carpet.

   /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer.
diff --git a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala
new file mode 100644
index 000000000000..ff01ad7806ec
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala
@@ -0,0 +1,372 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.tailrec
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.immutable.{RedBlackTree => RB}
+import scala.collection.mutable.ReusableBuilder
+import scala.runtime.AbstractFunction2
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** An immutable SortedMap whose values are stored in a red-black tree.
+  *
+  * This class is optimal when range queries will be performed,
+  * or when traversal in order of an ordering is desired.
+  * If you only need key lookups, and don't care about the order in which
+  * key-values are traversed, consider using [[scala.collection.immutable.HashMap]],
+  * which will generally have better performance. If you need insertion order,
+  * consider a [[scala.collection.immutable.SeqMap]], which does not need to
+  * have an ordering supplied.
+  *
+  * @example {{{
+  *   import scala.collection.immutable.TreeMap
+  *
+  *   // Make a TreeMap via the companion object factory
+  *   val weekdays = TreeMap(
+  *     2 -> "Monday",
+  *     3 -> "Tuesday",
+  *     4 -> "Wednesday",
+  *     5 -> "Thursday",
+  *     6 -> "Friday"
+  *   )
+  *   // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday)
+  *
+  *   val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday")
+  *   // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday)
+  *
+  *   val day3 = days.get(3) // Some("Tuesday")
+  *
+  *   val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday)
+  *
+  *   val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday)
+  *   val daysTo2    = days.rangeTo(2)    // TreeMap(1 -> Sunday, 2 -> Monday)
+  *   val daysAfter5 = days.rangeFrom(5)  // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday)
+  * }}}
+  *
+  * @tparam K the type of the keys contained in this tree map.
+  * @tparam V the type of the values associated with the keys.
+  * @param ordering the implicit ordering used to compare objects of type `K`.
+  *
+  * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]]
+  *      section on `Red-Black Trees` for more information.
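+  *
+  * As a further illustrative sketch (building on the `days` map above), the
+  * bound-query methods `minAfter` and `maxBefore` defined below behave as follows:
+  * {{{
+  *   days.minAfter(4)  // Some((4, "Wednesday")): entry with least key >= 4
+  *   days.maxBefore(4) // Some((3, "Tuesday")): entry with greatest key strictly < 4
+  * }}}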
+ * + * @define Coll immutable.TreeMap + * @define coll immutable tree map + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { + + def this()(implicit ordering: Ordering[K]) = this(null)(ordering) + private[immutable] def tree0: RB.Tree[K, V] = tree + + private[this] def newMapOrSelf[V1 >: V](t: RB.Tree[K, V1]): TreeMap[K, V1] = if(t eq tree) this else new TreeMap[K, V1](t) + + override def sortedMapFactory: SortedMapFactory[TreeMap] = TreeMap + + def iterator: Iterator[(K, V)] = RB.iterator(tree) + + def keysIteratorFrom(start: K): Iterator[K] = RB.keysIterator(tree, Some(start)) + + override def keySet: TreeSet[K] = new TreeSet(tree)(ordering) + + def iteratorFrom(start: K): Iterator[(K, V)] = RB.iterator(tree, Some(start)) + + override def valuesIteratorFrom(start: K): Iterator[V] = RB.valuesIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Tree[K, V]]( + size, tree, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree, _.left, _.right, _.value.asInstanceOf[V])) + } + s.asInstanceOf[S with EfficientSplit] + } + + def get(key: K): Option[V] = RB.get(tree, key) + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val resultOrNull = RB.lookup(tree, key) + if (resultOrNull eq null) default + else resultOrNull.value + } + + def removed(key: K): TreeMap[K,V] = + newMapOrSelf(RB.delete(tree, key)) + + def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] = + newMapOrSelf(RB.update(tree, key, value, overwrite = true)) + + override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]^): TreeMap[K, V1] = + newMapOrSelf(that match { + case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => + 
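// Fast path: since the orderings match, the two red-black trees can be merged
+        // wholesale via RB.union instead of inserting entry by entry.
+        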
RB.union(tree, tm.tree) + case ls: LinearSeq[(K,V1)] => + if (ls.isEmpty) tree //to avoid the creation of the adder + else { + val adder = new Adder[V1] + adder.addAll(ls) + adder.finalTree + } + case _ => + val adder = new Adder[V1] + val it = that.iterator + while (it.hasNext) { + adder.apply(it.next()) + } + adder.finalTree + }) + + override def removedAll(keys: IterableOnce[K]^): TreeMap[K, V] = keys match { + case ts: TreeSet[K] if ordering == ts.ordering => + newMapOrSelf(RB.difference(tree, ts.tree)) + case _ => super.removedAll(keys) + } + + /** A new TreeMap with the entry added is returned, + * assuming that key is not in the TreeMap. + * + * @tparam V1 type of the values of the new bindings, a supertype of `V` + * @param key the key to be inserted + * @param value the value to be associated with `key` + * @return a new $coll with the inserted binding, if it wasn't present in the map + */ + @deprecated("Use `updated` instead", "2.13.0") + def insert[V1 >: V](key: K, value: V1): TreeMap[K, V1] = { + assert(!RB.contains(tree, key)) + updated(key, value) + } + + def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = newMapOrSelf(RB.rangeImpl(tree, from, until)) + + override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def range(from: K, until: K): TreeMap[K,V] = newMapOrSelf(RB.range(tree, from, until)) + + override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) + override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) + override def size: Int = RB.count(tree) + override def knownSize: Int = size + + override def isEmpty = size == 0 + + override def firstKey: K = RB.smallest(tree).key + + override def lastKey: K = RB.greatest(tree).key + + override def head: (K, V) = { + val smallest = RB.smallest(tree) + (smallest.key, smallest.value) + } + + override def last: (K, V) = { + val greatest = RB.greatest(tree) + (greatest.key, greatest.value) + } + + override def tail: TreeMap[K, V] = new TreeMap(RB.tail(tree)) + + override def init: TreeMap[K, V] = new TreeMap(RB.init(tree)) + + override def drop(n: Int): TreeMap[K, V] = { + if (n <= 0) this + else if (n >= size) empty + else new TreeMap(RB.drop(tree, n)) + } + + override def take(n: Int): TreeMap[K, V] = { + if (n <= 0) empty + else if (n >= size) this + else new TreeMap(RB.take(tree, n)) + } + + override def slice(from: Int, until: Int) = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else new TreeMap(RB.slice(tree, from, until)) + } + + override def dropRight(n: Int): TreeMap[K, V] = take(size - math.max(n, 0)) + + override def takeRight(n: Int): TreeMap[K, V] = drop(size - math.max(n, 0)) + + private[this] def countWhile(p: ((K, V)) => Boolean): Int = { + var result = 0 + val it = iterator + while (it.hasNext && p(it.next())) result += 1 + result + } + + override def dropWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = drop(countWhile(p)) + + override def takeWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = take(countWhile(p)) + + override def span(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = splitAt(countWhile(p)) + + override def filter(f: ((K, V)) => Boolean): TreeMap[K, V] = + newMapOrSelf(RB.filterEntries[K, V](tree, (k, v) => f((k, v)))) + + override def 
partition(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = { + val (l, r) = RB.partitionEntries[K, V](tree, (k, v) => p((k, v))) + (newMapOrSelf(l), newMapOrSelf(r)) + } + + override def transform[W](f: (K, V) => W): TreeMap[K, W] = { + val t2 = RB.transform[K, V, W](tree, f) + if(t2 eq tree) this.asInstanceOf[TreeMap[K, W]] + else new TreeMap(t2) + } + + private final class Adder[B1 >: V] + extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] { + private var currentMutableTree: RB.Tree[K,B1] @uncheckedCaptures = tree0 + def finalTree = beforePublish(currentMutableTree) + override def apply(kv: (K, B1)): Unit = { + currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) + } + @tailrec def addAll(ls: LinearSeq[(K, B1)]): Unit = { + if (!ls.isEmpty) { + val kv = ls.head + currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) + addAll(ls.tail) + } + } + } + override def equals(obj: Any): Boolean = obj match { + case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) + case _ => super.equals(obj) + } + + override protected[this] def className = "TreeMap" +} + +/** $factoryInfo + * @define Coll immutable.TreeMap + * @define coll immutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap() + + def from[K, V](it: IterableOnce[(K, V)]^)(implicit ordering: Ordering[K]): TreeMap[K, V] = + it match { + case tm: TreeMap[K, V] if ordering == tm.ordering => tm + case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering => + new TreeMap[K, V](RB.fromOrderedEntries(sm.iterator, sm.size)) + case _ => + var t: RB.Tree[K, V] = null + val i = it.iterator + while (i.hasNext) { + val (k, v) = i.next() + t = RB.update(t, k, v, overwrite = true) + } + new TreeMap[K, V](t) + } + + def newBuilder[K, V](implicit ordering: Ordering[K]): ReusableBuilder[(K, V), TreeMap[K, V]] = new TreeMapBuilder[K, V] + + private class TreeMapBuilder[K, V](implicit ordering: Ordering[K]) + extends RB.MapHelper[K, V] + with ReusableBuilder[(K, V), TreeMap[K, V]] { + type Tree = RB.Tree[K, V] + private var tree:Tree @uncheckedCaptures = null + + def addOne(elem: (K, V)): this.type = { + tree = mutableUpd(tree, elem._1, elem._2) + this + } + private object adder extends AbstractFunction2[K, V, Unit] { + // we cache tree to avoid the outer access to tree + // in the hot path (apply) + private[this] var accumulator: Tree @uncheckedCaptures = null + def addForEach(hasForEach: collection.Map[K, V]): Unit = { + accumulator = tree + hasForEach.foreachEntry(this) + tree = accumulator + // be friendly to GC + accumulator = null + } + + override def apply(key: K, value: V): Unit = { + accumulator = mutableUpd(accumulator, key, value) + } + } + + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { + xs match { + // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= + // for the moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeMap[K, V] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0) + case that: collection.Map[K, V] => + //add avoiding creation of tuples + adder.addForEach(that) + case _ => + super.addAll(xs) + } + this + } + + override def clear(): Unit = { + tree = null + } + + override def result(): TreeMap[K, V] = new TreeMap[K, 
V](beforePublish(tree)) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala new file mode 100644 index 000000000000..c4241b818c38 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala @@ -0,0 +1,297 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.collection.immutable.{RedBlackTree => RB} +import scala.runtime.AbstractFunction1 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** This class implements immutable sorted sets using a tree. + * + * @tparam A the type of the elements contained in this tree set + * @param ordering the implicit ordering used to compare objects of type `A` + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] + * section on `Red-Black Trees` for more information. + * + * @define Coll `immutable.TreeSet` + * @define coll immutable tree set + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { + + if (ordering eq null) throw new NullPointerException("ordering must not be null") + + def this()(implicit ordering: Ordering[A]) = this(null)(ordering) + + override def sortedIterableFactory = TreeSet + + private[this] def newSetOrSelf(t: RB.Tree[A, Any]) = if(t eq tree) this else new TreeSet[A](t) + + override def size: Int = RB.count(tree) + + override def isEmpty = size == 0 + + override def head: A = RB.smallest(tree).key + + override def last: A = RB.greatest(tree).key + + override def tail: TreeSet[A] = new TreeSet(RB.tail(tree)) + + override def init: TreeSet[A] = new TreeSet(RB.init(tree)) + + override def min[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + head + } else { + super.min(ord) + } + } + + override def max[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + last + } else { + super.max(ord) + } + } + + override def drop(n: Int): TreeSet[A] = { + if (n <= 0) this + else if (n >= size) empty + else new TreeSet(RB.drop(tree, n)) + } + + override def take(n: Int): TreeSet[A] = { + if (n <= 0) empty + else if (n >= size) this + else new TreeSet(RB.take(tree, n)) + } + + override def slice(from: Int, until: Int): TreeSet[A] = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else new TreeSet(RB.slice(tree, from, until)) + } + + override def dropRight(n: Int): TreeSet[A] = take(size - math.max(n, 0)) + + override def takeRight(n: Int): TreeSet[A] = drop(size - math.max(n, 0)) + + 
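// dropWhile, takeWhile and span below share one helper: countWhile computes the
+  // length of the longest prefix satisfying p, and the tree-based drop, take and
+  // splitAt then do the actual slicing.
+  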
private[this] def countWhile(p: A => Boolean): Int = {
+    var result = 0
+    val it = iterator
+    while (it.hasNext && p(it.next())) result += 1
+    result
+  }
+  override def dropWhile(p: A => Boolean): TreeSet[A] = drop(countWhile(p))
+
+  override def takeWhile(p: A => Boolean): TreeSet[A] = take(countWhile(p))
+
+  override def span(p: A => Boolean): (TreeSet[A], TreeSet[A]) = splitAt(countWhile(p))
+
+  override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f)
+
+  override def minAfter(key: A): Option[A] = {
+    val v = RB.minAfter(tree, key)
+    if (v eq null) Option.empty else Some(v.key)
+  }
+
+  override def maxBefore(key: A): Option[A] = {
+    val v = RB.maxBefore(tree, key)
+    if (v eq null) Option.empty else Some(v.key)
+  }
+
+  def iterator: Iterator[A] = RB.keysIterator(tree)
+
+  def iteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
+
+  override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = {
+    import scala.collection.convert.impl._
+    type T = RB.Tree[A, Any]
+    val s = shape.shape match {
+      case StepperShape.IntShape    => IntBinaryTreeStepper.from[T]   (size, tree, _.left, _.right, _.key.asInstanceOf[Int])
+      case StepperShape.LongShape   => LongBinaryTreeStepper.from[T]  (size, tree, _.left, _.right, _.key.asInstanceOf[Long])
+      case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double])
+      case _         => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree, _.left, _.right, _.key))
+    }
+    s.asInstanceOf[S with EfficientSplit]
+  }
+
+  /** Checks if this set contains element `elem`.
+    *
+    * @param elem the element to check for membership.
+    * @return true, iff `elem` is contained in this set.
+    */
+  def contains(elem: A): Boolean = RB.contains(tree, elem)
+
+  override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until))
+
+  def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until))
+
+  /** Creates a new `TreeSet` with the entry added.
+    *
+    * @param elem a new element to add.
+    * @return a new $coll containing `elem` and all the elements of this $coll.
+    */
+  def incl(elem: A): TreeSet[A] =
+    newSetOrSelf(RB.update(tree, elem, null, overwrite = false))
+
+  /** Creates a new `TreeSet` with the entry removed.
+    *
+    * @param elem the element to remove.
+    * @return a new $coll containing all the elements of this $coll except `elem`.
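+    *
+    * For instance, `TreeSet(1, 2, 3).excl(2)` returns `TreeSet(1, 3)`.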
+ */ + def excl(elem: A): TreeSet[A] = + newSetOrSelf(RB.delete(tree, elem)) + + override def concat(that: collection.IterableOnce[A]): TreeSet[A] = { + val t = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + RB.union(tree, ts.tree) + case _ => + val it = that.iterator + var t = tree + while (it.hasNext) t = RB.update(t, it.next(), null, overwrite = false) + t + } + newSetOrSelf(t) + } + + override def removedAll(that: IterableOnce[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.difference(tree, ts.tree)) + case _ => + //TODO add an implementation of a mutable subtractor similar to TreeMap + //but at least this doesn't create a TreeSet for each iteration + object sub extends AbstractFunction1[A, Unit] { + var currentTree = tree + override def apply(k: A): Unit = { + currentTree = RB.delete(currentTree, k) + } + } + that.iterator.foreach(sub) + newSetOrSelf(sub.currentTree) + } + + override def intersect(that: collection.Set[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.intersect(tree, ts.tree)) + case _ => + super.intersect(that) + } + + override def diff(that: collection.Set[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.difference(tree, ts.tree)) + case _ => + super.diff(that) + } + + override def filter(f: A => Boolean): TreeSet[A] = newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => f(k)})) + + override def partition(p: A => Boolean): (TreeSet[A], TreeSet[A]) = { + val (l, r) = RB.partitionEntries(tree, {(a:A, _: Any) => p(a)}) + (newSetOrSelf(l), newSetOrSelf(r)) + } + + override def equals(obj: Any): Boolean = obj match { + case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree) + case _ => super.equals(obj) + } + + override protected[this] def className = "TreeSet" +} + +/** + * $factoryInfo + * + * @define Coll `immutable.TreeSet` + * @define coll immutable tree set + */ +@SerialVersionUID(3L) +object TreeSet extends SortedIterableFactory[TreeSet] { + + def empty[A: Ordering]: TreeSet[A] = new TreeSet[A] + + def from[E](it: scala.collection.IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = + it match { + case ts: TreeSet[E] if ordering == ts.ordering => ts + case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => + new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) + case r: Range if (ordering eq Ordering.Int) || (Ordering.Int isReverseOf ordering) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + val tree = RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size) + // The cast is needed to compile with Dotty: + // Dotty doesn't infer that E =:= Int, since instantiation of covariant GADTs is unsound + new TreeSet[E](tree) + case _ => + var t: RB.Tree[E, Null] = null + val i = it.iterator + while (i.hasNext) t = RB.update(t, i.next(), null, overwrite = false) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): ReusableBuilder[A, TreeSet[A]] = new TreeSetBuilder[A] + private class TreeSetBuilder[A](implicit ordering: Ordering[A]) + extends RB.SetHelper[A] + with ReusableBuilder[A, TreeSet[A]] { + type Tree = RB.Tree[A, Any] + private [this] var tree:RB.Tree[A @uncheckedCaptures, Any] = null + + override def addOne(elem: A): this.type = { + tree = mutableUpd(tree, elem) + this + } + + override def addAll(xs: IterableOnce[A]^): this.type = { + xs match { + // 
TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= + // for the moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeSet[A] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree + else tree = RB.union(beforePublish(tree), ts.tree)(ordering) + case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) + case _ => + super.addAll(xs) + } + this + } + + override def clear(): Unit = { + tree = null + } + + override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala new file mode 100644 index 000000000000..47fe769c81ef --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala @@ -0,0 +1,142 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.Predef.{wrapString => _, assert} +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.CharStringStepper +import scala.collection.mutable.{Builder, StringBuilder} +import language.experimental.captureChecking + +/** + * This class serves as a wrapper augmenting `String`s with all the operations + * found in indexed sequences. + * + * The difference between this class and `StringOps` is that calling transformer + * methods such as `filter` and `map` will yield an object of type `WrappedString` + * rather than a `String`. 
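+ *
+ * For instance (a small illustrative sketch):
+ * {{{
+ *   val ws = new WrappedString("hello")
+ *   ws.filter(_ != 'l')   // a WrappedString containing "heo", not the String "heo"
+ * }}}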
+ * + * @param self a string contained within this wrapped string + * + * @define Coll `WrappedString` + * @define coll wrapped string + */ +@SerialVersionUID(3L) +final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] + with IndexedSeqOps[Char, IndexedSeq, WrappedString] + with Serializable + with Pure { + + def apply(i: Int): Char = self.charAt(i) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder + override def empty: WrappedString = WrappedString.empty + + override def slice(from: Int, until: Int): WrappedString = { + val start = if (from < 0) 0 else from + if (until <= start || start >= self.length) + return WrappedString.empty + + val end = if (until > length) length else until + new WrappedString(self.substring(start, end)) + } + override def length = self.length + override def toString = self + override def view: StringView = new StringView(self) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = { + val st = new CharStringStepper(self, 0, self.length) + val r = + if (shape.shape == StepperShape.CharShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + override def startsWith[B >: Char](that: IterableOnce[B]^, offset: Int = 0): Boolean = + that match { + case s: WrappedString => self.startsWith(s.self, offset) + case _ => super.startsWith(that, offset) + } + + override def endsWith[B >: Char](that: collection.Iterable[B]^): Boolean = + that match { + case s: WrappedString => self.endsWith(s.self) + case _ => super.endsWith(that) + } + + override def indexOf[B >: Char](elem: B, from: Int = 0): Int = elem match { + case c: Char => self.indexOf(c, from) + case _ => super.indexOf(elem, from) + } + + override def lastIndexOf[B >: Char](elem: B, end: Int = length - 1): Int = + elem match { + case c: Char => self.lastIndexOf(c, end) + case _ => super.lastIndexOf(elem, end) + } + + override def copyToArray[sealed B >: Char](xs: Array[B], start: Int, len: Int): Int = + (xs: Any) match { + case chs: Array[Char] => + val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len) + self.getChars(0, copied, chs, start) + copied + case _ => super.copyToArray(xs, start, len) + } + + override def appendedAll[B >: Char](suffix: IterableOnce[B]^): IndexedSeq[B] = + suffix match { + case s: WrappedString => new WrappedString(self concat s.self) + case _ => super.appendedAll(suffix) + } + + override def sameElements[B >: Char](o: IterableOnce[B]^) = o match { + case s: WrappedString => self == s.self + case _ => super.sameElements(o) + } + + override protected[this] def className = "WrappedString" + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + override def equals(other: Any): Boolean = other match { + case that: WrappedString => + this.self == that.self + case _ => + super.equals(other) + } +} + +/** A companion object for wrapped strings. 
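+ *
+ * For example, `WrappedString.fromSpecific("abc".iterator).unwrap` yields the
+ * plain `String` `"abc"` again (`unwrap` comes from the `UnwrapOp` implicit
+ * class below).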
+ */ +@SerialVersionUID(3L) +object WrappedString extends SpecificIterableFactory[Char, WrappedString] { + def fromSpecific(it: IterableOnce[Char]^): WrappedString = { + val b = newBuilder + val s = it.knownSize + if(s >= 0) b.sizeHint(s) + b ++= it + b.result() + } + val empty: WrappedString = new WrappedString("") + def newBuilder: Builder[Char, WrappedString] = + new StringBuilder().mapResult(x => new WrappedString(x)) + + implicit class UnwrapOp(private val value: WrappedString) extends AnyVal { + def unwrap: String = value.self + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/package.scala b/tests/pos-special/stdlib/collection/immutable/package.scala new file mode 100644 index 000000000000..985ef22859be --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/package.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +import language.experimental.captureChecking + +package object immutable { + type StringOps = scala.collection.StringOps + val StringOps = scala.collection.StringOps + type StringView = scala.collection.StringView + val StringView = scala.collection.StringView + + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + + @deprecated("Use Map instead of DefaultMap", "2.13.0") + type DefaultMap[K, +V] = scala.collection.immutable.Map[K, V] +} From 4625afc776dd8eaba3904b4bcaa090b2be18ab1f Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 18:01:38 +0100 Subject: [PATCH 114/216] Add immutable TreeSeqMap to stdlib --- .../collection/immutable/TreeSeqMap.scala | 651 ++++++++++++++++++ 1 file changed, 651 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala new file mode 100644 index 000000000000..d7cceb54cca3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala @@ -0,0 +1,651 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** This class implements an immutable map that preserves order using + * a hash map for the key to value mapping to provide efficient lookup, + * and a tree for the ordering of the keys to provide efficient + * insertion/modification order traversal and destructuring. + * + * By default insertion order (`TreeSeqMap.OrderBy.Insertion`) + * is used, but modification order (`TreeSeqMap.OrderBy.Modification`) + * can be used instead if so specified at creation. + * + * The `orderingBy(orderBy: TreeSeqMap.OrderBy): TreeSeqMap[K, V]` method + * can be used to switch to the specified ordering for the returned map. 
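+ *
+ * For instance (a small sketch):
+ * {{{
+ *   val m = TreeSeqMap(1 -> "a", 2 -> "b", 3 -> "c")
+ *   m.updated(2, "B").toList // insertion order kept: (1, a), (2, B), (3, c)
+ *   m.orderingBy(TreeSeqMap.OrderBy.Modification)
+ *     .updated(2, "B").toList // key 2 moves last: (1, a), (3, c), (2, B)
+ * }}}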
+ * + * A key can be manually refreshed (i.e. placed at the end) via the + * `refresh(key: K): TreeSeqMap[K, V]` method (regardless of the ordering in + * use). + * + * Internally, an ordinal counter is increased for each insertion/modification + * and then the current ordinal is used as key in the tree map. After 2^32^ + * insertions/modifications the entire map is copied (thus resetting the ordinal + * counter). + * + * @tparam K the type of the keys contained in this map. + * @tparam V the type of the values associated with the keys in this map. + * @define coll immutable tree seq map + * @define Coll `immutable.TreeSeqMap` + */ +final class TreeSeqMap[K, +V] private ( + private val ordering: TreeSeqMap.Ordering[K], + private val mapping: TreeSeqMap.Mapping[K, V], + private val ordinal: Int, + val orderedBy: TreeSeqMap.OrderBy) + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeSeqMap[K, V]] + with StrictOptimizedMapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with MapFactoryDefaults[K, V, TreeSeqMap, Iterable] { + + import TreeSeqMap._ + + override protected[this] def className: String = "TreeSeqMap" + + override def mapFactory: MapFactory[TreeSeqMap] = TreeSeqMap + + override val size = mapping.size + + override def knownSize: Int = size + + override def isEmpty = size == 0 + + /* + // This should have been overridden in 2.13.0 but wasn't so it will have to wait since it is not forwards compatible + // Now handled in inherited method from scala.collection.MapFactoryDefaults instead. + override def empty = TreeSeqMap.empty[K, V](orderedBy) + */ + + def orderingBy(orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == this.orderedBy) this + else if (isEmpty) TreeSeqMap.empty(orderBy) + else new TreeSeqMap(ordering, mapping, ordinal, orderBy) + } + + def updated[V1 >: V](key: K, value: V1): TreeSeqMap[K, V1] = { + mapping.get(key) match { + case e if ordinal == -1 && (orderedBy == OrderBy.Modification || e.isEmpty) => + // Reinsert into fresh instance to restart ordinal counting, expensive but only done after 2^32 updates. + TreeSeqMap.empty[K, V1](orderedBy) ++ this + (key -> value) + case Some((o, _)) if orderedBy == OrderBy.Insertion => + new TreeSeqMap( + ordering.include(o, key), + mapping.updated[(Int, V1)](key, (o, value)), + ordinal, // Do not increment the ordinal since the key is already present, i.e. o <= ordinal. 
+ orderedBy) + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + case None => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + } + } + + def removed(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + new TreeSeqMap( + ordering.exclude(o), + mapping.removed(key), + ordinal, + orderedBy) + case None => + this + } + } + + def refresh(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping, + o1, + orderedBy) + case None => + this + } + } + + def get(key: K): Option[V] = mapping.get(key).map(value) + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): (K, V) = binding(iter.next()) + } + + override def keysIterator: Iterator[K] = new AbstractIterator[K] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): K = iter.next() + } + + override def valuesIterator: Iterator[V] = new AbstractIterator[V] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): V = value(binding(iter.next())) + } + + override def contains(key: K): Boolean = mapping.contains(key) + + override def head: (K, V) = binding(ordering.head) + + override def headOption = ordering.headOption.map(binding) + + override def last: (K, V) = binding(ordering.last) + + override def lastOption: Option[(K, V)] = ordering.lastOption.map(binding) + + override def tail: TreeSeqMap[K, V] = { + val (head, tail) = ordering.headTail + new TreeSeqMap(tail, mapping.removed(head), ordinal, orderedBy) + } + + override def init: TreeSeqMap[K, V] = { + val (init, last) = ordering.initLast + new TreeSeqMap(init, mapping.removed(last), ordinal, orderedBy) + } + + override def slice(from: Int, until: Int): TreeSeqMap[K, V] = { + val sz = size + if (sz == 0 || from >= until) TreeSeqMap.empty[K, V](orderedBy) + else { + val sz = size + val f = if (from >= 0) from else 0 + val u = if (until <= sz) until else sz + val l = u - f + if (l <= 0) TreeSeqMap.empty[K, V](orderedBy) + else if (l > sz / 2) { + // Remove front and rear incrementally if majority of elements are to be kept + val (front, rest) = ordering.splitAt(f) + val (ong, rear) = rest.splitAt(l) + var mng = this.mapping + val frontIter = front.iterator + while (frontIter.hasNext) { + mng = mng - frontIter.next() + } + val rearIter = rear.iterator + while (rearIter.hasNext) { + mng = mng - rearIter.next() + } + new TreeSeqMap(ong, mng, ordinal, orderedBy) + } else { + // Populate with builder otherwise + val bdr = newBuilder[K @uncheckedCaptures, V @uncheckedCaptures](orderedBy) + val iter = ordering.iterator + var i = 0 + while (i < f) { + iter.next() + i += 1 + } + while (i < u) { + val k = iter.next() + bdr.addOne((k, mapping(k)._2)) + i += 1 + } + bdr.result() + } + } + } + + override def map[K2, V2](f: ((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val (k2, v2) = f((k, v)) + 
bdr.addOne((k2, v2)) + } + bdr.result() + } + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val jter = f((k, v)).iterator + while (jter.hasNext) { + val (k2, v2) = jter.next() + bdr.addOne((k2, v2)) + } + } + bdr.result() + } + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + pf.runWith({ case (k2, v2) => bdr.addOne((k2, v2)) })((k, v)) + } + bdr.result() + } + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): TreeSeqMap[K, V2] = { + var ong: Ordering[K] = ordering + var mng: Mapping[K, V2] = mapping + var ord = increment(ordinal) + val iter = suffix.iterator + while (iter.hasNext) { + val (k, v2) = iter.next() + mng.get(k) match { + case Some((o, v)) => + if (orderedBy == OrderBy.Insertion && v != v2) mng = mng.updated(k, (o, v2)) + else if (orderedBy == OrderBy.Modification) { + mng = mng.updated(k, (ord, v2)) + ong = ong.exclude(o).append(ord, k) + ord = increment(ord) + } + case None => + mng = mng.updated(k, (ord, v2)) + ong = ong.append(ord, k) + ord = increment(ord) + } + } + new TreeSeqMap[K, V2](ong, mng, ord, orderedBy) + } + + @`inline` private[this] def value(p: (_, V)) = p._2 + @`inline` private[this] def binding(k: K) = mapping(k).copy(_1 = k) +} +object TreeSeqMap extends MapFactory[TreeSeqMap] { + sealed trait OrderBy + object OrderBy { + case object Insertion extends OrderBy + case object Modification extends OrderBy + } + + private val EmptyByInsertion = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Insertion) + private val EmptyByModification = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Modification) + val Empty = EmptyByInsertion + def empty[K, V]: TreeSeqMap[K, V] = empty(OrderBy.Insertion) + def empty[K, V](orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == OrderBy.Modification) EmptyByModification + else EmptyByInsertion + }.asInstanceOf[TreeSeqMap[K, V]] + + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): TreeSeqMap[K, V] = + it match { + case om: TreeSeqMap[K, V] => om + case _ => (newBuilder[K, V] ++= it).result() + } + + @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1 + + def newBuilder[sealed K, sealed V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) + def newBuilder[sealed K, sealed V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) + + final class Builder[sealed K, sealed V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { + private[this] val bdr = new MapBuilderImpl[K, (Int, V)] + private[this] var ong = Ordering.empty[K] + private[this] var ord = 0 + private[this] var aliased: TreeSeqMap[K, V] = _ + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + bdr.getOrElse(key, null) match { + case (o, v) => + if (orderedBy == OrderBy.Insertion && v != value) bdr.addOne(key, (o, value)) + else if (orderedBy == 
OrderBy.Modification) { + bdr.addOne(key, (ord, value)) + ong = ong.exclude(o).appendInPlace(ord, key) + ord = increment(ord) + } + case null => + bdr.addOne(key, (ord, value)) + ong = ong.appendInPlace(ord, key) + ord = increment(ord) + } + } + this + } + + override def clear(): Unit = { + ong = Ordering.empty + ord = 0 + bdr.clear() + aliased = null + } + + override def result(): TreeSeqMap[K, V] = { + if (aliased eq null) { + aliased = new TreeSeqMap(ong, bdr.result(), ord, orderedBy) + } + aliased + } + } + + private type Mapping[K, +V] = Map[K, (Int, V)] + @annotation.unused + private val Mapping = Map + + /* The ordering implementation below is an adapted version of immutable.IntMap. */ + private[immutable] object Ordering { + import scala.collection.generic.BitOperations.Int._ + + @inline private[immutable] def toBinaryString(i: Int): String = s"$i/${i.toBinaryString}" + + def empty[T] : Ordering[T] = Zero + + def apply[T](elems: (Int, T)*): Ordering[T] = + elems.foldLeft(empty[T])((x, y) => x.include(y._1, y._2)) + + // Iterator over a non-empty Ordering. + final class Iterator[+V](it: Ordering[V]) { + // Basically this uses a simple stack to emulate conversion over the tree. However + // because we know that Ints are at least 32 bits we can have at most 32 Bins and + // one Tip sitting on the tree at any point. Therefore we know the maximum stack + // depth is 33 + private[this] var index = 0 + private[this] val buffer = new Array[AnyRef](33) + + private[this] def pop = { + index -= 1 + buffer(index).asInstanceOf[Ordering[V]] + } + + private[this] def push[V2 >: V](x: Ordering[V2]): Unit = { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 + } + + if (it != Zero) push(it) + + def hasNext = index != 0 + @tailrec + def next(): V = + pop match { + case Bin(_,_, Tip(_, v), right) => + push(right) + v + case Bin(_, _, left, right) => + push(right) + push(left) + next() + case Tip(_, v) => v + // This should never happen. We don't allow Ordering.Zero in subtrees of the Ordering + // and don't return an Ordering.Iterator for Ordering.Zero. + case Zero => throw new IllegalStateException("empty subtree not allowed") + } + } + + object Iterator { + val Empty = new Iterator[Nothing](Ordering.empty[Nothing]) + def empty[V]: Iterator[V] = Empty.asInstanceOf[Iterator[V]] + } + + case object Zero extends Ordering[Nothing] { + // Important! Without this equals method in place, an infinite + // loop from Map.equals => size => pattern-match-on-Nil => equals + // develops. Case objects and custom equality don't mix without + // careful handling. 
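+      // (For instance, `Zero == Tip(0, "x")` must be false even though both are
+      // `Ordering`s, while `Zero == Zero` remains true.)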
+ override def equals(that : Any): Boolean = that match { + case _: this.type => true + case _: Ordering[_] => false // The only empty Orderings are eq Nil + case _ => super.equals(that) + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Ø" + } + + final case class Tip[+T](ord: Int, value: T) extends Ordering[T] { + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[Tip[S]] + else Tip(ord, s) + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n" + } + + final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T @uncheckedCaptures] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { + def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]] + else Bin[S](prefix, mask, left, right) + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = { + sb ++= s"${prefix}Bin(${toBinaryString(this.prefix)}:${toBinaryString(mask)})\n" + left.format(sb, subPrefix + "├── ", subPrefix + "│ ") + right.format(sb, subPrefix + "└── ", subPrefix + " ") + } + } + + private def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) + + private def join[T](p1: Int, t1: Ordering[T], p2: Int, t2: Ordering[T]): Ordering[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) + if (zero(p1, m)) Bin(p, m, t1, t2) + else Bin(p, m, t2, t1) + } + + private def bin[T](prefix: Int, mask: Int, left: Ordering[T], right: Ordering[T]): Ordering[T] = (left, right) match { + case (l, Zero) => l + case (Zero, r) => r + case (l, r) => Bin(prefix, mask, l, r) + } + } + + sealed abstract class Ordering[+T] { + import Ordering._ + import scala.annotation.tailrec + import scala.collection.generic.BitOperations.Int._ + + override final def toString: String = format + final def format: String = { + val sb = new StringBuilder + format(sb, "", "") + sb.toString() + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit + + @tailrec + final def head: T = this match { + case Zero => throw new NoSuchElementException("head of empty map") + case Tip(k, v) => v + case Bin(_, _, l, _) => l.head + } + + @tailrec + final def headOption: Option[T] = this match { + case Zero => None + case Tip(_, v) => Some(v) + case Bin(_, _, l, _) => l.headOption + } + + @tailrec + final def last: T = this match { + case Zero => throw new NoSuchElementException("last of empty map") + case Tip(_, v) => v + case Bin(_, _, _, r) => r.last + } + + @tailrec + final def lastOption: Option[T] = this match { + case Zero => None + case Tip(_, v) => Some(v) + case Bin(_, _, _, r) => r.lastOption + } + + @tailrec + final def ordinal: Int = this match { + case Zero => 0 + case Tip(o, _) => o + case Bin(_, _, _, r) => r.ordinal + } + + final def tail: Ordering[T] = this match { + case Zero => throw new NoSuchElementException("tail of empty map") + case Tip(_, _) => Zero + case Bin(p, m, l, r) => bin(p, m, l.tail, r) + } + + final def headTail: (T, Ordering[T]) = this match { + case Zero => throw new NoSuchElementException("init of empty map") + case Tip(_, v) => (v, Zero) + case Bin(p, m, l, r) => + val (head, tail) = l.headTail + (head, bin(p, m, tail, r)) + } + + final def init: Ordering[T] = this match { + case Zero => throw new NoSuchElementException("init of empty map") + case Tip(_, _) => 
Zero + case Bin(p, m, l, r) => + bin(p, m, l, r.init) + } + + final def initLast: (Ordering[T], T) = this match { + case Zero => throw new NoSuchElementException("init of empty map") + case Tip(_, v) => (Zero, v) + case Bin(p, m, l, r) => + val (init, last) = r.initLast + (bin(p, m, l, init), last) + } + + final def iterator: Iterator[T] = this match { + case Zero => Iterator.empty + case _ => new Iterator(this) + } + + final def include[S >: T](ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) => + if (ordinal == o) Tip(ordinal, value) + else join(ordinal, Tip(ordinal, value), o, this) + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) + else if (zero(ordinal, m)) Bin(p, m, l.include(ordinal, value), r) + else Bin(p, m, l, r.include(ordinal, value)) + } + + final def append[S >: T](ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) => + if (ordinal == o) Tip(ordinal, value) + else join(ordinal, Tip(ordinal, value), o, this) + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) + else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") + else Bin(p, m, l, r.append(ordinal, value)) + } + + @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null, ordinal, value) + private[collection] final def appendInPlace1[S >: T](parent: Bin[S], ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) if o >= ordinal => + throw new IllegalArgumentException(s"Append called with ordinal out of range: $o is not greater than current max ordinal ${this.ordinal}") + case Tip(o, _) if parent == null => + join(ordinal, Tip(ordinal, value), o, this) + case Tip(o, _) => + parent.right = join(ordinal, Tip(ordinal, value), o, this) + parent + case b @ Bin(p, m, _, r) => + if (!hasMatch(ordinal, p, m)) { + val b2 = join(ordinal, Tip(ordinal, value), p, this) + if (parent != null) { + parent.right = b2 + parent + } else b2 + } else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") + else { + r.appendInPlace1(b, ordinal, value) + this + } + } + + final def exclude(ordinal: Int): Ordering[T] = this match { + case Zero => + Zero + case Tip(o, _) => + if (ordinal == o) Zero + else this + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) this + else if (zero(ordinal, m)) bin(p, m, l.exclude(ordinal), r) + else bin(p, m, l, r.exclude(ordinal)) + } + + final def splitAt(n: Int): (Ordering[T], Ordering[T]) = { + var rear = Ordering.empty[T] + var i = n + (modifyOrRemove { (o, v) => + i -= 1 + if (i >= 0) Some(v) + else { + rear = rear.appendInPlace(o, v) + None + } + }, rear) + } + + /** + * A combined transform and filter function. Returns an `Ordering` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for key, and if `f(key, value) == Some(x)` the + * map contains `(key, x)`. + * + * @tparam S The type of the values in the resulting `LongMap`. + * @param f The transforming function. + * @return The modified map. 
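+     *
+     * For instance (a sketch): `ord.modifyOrRemove((_, v) => Some(v))` keeps every
+     * entry (sharing is preserved), while `ord.modifyOrRemove((_, _) => None)`
+     * yields `Zero`.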
+ */ + final def modifyOrRemove[S](f: (Int, T) => Option[S]): Ordering[S] = this match { + case Zero => Zero + case Tip(key, value) => + f(key, value) match { + case None => Zero + case Some(value2) => + // hack to preserve sharing + if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[Ordering[S]] + else Tip(key, value2) + } + case Bin(prefix, mask, left, right) => + val l = left.modifyOrRemove(f) + val r = right.modifyOrRemove(f) + if ((left eq l) && (right eq r)) this.asInstanceOf[Ordering[S]] + else bin(prefix, mask, l, r) + } + } +} From 9ba2c39e8a689b5e5c6048efb5bd82dda91d4bee Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 19:16:23 +0100 Subject: [PATCH 115/216] Add unchecked LazyList to stdlib --- .../collection/immutable/LazyList.scala | 1381 +++++++++++++++++ 1 file changed, 1381 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/immutable/LazyList.scala diff --git a/tests/pos-special/stdlib/collection/immutable/LazyList.scala b/tests/pos-special/stdlib/collection/immutable/LazyList.scala new file mode 100644 index 000000000000..8b7ad26dc5ae --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/LazyList.scala @@ -0,0 +1,1381 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.lang.{StringBuilder => JStringBuilder} + +import scala.annotation.tailrec +import scala.collection.generic.SerializeEnd +import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder} +import scala.language.implicitConversions +import scala.runtime.Statics + +/** This class implements an immutable linked list. We call it "lazy" + * because it computes its elements only when they are needed. + * + * Elements are memoized; that is, the value of each element is computed at most once. + * + * Elements are computed in-order and are never skipped. In other words, + * accessing the tail causes the head to be computed first. + * + * How lazy is a `LazyList`? When you have a value of type `LazyList`, you + * don't know yet whether the list is empty or not. If you learn that it is non-empty, + * then you also know that the head has been computed. But the tail is itself + * a `LazyList`, whose emptiness-or-not might remain undetermined. + * + * A `LazyList` may be infinite. For example, `LazyList.from(0)` contains + * all of the natural numbers 0, 1, 2, and so on. For infinite sequences, + * some methods (such as `count`, `sum`, `max` or `min`) will not terminate. + * + * Here is an example: + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * val fibs: LazyList[BigInt] = + * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 } + * fibs.take(5).foreach(println) + * } + * + * // prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * }}} + * + * To illustrate, let's add some output to the definition `fibs`, so we + * see what's going on. 
+ * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * val fibs: LazyList[BigInt] = + * BigInt(0) #:: BigInt(1) #:: + * fibs.zip(fibs.tail).map{ n => + * println(s"Adding \${n._1} and \${n._2}") + * n._1 + n._2 + * } + * fibs.take(5).foreach(println) + * fibs.take(6).foreach(println) + * } + * + * // prints + * // + * // 0 + * // 1 + * // Adding 0 and 1 + * // 1 + * // Adding 1 and 1 + * // 2 + * // Adding 1 and 2 + * // 3 + * + * // And then prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * // Adding 2 and 3 + * // 5 + * }}} + * + * Note that the definition of `fibs` uses `val` not `def`. The memoization of the + * `LazyList` requires us to have somewhere to store the information and a `val` + * allows us to do that. + * + * Further remarks about the semantics of `LazyList`: + * + * - Though the `LazyList` changes as it is accessed, this does not + * contradict its immutability. Once the values are memoized they do + * not change. Values that have yet to be memoized still "exist", they + * simply haven't been computed yet. + * + * - One must be cautious of memoization; it can eat up memory if you're not + * careful. That's because memoization of the `LazyList` creates a structure much like + * [[scala.collection.immutable.List]]. As long as something is holding on to + * the head, the head holds on to the tail, and so on recursively. + * If, on the other hand, there is nothing holding on to the head (e.g. if we used + * `def` to define the `LazyList`) then once it is no longer being used directly, + * it disappears. + * + * - Note that some operations, including [[drop]], [[dropWhile]], + * [[flatMap]] or [[collect]] may process a large number of intermediate + * elements before returning. + * + * Here's another example. Let's start with the natural numbers and iterate + * over them. + * + * {{{ + * // We'll start with a silly iteration + * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { + * // Stop after 200,000 + * if (i < 200001) { + * if (i % 50000 == 0) println(s + i) + * loop(s, iter.next(), iter) + * } + * } + * + * // Our first LazyList definition will be a val definition + * val lazylist1: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * + * // Because lazylist1 is a val, everything that the iterator produces is held + * // by virtue of the fact that the head of the LazyList is held in lazylist1 + * val it1 = lazylist1.iterator + * loop("Iterator1: ", it1.next(), it1) + * + * // We can redefine this LazyList such that all we have is the Iterator left + * // and allow the LazyList to be garbage collected as required. Using a def + * // to provide the LazyList ensures that no val is holding onto the head as + * // is the case with lazylist1 + * def lazylist2: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * val it2 = lazylist2.iterator + * loop("Iterator2: ", it2.next(), it2) + * + * // And, of course, we don't actually need a LazyList at all for such a simple + * // problem. There's no reason to use a LazyList if you don't actually need + * // one. + * val it3 = new Iterator[Int] { + * var i = -1 + * def hasNext = true + * def next(): Int = { i += 1; i } + * } + * loop("Iterator3: ", it3.next(), it3) + * }}} + * + * - In the `fibs` example earlier, the fact that `tail` works at all is of interest. + * `fibs` has an initial `(0, 1, LazyList(...))`, so `tail` is deterministic. 
+ * If we defined `fibs` such that only `0` were concretely known, then the act
+ * of determining `tail` would require the evaluation of `tail`, so the
+ * computation would be unable to progress, as in this code:
+ * {{{
+ * // The first time we try to access the tail we're going to need more
+ * // information which will require us to recurse, which will require us to
+ * // recurse, which...
+ * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
+ * }}}
+ *
+ * The definition of `fibs` above creates more objects than necessary. The
+ * following implementation is more "cost effective" because it has a more
+ * direct route to the numbers themselves:
+ *
+ * {{{
+ * lazy val fib: LazyList[Int] = {
+ * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n)
+ * loop(1, 1)
+ * }
+ * }}}
+ *
+ * The head, the tail and whether the list is empty or not can be initially unknown.
+ * Once any of those are evaluated, they are all known, though if the tail is
+ * built with `#::` or `#:::`, its content still isn't evaluated. Instead, evaluating
+ * the tail's content is deferred until the tail's empty status, head or tail is
+ * evaluated.
+ *
+ * Delaying the evaluation of whether a LazyList is empty or not until it is needed
+ * allows `LazyList` to avoid eagerly evaluating any elements on a call to `filter`.
+ *
+ * Only when it is evaluated further (which may be never!) are any of the elements
+ * forced.
+ *
+ * For example:
+ *
+ * {{{
+ * def tailWithSideEffect: LazyList[Nothing] = {
+ * println("getting empty LazyList")
+ * LazyList.empty
+ * }
+ *
+ * val emptyTail = tailWithSideEffect // prints "getting empty LazyList"
+ *
+ * val suspended = 1 #:: tailWithSideEffect // doesn't print anything
+ * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed
+ * val filtered = tail.filter(_ => false) // still nothing is printed
+ * filtered.isEmpty // prints "getting empty LazyList"
+ * }}}
+ *
+ * @tparam A the type of the elements contained in this lazy list.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]]
+ * section on `LazyLists` for more information.
+ * @define Coll `LazyList`
+ * @define coll lazy list
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`,
+ * `appendedAll`, `lazyAppendedAll`) without forcing any of the
+ * intermediate resulting lazy lists may overflow the stack when
+ * the final result is forced.
+ * @define preservesLaziness This method preserves laziness; elements are only evaluated
+ * individually as needed.
+ * @define initiallyLazy This method does not evaluate anything until an operation is performed
+ * on the result (e.g. calling `head` or `tail`, or checking if it is empty).
+ * @define evaluatesAllElements This method evaluates all elements of the collection.
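+ *
+ * As a small illustration of memoization and of what "evaluates all elements"
+ * means, consider this sketch (expected output shown in the comments):
+ *
+ * {{{
+ * val squares = LazyList.tabulate(3) { i => println(s"computing \$i"); i * i }
+ * squares.force // prints "computing 0", "computing 1", "computing 2"
+ * squares.force // prints nothing: each element is computed at most once
+ * squares.toList // List(0, 1, 4), again without recomputation
+ * }}}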
+ */ +@SerialVersionUID(3L) +final class LazyList[+A] private(private[this] var lazyState: () => LazyList.State[A]) + extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, LazyList, LazyList[A]] + with IterableFactoryDefaults[A, LazyList] + with Serializable { + import LazyList._ + + @volatile private[this] var stateEvaluated: Boolean = false + @inline private def stateDefined: Boolean = stateEvaluated + private[this] var midEvaluation = false + + private lazy val state: State[A] = { + // if it's already mid-evaluation, we're stuck in an infinite + // self-referential loop (also it's empty) + if (midEvaluation) { + throw new RuntimeException("self-referential LazyList or a derivation thereof has no more elements") + } + midEvaluation = true + val res = try lazyState() finally midEvaluation = false + // if we set it to `true` before evaluating, we may infinite loop + // if something expects `state` to already be evaluated + stateEvaluated = true + lazyState = null // allow GC + res + } + + override def iterableFactory: SeqFactory[LazyList] = LazyList + + override def isEmpty: Boolean = state eq State.Empty + + /** @inheritdoc + * + * $preservesLaziness + */ + override def knownSize: Int = if (knownIsEmpty) 0 else -1 + + override def head: A = state.head + + override def tail: LazyList[A] = state.tail + + @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) + @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) + + /** Evaluates all undefined elements of the lazy list. + * + * This method detects cycles in lazy lists, and terminates after all + * elements of the cycle are evaluated. For example: + * + * {{{ + * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring + * ring.force + * ring.toString + * + * // prints + * // + * // LazyList(1, 2, 3, ...) + * }}} + * + * This method will *not* terminate for non-cyclic infinite-sized collections. + * + * @return this + */ + def force: this.type = { + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those: LazyList[A] = this + if (!these.isEmpty) { + these = these.tail + } + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail + } + this + } + + /** @inheritdoc + * + * The iterator returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def iterator: Iterator[A] = + if (knownIsEmpty) Iterator.empty + else new LazyIterator(this) + + /** Apply the given function `f` to each element of this linear sequence + * (while respecting the order of the elements). + * + * @param f The treatment to apply to each element. + * @note Overridden here as final to trigger tail-call optimization, which + * replaces 'this' with 'tail' at each iteration. This is absolutely + * necessary for allowing the GC to collect the underlying LazyList as elements + * are consumed. + * @note This function will force the realization of the entire LazyList + * unless the `f` throws an exception. + */ + @tailrec + override def foreach[U](f: A => U): Unit = { + if (!isEmpty) { + f(head) + tail.foreach(f) + } + } + + /** LazyList specialization of foldLeft which allows GC to collect along the + * way. + * + * @tparam B The type of value being accumulated. + * @param z The initial value seeded into the function `op`. 
+ * @param op The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `op`. + */ + @tailrec + override def foldLeft[B](z: B)(op: (B, A) => B): B = + if (isEmpty) z + else tail.foldLeft(op(z, head))(op) + + // State.Empty doesn't use the SerializationProxy + protected[this] def writeReplace(): AnyRef = + if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this + + override protected[this] def className = "LazyList" + + /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. + * + * $preservesLaziness + * + * $appendStackSafety + * + * @param suffix The collection that gets appended to this lazy list + * @return The lazy list containing elements of this lazy list and the iterable object. + */ + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): LazyList[B] = + newLL { + if (isEmpty) suffix match { + case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList + case coll if coll.knownSize == 0 => State.Empty + case coll => stateFromIterator(coll.iterator) + } + else sCons(head, tail lazyAppendedAll suffix) + } + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def appendedAll[B >: A](suffix: IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(suffix) + else lazyAppendedAll(suffix) + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def appended[B >: A](elem: B): LazyList[B] = + if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) + else lazyAppendedAll(Iterator.single(elem)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def scanLeft[B](z: B)(op: (B, A) => B): LazyList[B] = + if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) + else newLL(scanLeftState(z)(op)) + + private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = + sCons( + z, + newLL { + if (isEmpty) State.Empty + else tail.scanLeftState(op(z, head))(op) + } + ) + + /** LazyList specialization of reduceLeft which allows GC to collect + * along the way. + * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `f`. 
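+ *
+ * For example (a small sketch; the value in the comment is what the
+ * expression evaluates to):
+ *
+ * {{{
+ * LazyList.from(1).take(4).reduceLeft(_ + _) // 10, having evaluated only 1, 2, 3 and 4
+ * }}}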
+ */ + override def reduceLeft[B >: A](f: (B, A) => B): B = { + if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + else { + var reducedRes: B = this.head + var left: LazyList[A] = this.tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail + } + reducedRes + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { + val (left, right) = map(f).partition(_.isLeft) + (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filter(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = false) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filterNot(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = true) + + /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. + * + * This method is not particularly useful for a lazy list, as [[filter]] already preserves + * laziness. + * + * The `collection.WithFilter` returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyList] = + new LazyList.WithFilter(coll, p) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def prepended[B >: A](elem: B): LazyList[B] = newLL(sCons(elem, this)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(prefix) + else if (prefix.knownSize == 0) this + else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def map[B](f: A => B): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else (mapImpl(f): @inline) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def tapEach[U](f: A => U): LazyList[A] = map { a => f(a); a } + + private def mapImpl[B](f: A => B): LazyList[B] = + newLL { + if (isEmpty) State.Empty + else sCons(f(head), tail.mapImpl(f)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def collect[B](pf: PartialFunction[A, B]): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else LazyList.collectImpl(this, pf) + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element for which the partial function is defined. + */ + @tailrec + override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if (isEmpty) None + else { + val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) + if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) + else Some(res) + } + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element matching the predicate. 
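+ *
+ * For instance, on an infinite lazy list only a finite prefix is evaluated
+ * (a sketch):
+ *
+ * {{{
+ * LazyList.from(1).find(_ % 7 == 0) // Some(7), after evaluating only the first seven elements
+ * }}}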
+ */ + @tailrec + override def find(p: A => Boolean): Option[A] = + if (isEmpty) None + else { + val elem = head + if (p(elem)) Some(elem) + else tail.find(p) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + // optimisations are not for speed, but for functionality + // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) + override def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else LazyList.flatMapImpl(this, f) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def flatten[B](implicit asIterable: A => IterableOnce[B]): LazyList[B] = flatMap(asIterable) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zip[B](that: collection.IterableOnce[B]): LazyList[(A, B)] = + if (this.knownIsEmpty || that.knownSize == 0) LazyList.empty + else newLL(zipState(that.iterator)) + + private def zipState[B](it: Iterator[B]): State[(A, B)] = + if (this.isEmpty || !it.hasNext) State.Empty + else sCons((head, it.next()), newLL { tail zipState it }) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipWithIndex: LazyList[(A, Int)] = this zip LazyList.from(0) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipAll[A1 >: A, B](that: collection.Iterable[B], thisElem: A1, thatElem: B): LazyList[(A1, B)] = { + if (this.knownIsEmpty) { + if (that.knownSize == 0) LazyList.empty + else LazyList.continually(thisElem) zip that + } else { + if (that.knownSize == 0) zip(LazyList.continually(thatElem)) + else newLL(zipAllState(that.iterator, thisElem, thatElem)) + } + } + + private def zipAllState[A1 >: A, B](it: Iterator[B], thisElem: A1, thatElem: B): State[(A1, B)] = { + if (it.hasNext) { + if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyList.continually(thisElem) zipState it }) + else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) + } else { + if (this.isEmpty) State.Empty + else sCons((this.head, thatElem), this.tail zip LazyList.continually(thatElem)) + } + } + + /** @inheritdoc + * + * This method is not particularly useful for a lazy list, as [[zip]] already preserves + * laziness. + * + * The `collection.LazyZip2` returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + // just in case it can be meaningfully overridden at some point + override def lazyZip[B](that: collection.Iterable[B]): LazyZip2[A, B, LazyList.this.type] = + super.lazyZip(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = + (map(asPair(_)._1), map(asPair(_)._2)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = + (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all except the first `n` elements. + */ + override def drop(n: Int): LazyList[A] = + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else LazyList.dropImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all elements after the predicate returns `false`. 
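+ *
+ * For example (a sketch; the result itself stays lazy):
+ *
+ * {{{
+ * val rest = LazyList.from(1).dropWhile(_ < 100) // evaluates nothing yet
+ * rest.head // forces elements until the predicate first fails, then returns 100
+ * }}}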
+ */ + override def dropWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.dropWhileImpl(this, p) + + /** @inheritdoc + * + * $initiallyLazy + */ + override def dropRight(n: Int): LazyList[A] = { + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else newLL { + var scout = this + var remaining = n + // advance scout n elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + remaining -= 1 + scout = scout.tail + } + dropRightState(scout) + } + } + + private def dropRightState(scout: LazyList[_]): State[A] = + if (scout.isEmpty) State.Empty + else sCons(head, newLL(tail.dropRightState(scout.tail))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def take(n: Int): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeImpl(n): @inline) + + private def takeImpl(n: Int): LazyList[A] = { + if (n <= 0) LazyList.empty + else newLL { + if (isEmpty) State.Empty + else sCons(head, tail.takeImpl(n - 1)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def takeWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeWhileImpl(p): @inline) + + private def takeWhileImpl(p: A => Boolean): LazyList[A] = + newLL { + if (isEmpty || !p(head)) State.Empty + else sCons(head, tail.takeWhileImpl(p)) + } + + /** @inheritdoc + * + * $initiallyLazy + */ + override def takeRight(n: Int): LazyList[A] = + if (n <= 0 || knownIsEmpty) LazyList.empty + else LazyList.takeRightImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all but the first `from` elements. + */ + override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) + + /** @inheritdoc + * + * $evaluatesAllElements + */ + override def reverse: LazyList[A] = reverseOnto(LazyList.empty) + + // need contravariant type B to make the compiler happy - still returns LazyList[A] + @tailrec + private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = + if (isEmpty) tl + else tail.reverseOnto(newLL(sCons(head, tl))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def diff[B >: A](that: collection.Seq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else super.diff(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def intersect[B >: A](that: collection.Seq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else super.intersect(that) + + @tailrec + private def lengthGt(len: Int): Boolean = + if (len < 0) true + else if (isEmpty) false + else tail.lengthGt(len - 1) + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * a single element ahead of the iterator is evaluated. + */ + override def grouped(size: Int): Iterator[LazyList[A]] = { + require(size > 0, "size must be positive, but was " + size) + slidingImpl(size = size, step = size) + } + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * `size - step max 1` elements ahead of the iterator are evaluated. 
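+ *
+ * For example (a sketch):
+ *
+ * {{{
+ * LazyList.from(1).sliding(3, 2).take(2).map(_.toList).toList
+ * // List(List(1, 2, 3), List(3, 4, 5))
+ * }}}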
+ */ + override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { + require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") + slidingImpl(size = size, step = step) + } + + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = + if (knownIsEmpty) Iterator.empty + else new SlidingIterator[A](this, size = size, step = step) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def padTo[B >: A](len: Int, elem: B): LazyList[B] = { + if (len <= 0) this + else newLL { + if (isEmpty) LazyList.fill(len)(elem).state + else sCons(head, tail.padTo(len - 1, elem)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + if (knownIsEmpty) LazyList from other + else patchImpl(from, other, replaced) + + private def patchImpl[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + newLL { + if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyList.dropImpl(this, replaced).state) + else if (isEmpty) stateFromIterator(other.iterator) + else sCons(head, tail.patchImpl(from - 1, other, replaced)) + } + + /** @inheritdoc + * + * $evaluatesAllElements + */ + // overridden just in case a lazy implementation is developed at some point + override def transpose[B](implicit asIterable: A => collection.Iterable[B]): LazyList[LazyList[B]] = super.transpose + + /** @inheritdoc + * + * $preservesLaziness + */ + override def updated[B >: A](index: Int, elem: B): LazyList[B] = + if (index < 0) throw new IndexOutOfBoundsException(s"$index") + else updatedImpl(index, elem, index) + + private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = { + newLL { + if (index <= 0) sCons(elem, tail) + else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) + else sCons(head, tail.updatedImpl(index - 1, elem, startIndex)) + } + } + + /** Appends all elements of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`. + * + * $evaluatesAllElements + * + * @param sb the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. 
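+ *
+ * For example (a sketch; the start, separator and end strings are arbitrary):
+ *
+ * {{{
+ * val sb = new StringBuilder
+ * LazyList(1, 2, 3).addString(sb, "[", ", ", "]")
+ * sb.result() // "[1, 2, 3]"
+ * }}}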
+ */
+ override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = {
+ force
+ addStringNoForce(sb.underlying, start, sep, end)
+ sb
+ }
+
+ private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = {
+ b.append(start)
+ if (!stateDefined) b.append("<not computed>")
+ else if (!isEmpty) {
+ b.append(head)
+ var cursor = this
+ @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head)
+ var scout = tail
+ @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty
+ if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) {
+ cursor = scout
+ if (scoutNonEmpty) {
+ scout = scout.tail
+ // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings
+ while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) {
+ appendCursorElement()
+ cursor = cursor.tail
+ scout = scout.tail
+ if (scoutNonEmpty) scout = scout.tail
+ }
+ }
+ }
+ if (!scoutNonEmpty) { // Not a cycle, scout hit an end
+ while (cursor ne scout) {
+ appendCursorElement()
+ cursor = cursor.tail
+ }
+ // if cursor (eq scout) has state defined, it is empty; else unknown state
+ if (!cursor.stateDefined) b.append(sep).append("<not computed>")
+ } else {
+ @inline def same(a: LazyList[A], b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state)
+ // Cycle.
+ // If we have a prefix of length P followed by a cycle of length C,
+ // the scout will be at position (P%C) in the cycle when the cursor
+ // enters it at P. They'll then collide when the scout advances another
+ // C - (P%C) ahead of the cursor.
+ // If we run the scout P farther, then it will be at the start of
+ // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner
+ // starts at the beginning of the prefix, they'll collide exactly at
+ // the start of the loop.
+ var runner = this
+ var k = 0
+ while (!same(runner, scout)) {
+ runner = runner.tail
+ scout = scout.tail
+ k += 1
+ }
+ // Now runner and scout are at the beginning of the cycle. Advance
+ // cursor, adding to string, until it hits; then we'll have covered
+ // everything once. If cursor is already at beginning, we'd better
+ // advance one first unless runner didn't go anywhere (in which case
+ // we've already looped once).
+ if (same(cursor, scout) && (k > 0)) {
+ appendCursorElement()
+ cursor = cursor.tail
+ }
+ while (!same(cursor, scout)) {
+ appendCursorElement()
+ cursor = cursor.tail
+ }
+ b.append(sep).append("<cycle>")
+ }
+ }
+ b.append(end)
+ }
+
+ /** $preservesLaziness
+ *
+ * @return a string representation of this collection. An undefined state is
+ * represented with `"<not computed>"` and cycles are represented with `"<cycle>"`
+ *
+ * Examples:
+ *
+ * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ;
+ * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ;
+ * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains
+ * a cycle at the fourth element.
+ */
+ override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0")
+ override def hasDefiniteSize: Boolean = {
+ if (!stateDefined) false
+ else if (isEmpty) true
+ else {
+ // Two-iterator trick (2x & 1x speed) for cycle detection.
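+ // "these" advances two steps per loop iteration while "those" advances one,
+ // so on a cyclic list the fast pointer must eventually catch the slow one,
+ // letting us answer `false` instead of looping forever.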
+ var those = this + var these = tail + while (those ne these) { + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } + } +} + +/** + * $factoryInfo + * @define coll lazy list + * @define Coll `LazyList` + */ +@SerialVersionUID(3L) +object LazyList extends SeqFactory[LazyList] { + // Eagerly evaluate cached empty instance + private[this] val _empty = newLL(State.Empty).force + + private sealed trait State[+A] extends Serializable { + def head: A + def tail: LazyList[A] + } + + private object State { + @SerialVersionUID(3L) + object Empty extends State[Nothing] { + def head: Nothing = throw new NoSuchElementException("head of empty lazy list") + def tail: LazyList[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") + } + + @SerialVersionUID(3L) + final class Cons[A](val head: A, val tail: LazyList[A]) extends State[A] + } + + /** Creates a new LazyList. */ + @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) + + /** Creates a new State.Cons. */ + @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) + + private val anyToMarker: Any => Any = _ => Statics.pfMarker + + /* All of the following `Impl` methods are carefully written so as not to + * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into + * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently + * leaking the head of the `LazyList`. Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they + * can continue their execution where they left off. 
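+ *
+ * For contrast, a version of `dropImpl` that closed over `ll` directly
+ * (illustrative only, not how the methods below are written):
+ *
+ * {{{
+ * private def leakyDropImpl[A](ll: LazyList[A], n: Int): LazyList[A] =
+ *   newLL {
+ *     // `ll` is captured by this closure, so every cell reachable from the
+ *     // head stays alive at least until the state is first evaluated
+ *     var rest = ll
+ *     var i = n
+ *     while (i > 0 && !rest.isEmpty) { rest = rest.tail; i -= 1 }
+ *     rest.state
+ *   }
+ * }}}
+ *
+ * The `restRef` rewriting below avoids exactly that: as the loop advances,
+ * the reference is overwritten, so already-consumed cells become unreachable.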
+ */ + + private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var elem: A = null.asInstanceOf[A] + var found = false + var rest = restRef // var rest = restRef.elem + while (!found && !rest.isEmpty) { + elem = rest.head + found = p(elem) != isFlipped + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty + } + } + + private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + val marker = Statics.pfMarker + val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased + + var res: B = marker.asInstanceOf[B] // safe because B is unbounded + var rest = restRef // var rest = restRef.elem + while((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { + res = pf.applyOrElse(rest.head, toMarker) + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (res.asInstanceOf[AnyRef] eq marker) State.Empty + else sCons(res, collectImpl(rest, pf)) + } + } + + private def flatMapImpl[A, B](ll: LazyList[A], f: A => IterableOnce[B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var it: Iterator[B] = null + var itHasNext = false + var rest = restRef // var rest = restRef.elem + while (!itHasNext && !rest.isEmpty) { + it = f(rest.head).iterator + itHasNext = it.hasNext + if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw + rest = rest.tail + restRef = rest // restRef.elem = rest + } + } + if (itHasNext) { + val head = it.next() + rest = rest.tail + restRef = rest // restRef.elem = rest + sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) + } else State.Empty + } + } + + private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var iRef = n // val iRef = new IntRef(n) + newLL { + var rest = restRef // var rest = restRef.elem + var i = iRef // var i = iRef.elem + while (i > 0 && !rest.isEmpty) { + rest = rest.tail + restRef = rest // restRef.elem = rest + i -= 1 + iRef = i // iRef.elem = i + } + rest.state + } + } + + private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var rest = restRef // var rest = restRef.elem + while (!rest.isEmpty && p(rest.head)) { + rest = rest.tail + restRef = rest // restRef.elem = rest + } + rest.state + } + } + + private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var scoutRef = ll // val scoutRef = new ObjectRef(ll) + var remainingRef = n // val remainingRef = new IntRef(n) + newLL { + var scout = scoutRef // var scout = scoutRef.elem + var remaining = remainingRef // var remaining = remainingRef.elem + // advance `scout` `n` elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + scout = scout.tail + scoutRef = scout // 
scoutRef.elem = scout + remaining -= 1 + remainingRef = remaining // remainingRef.elem = remaining + } + var rest = restRef // var rest = restRef.elem + // advance `rest` and `scout` in tandem until `scout` reaches the end + while(!scout.isEmpty) { + scout = scout.tail + scoutRef = scout // scoutRef.elem = scout + rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail + restRef = rest // restRef.elem = rest + } + // `rest` is the last `n` elements (or all of them) + rest.state + } + } + + /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). + */ + object cons { + /** A lazy list consisting of a given first element and remaining elements + * @param hd The first element of the result lazy list + * @param tl The remaining elements of the result lazy list + */ + def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) + + /** Maps a lazy list to its head and tail */ + def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) + } + + implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) + + final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { + /** Construct a LazyList consisting of a given first element followed by elements + * from another LazyList. + */ + def #:: [B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) + /** Construct a LazyList consisting of the concatenation of the given LazyList and + * another LazyList. + */ + def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() + } + + object #:: { + def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + if (!s.isEmpty) Some((s.head, s.tail)) else None + } + + def from[A](coll: collection.IterableOnce[A]): LazyList[A] = coll match { + case lazyList: LazyList[A] => lazyList + case _ if coll.knownSize == 0 => empty[A] + case _ => newLL(stateFromIterator(coll.iterator)) + } + + def empty[A]: LazyList[A] = _empty + + /** Creates a State from an Iterator, with another State appended after the Iterator + * is empty. + */ + private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) + else suffix + + /** Creates a State from an IterableOnce. */ + private def stateFromIterator[A](it: Iterator[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) + else State.Empty + + override def concat[A](xss: collection.Iterable[A]*): LazyList[A] = + if (xss.knownSize == 0) empty + else newLL(concatIterator(xss.iterator)) + + private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = + if (!it.hasNext) State.Empty + else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) + + /** An infinite LazyList that repeatedly applies a given function to a start value. + * + * @param start the start value of the LazyList + * @param f the function that's repeatedly applied + * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: => A)(f: A => A): LazyList[A] = + newLL { + val head = start + sCons(head, iterate(f(head))(f)) + } + + /** + * Create an infinite LazyList starting at `start` and incrementing by + * step `step`. + * + * @param start the start value of the LazyList + * @param step the increment value of the LazyList + * @return the LazyList starting at value `start`. 
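+ *
+ * For example (a sketch):
+ *
+ * {{{
+ * LazyList.from(10, 2).take(3).toList // List(10, 12, 14)
+ * }}}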
+ */ + def from(start: Int, step: Int): LazyList[Int] = + newLL(sCons(start, from(start + step, step))) + + /** + * Create an infinite LazyList starting at `start` and incrementing by `1`. + * + * @param start the start value of the LazyList + * @return the LazyList starting at value `start`. + */ + def from(start: Int): LazyList[Int] = from(start, 1) + + /** + * Create an infinite LazyList containing the given element expression (which + * is computed for each occurrence). + * + * @param elem the element composing the resulting LazyList + * @return the LazyList containing an infinite number of elem + */ + def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) + + override def fill[A](n: Int)(elem: => A): LazyList[A] = + if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty + + override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { + def at(index: Int): LazyList[A] = + if (index < n) newLL(sCons(f(index), at(index + 1))) else empty + + at(0) + } + + // significantly simpler than the iterator returned by Iterator.unfold + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = + newLL { + f(init) match { + case Some((elem, state)) => sCons(elem, unfold(state)(f)) + case None => State.Empty + } + } + + /** The builder returned by this method only evaluates elements + * of collections added to it as needed. + * + * @tparam A the type of the ${coll}’s elements + * @return A builder for $Coll objects. + */ + def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] + + private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) extends AbstractIterator[A] { + override def hasNext: Boolean = !lazyList.isEmpty + + override def next(): A = + if (lazyList.isEmpty) Iterator.empty.next() + else { + val res = lazyList.head + lazyList = lazyList.tail + res + } + } + + private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) + extends AbstractIterator[LazyList[A]] { + private val minLen = size - step max 0 + private var first = true + + def hasNext: Boolean = + if (first) !lazyList.isEmpty + else lazyList.lengthGt(minLen) + + def next(): LazyList[A] = { + if (!hasNext) Iterator.empty.next() + else { + first = false + val list = lazyList + lazyList = list.drop(step) + list.take(size) + } + } + } + + private final class WithFilter[A] private[LazyList](lazyList: LazyList[A], p: A => Boolean) + extends collection.WithFilter[A, LazyList] { + private[this] val filtered = lazyList.filter(p) + def map[B](f: A => B): LazyList[B] = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => Boolean): collection.WithFilter[A, LazyList] = new WithFilter(filtered, q) + } + + private final class LazyBuilder[A] extends ReusableBuilder[A, LazyList[A]] { + import LazyBuilder._ + + private[this] var next: DeferredState[A] = _ + private[this] var list: LazyList[A] = _ + + clear() + + override def clear(): Unit = { + val deferred = new DeferredState[A] + list = newLL(deferred.eval()) + next = deferred + } + + override def result(): LazyList[A] = { + next init State.Empty + list + } + + override def addOne(elem: A): this.type = { + val deferred = new DeferredState[A] + next init sCons(elem, newLL(deferred.eval())) + next = deferred + this + } + + // lazy implementation which doesn't evaluate the collection being added + override def addAll(xs: IterableOnce[A]): this.type = { + if (xs.knownSize != 
0) { + val deferred = new DeferredState[A] + next init stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval()) + next = deferred + } + this + } + } + + private object LazyBuilder { + final class DeferredState[A] { + private[this] var _state: () => State[A] = _ + + def eval(): State[A] = { + val state = _state + if (state == null) throw new IllegalStateException("uninitialized") + state() + } + + // racy + def init(state: => State[A]): Unit = { + if (_state != null) throw new IllegalStateException("already initialized") + _state = () => state + } + } + } + + /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization + * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. + */ + @SerialVersionUID(3L) + final class SerializationProxy[A](@transient protected var coll: LazyList[A]) extends Serializable { + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while (these.knownNonEmpty) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new mutable.ListBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[LazyList[A]] + // scala/scala#10118: caution that no code path can evaluate `tail.state` + // before the resulting LazyList is returned + val it = init.toList.iterator + coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state)) + } + + private[this] def readResolve(): Any = coll + } +} From bf58e2baea9ba91aebc7186828541020de4a67a5 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 21:43:46 +0100 Subject: [PATCH 116/216] Survive "cannot establish a reference" errors in TreeTypeMap --- compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 955892b2ae22..d2e18729836b 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -105,7 +105,8 @@ class TreeTypeMap( tree1.withType(mapType(tree1.tpe)) match { case id: Ident => if needsSelect(id.tpe) then - ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + try ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + catch case ex: TypeError => super.transform(id) else super.transform(id) case sel: Select => From 6ae16a4d125bb87ece95c882c2436397a934b8a8 Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 21:47:47 +0100 Subject: [PATCH 117/216] Add checked LazyList to stdlib --- .../collection/immutable/LazyList.scala | 219 +++++++++--------- 1 file changed, 104 insertions(+), 115 deletions(-) diff --git a/tests/pos-special/stdlib/collection/immutable/LazyList.scala b/tests/pos-special/stdlib/collection/immutable/LazyList.scala index 8b7ad26dc5ae..711453d904a6 100644 --- a/tests/pos-special/stdlib/collection/immutable/LazyList.scala +++ b/tests/pos-special/stdlib/collection/immutable/LazyList.scala 
@@ -22,6 +22,8 @@ import scala.collection.generic.SerializeEnd import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder} import scala.language.implicitConversions import scala.runtime.Statics +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** This class implements an immutable linked list. We call it "lazy" * because it computes its elements only when they are needed. @@ -237,19 +239,20 @@ import scala.runtime.Statics * @define evaluatesAllElements This method evaluates all elements of the collection. */ @SerialVersionUID(3L) -final class LazyList[+A] private(private[this] var lazyState: () => LazyList.State[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with LinearSeqOps[A, LazyList, LazyList[A]] +final class LazyList[+A] private(private[this] var lazyState: () => LazyList.State[A]^) + extends AbstractIterable[A] + with Iterable[A] + with IterableOps[A, LazyList, LazyList[A]] with IterableFactoryDefaults[A, LazyList] with Serializable { + this: LazyList[A]^ => import LazyList._ @volatile private[this] var stateEvaluated: Boolean = false @inline private def stateDefined: Boolean = stateEvaluated private[this] var midEvaluation = false - private lazy val state: State[A] = { + private lazy val state: State[A]^ = { // if it's already mid-evaluation, we're stuck in an infinite // self-referential loop (also it's empty) if (midEvaluation) { @@ -264,7 +267,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta res } - override def iterableFactory: SeqFactory[LazyList] = LazyList + override def iterableFactory: IterableFactory[LazyList] = LazyList override def isEmpty: Boolean = state eq State.Empty @@ -276,7 +279,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta override def head: A = state.head - override def tail: LazyList[A] = state.tail + override def tail: LazyList[A]^{this} = state.tail @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) @@ -302,7 +305,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta */ def force: this.type = { // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: LazyList[A] = this + var these, those: LazyList[A]^{this} = this if (!these.isEmpty) { these = these.tail } @@ -322,7 +325,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The iterator returned by this method preserves laziness; elements are * only evaluated individually as needed. */ - override def iterator: Iterator[A] = + override def iterator: Iterator[A]^{this} = if (knownIsEmpty) Iterator.empty else new LazyIterator(this) @@ -359,7 +362,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta else tail.foldLeft(op(z, head))(op) // State.Empty doesn't use the SerializationProxy - protected[this] def writeReplace(): AnyRef = + protected[this] def writeReplace(): AnyRef^{this} = if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this override protected[this] def className = "LazyList" @@ -373,7 +376,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * @param suffix The collection that gets appended to this lazy list * @return The lazy list containing elements of this lazy list and the iterable object. 
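 *
 * For example (a sketch; the by-name suffix is what makes the self-reference
 * below legal):
 * {{{
 * lazy val ones: LazyList[Int] = LazyList(1).lazyAppendedAll(ones) // 1, 1, 1, ...
 * ones.take(3).toList // List(1, 1, 1)
 * }}}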
*/ - def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): LazyList[B] = + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]^): LazyList[B]^{this, suffix} = newLL { if (isEmpty) suffix match { case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList @@ -389,7 +392,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $appendStackSafety */ - override def appendedAll[B >: A](suffix: IterableOnce[B]): LazyList[B] = + def appendedAll[B >: A](suffix: IterableOnce[B]^): LazyList[B]^{this, suffix} = if (knownIsEmpty) LazyList.from(suffix) else lazyAppendedAll(suffix) @@ -399,7 +402,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $appendStackSafety */ - override def appended[B >: A](elem: B): LazyList[B] = + def appended[B >: A](elem: B): LazyList[B]^{this} = if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) else lazyAppendedAll(Iterator.single(elem)) @@ -407,11 +410,11 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def scanLeft[B](z: B)(op: (B, A) => B): LazyList[B] = + override def scanLeft[B](z: B)(op: (B, A) => B): LazyList[B]^{this, op} = if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) else newLL(scanLeftState(z)(op)) - private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = + private def scanLeftState[B](z: B)(op: (B, A) => B): State[B]^{this, op} = sCons( z, newLL { @@ -431,7 +434,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") else { var reducedRes: B = this.head - var left: LazyList[A] = this.tail + var left: LazyList[A]^{this} = this.tail while (!left.isEmpty) { reducedRes = f(reducedRes, left.head) left = left.tail @@ -444,13 +447,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) + override def partition(p: A => Boolean): (LazyList[A]^{this, p}, LazyList[A]^{this, p}) = (filter(p), filterNot(p)) /** @inheritdoc * * $preservesLaziness */ - override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1]^{this, f}, LazyList[A2]^{this, f}) = { val (left, right) = map(f).partition(_.isLeft) (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) } @@ -459,7 +462,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def filter(pred: A => Boolean): LazyList[A] = + override def filter(pred: A => Boolean): LazyList[A]^{this, pred} = if (knownIsEmpty) LazyList.empty else LazyList.filterImpl(this, pred, isFlipped = false) @@ -467,7 +470,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def filterNot(pred: A => Boolean): LazyList[A] = + override def filterNot(pred: A => Boolean): LazyList[A]^{this, pred} = if (knownIsEmpty) LazyList.empty else LazyList.filterImpl(this, pred, isFlipped = true) @@ -479,20 +482,20 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The `collection.WithFilter` returned by this method preserves laziness; elements are * only evaluated individually as needed. 
*/ - override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyList] = + override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyList]^{this, p} = new LazyList.WithFilter(coll, p) /** @inheritdoc * * $preservesLaziness */ - override def prepended[B >: A](elem: B): LazyList[B] = newLL(sCons(elem, this)) + def prepended[B >: A](elem: B): LazyList[B] = newLL(sCons(elem, this)) /** @inheritdoc * * $preservesLaziness */ - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): LazyList[B] = + def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): LazyList[B]^{this, prefix} = if (knownIsEmpty) LazyList.from(prefix) else if (prefix.knownSize == 0) this else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) @@ -501,7 +504,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def map[B](f: A => B): LazyList[B] = + override def map[B](f: A => B): LazyList[B]^{this, f} = if (knownIsEmpty) LazyList.empty else (mapImpl(f): @inline) @@ -509,9 +512,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def tapEach[U](f: A => U): LazyList[A] = map { a => f(a); a } + override def tapEach[U](f: A => U): LazyList[A]^{this, f} = map { a => f(a); a } - private def mapImpl[B](f: A => B): LazyList[B] = + private def mapImpl[B](f: A => B): LazyList[B]^{this, f} = newLL { if (isEmpty) State.Empty else sCons(f(head), tail.mapImpl(f)) @@ -521,7 +524,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def collect[B](pf: PartialFunction[A, B]): LazyList[B] = + override def collect[B](pf: PartialFunction[A, B]^): LazyList[B]^{this, pf} = if (knownIsEmpty) LazyList.empty else LazyList.collectImpl(this, pf) @@ -559,7 +562,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta */ // optimisations are not for speed, but for functionality // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - override def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = + override def flatMap[B](f: A => IterableOnce[B]^): LazyList[B]^{this, f} = if (knownIsEmpty) LazyList.empty else LazyList.flatMapImpl(this, f) @@ -567,17 +570,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def flatten[B](implicit asIterable: A => IterableOnce[B]): LazyList[B] = flatMap(asIterable) + override def flatten[B](implicit asIterable: A -> IterableOnce[B]): LazyList[B]^{this} = flatMap(asIterable) /** @inheritdoc * * $preservesLaziness */ - override def zip[B](that: collection.IterableOnce[B]): LazyList[(A, B)] = + override def zip[B](that: collection.IterableOnce[B]^): LazyList[(A, B)]^{this, that} = if (this.knownIsEmpty || that.knownSize == 0) LazyList.empty else newLL(zipState(that.iterator)) - private def zipState[B](it: Iterator[B]): State[(A, B)] = + private def zipState[B](it: Iterator[B]^): State[(A, B)]^{this, it} = if (this.isEmpty || !it.hasNext) State.Empty else sCons((head, it.next()), newLL { tail zipState it }) @@ -585,13 +588,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def zipWithIndex: LazyList[(A, Int)] = this zip LazyList.from(0) + override def zipWithIndex: LazyList[(A, Int)]^{this} = this zip LazyList.from(0) /** @inheritdoc * * 
$preservesLaziness */ - override def zipAll[A1 >: A, B](that: collection.Iterable[B], thisElem: A1, thatElem: B): LazyList[(A1, B)] = { + override def zipAll[A1 >: A, B](that: collection.Iterable[B]^, thisElem: A1, thatElem: B): LazyList[(A1, B)]^{this, that} = { if (this.knownIsEmpty) { if (that.knownSize == 0) LazyList.empty else LazyList.continually(thisElem) zip that @@ -601,7 +604,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } } - private def zipAllState[A1 >: A, B](it: Iterator[B], thisElem: A1, thatElem: B): State[(A1, B)] = { + private def zipAllState[A1 >: A, B](it: Iterator[B]^, thisElem: A1, thatElem: B): State[(A1, B)]^{this, it} = { if (it.hasNext) { if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyList.continually(thisElem) zipState it }) else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) @@ -620,21 +623,21 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * only evaluated individually as needed. */ // just in case it can be meaningfully overridden at some point - override def lazyZip[B](that: collection.Iterable[B]): LazyZip2[A, B, LazyList.this.type] = + override def lazyZip[B](that: collection.Iterable[B]^): LazyZip2[A, B, LazyList.this.type]^{this, that} = super.lazyZip(that) /** @inheritdoc * * $preservesLaziness */ - override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = + override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (LazyList[A1]^{this}, LazyList[A2]^{this}) = (map(asPair(_)._1), map(asPair(_)._2)) /** @inheritdoc * * $preservesLaziness */ - override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = + override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (LazyList[A1]^{this}, LazyList[A2]^{this}, LazyList[A3]^{this}) = (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) /** @inheritdoc @@ -642,7 +645,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * $initiallyLazy * Additionally, it preserves laziness for all except the first `n` elements. */ - override def drop(n: Int): LazyList[A] = + override def drop(n: Int): LazyList[A]^{this} = if (n <= 0) this else if (knownIsEmpty) LazyList.empty else LazyList.dropImpl(this, n) @@ -652,7 +655,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * $initiallyLazy * Additionally, it preserves laziness for all elements after the predicate returns `false`. 
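 *
 * In the capture-checked variant the result type below, `LazyList[A]^{this, p}`,
 * records that the suspended list may retain both the receiver and the
 * predicate `p`. For instance (a sketch, assuming capture checking is enabled):
 * {{{
 * def skipSmall(xs: LazyList[Int]^): LazyList[Int]^{xs} =
 *   xs.dropWhile(_ < 1000) // a pure function literal adds nothing to the capture set
 * }}}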
*/ - override def dropWhile(p: A => Boolean): LazyList[A] = + override def dropWhile(p: A => Boolean): LazyList[A]^{this, p} = if (knownIsEmpty) LazyList.empty else LazyList.dropWhileImpl(this, p) @@ -660,7 +663,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $initiallyLazy */ - override def dropRight(n: Int): LazyList[A] = { + override def dropRight(n: Int): LazyList[A]^{this} = { if (n <= 0) this else if (knownIsEmpty) LazyList.empty else newLL { @@ -675,7 +678,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } } - private def dropRightState(scout: LazyList[_]): State[A] = + private def dropRightState(scout: LazyList[_]^): State[A]^{this, scout} = if (scout.isEmpty) State.Empty else sCons(head, newLL(tail.dropRightState(scout.tail))) @@ -699,11 +702,11 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def takeWhile(p: A => Boolean): LazyList[A] = + override def takeWhile(p: A => Boolean): LazyList[A]^{this, p} = if (knownIsEmpty) LazyList.empty else (takeWhileImpl(p): @inline) - private def takeWhileImpl(p: A => Boolean): LazyList[A] = + private def takeWhileImpl(p: A => Boolean): LazyList[A]^{this, p} = newLL { if (isEmpty || !p(head)) State.Empty else sCons(head, tail.takeWhileImpl(p)) @@ -713,7 +716,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $initiallyLazy */ - override def takeRight(n: Int): LazyList[A] = + override def takeRight(n: Int): LazyList[A]^{this} = if (n <= 0 || knownIsEmpty) LazyList.empty else LazyList.takeRightImpl(this, n) @@ -722,13 +725,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * $initiallyLazy * Additionally, it preserves laziness for all but the first `from` elements. 
*/ - override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) + override def slice(from: Int, until: Int): LazyList[A]^{this} = take(until).drop(from) /** @inheritdoc * * $evaluatesAllElements */ - override def reverse: LazyList[A] = reverseOnto(LazyList.empty) + def reverse: LazyList[A] = reverseOnto(LazyList.empty) // need contravariant type B to make the compiler happy - still returns LazyList[A] @tailrec @@ -736,22 +739,6 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta if (isEmpty) tl else tail.reverseOnto(newLL(sCons(head, tl))) - /** @inheritdoc - * - * $preservesLaziness - */ - override def diff[B >: A](that: collection.Seq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else super.diff(that) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def intersect[B >: A](that: collection.Seq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else super.intersect(that) - @tailrec private def lengthGt(len: Int): Boolean = if (len < 0) true @@ -786,7 +773,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def padTo[B >: A](len: Int, elem: B): LazyList[B] = { + def padTo[B >: A](len: Int, elem: B): LazyList[B]^{this} = { if (len <= 0) this else newLL { if (isEmpty) LazyList.fill(len)(elem).state @@ -798,11 +785,11 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyList[B]^{this, other} = if (knownIsEmpty) LazyList from other else patchImpl(from, other, replaced) - private def patchImpl[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + private def patchImpl[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyList[B]^{this, other} = newLL { if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyList.dropImpl(this, replaced).state) else if (isEmpty) stateFromIterator(other.iterator) @@ -814,17 +801,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * $evaluatesAllElements */ // overridden just in case a lazy implementation is developed at some point - override def transpose[B](implicit asIterable: A => collection.Iterable[B]): LazyList[LazyList[B]] = super.transpose + override def transpose[B](implicit asIterable: A -> collection.Iterable[B]): LazyList[LazyList[B]]^{this} = super.transpose /** @inheritdoc * * $preservesLaziness */ - override def updated[B >: A](index: Int, elem: B): LazyList[B] = + def updated[B >: A](index: Int, elem: B): LazyList[B]^{this} = if (index < 0) throw new IndexOutOfBoundsException(s"$index") else updatedImpl(index, elem, index) - private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = { + private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B]^{this} = { newLL { if (index <= 0) sCons(elem, tail) else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) @@ -883,7 +870,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta // if cursor (eq scout) has state defined, it is empty; else unknown state if (!cursor.stateDefined) b.append(sep).append("") } else { - @inline def same(a: LazyList[A], b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state) + @inline def same(a: LazyList[A]^, b: LazyList[A]^): 
Boolean = (a eq b) || (a.state eq b.state) // Cycle. // If we have a prefix of length P followed by a cycle of length C, // the scout will be at position (P%C) in the cycle when the cursor @@ -966,13 +953,14 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * @define Coll `LazyList` */ @SerialVersionUID(3L) -object LazyList extends SeqFactory[LazyList] { +object LazyList extends IterableFactory[LazyList] { // Eagerly evaluate cached empty instance private[this] val _empty = newLL(State.Empty).force private sealed trait State[+A] extends Serializable { + this: State[A]^ => def head: A - def tail: LazyList[A] + def tail: LazyList[A]^ } private object State { @@ -983,14 +971,14 @@ object LazyList extends SeqFactory[LazyList] { } @SerialVersionUID(3L) - final class Cons[A](val head: A, val tail: LazyList[A]) extends State[A] + final class Cons[A](val head: A, val tail: LazyList[A]^) extends State[A] } /** Creates a new LazyList. */ - @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) + @inline private def newLL[A](state: => State[A]^): LazyList[A]^{state} = new LazyList[A](() => state) /** Creates a new State.Cons. */ - @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) + @inline private def sCons[A](hd: A, tl: LazyList[A]^): State[A]^{tl} = new State.Cons[A](hd, tl) private val anyToMarker: Any => Any = _ => Statics.pfMarker @@ -1002,7 +990,7 @@ object LazyList extends SeqFactory[LazyList] { * can continue their execution where they left off. */ - private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { + private def filterImpl[A](ll: LazyList[A]^, p: A => Boolean, isFlipped: Boolean): LazyList[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) newLL { @@ -1019,7 +1007,7 @@ object LazyList extends SeqFactory[LazyList] { } } - private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { + private def collectImpl[A, B](ll: LazyList[A]^, pf: PartialFunction[A, B]^): LazyList[B]^{ll, pf} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) newLL { @@ -1038,11 +1026,11 @@ object LazyList extends SeqFactory[LazyList] { } } - private def flatMapImpl[A, B](ll: LazyList[A], f: A => IterableOnce[B]): LazyList[B] = { + private def flatMapImpl[A, B](ll: LazyList[A]^, f: A => IterableOnce[B]^): LazyList[B]^{ll, f} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) newLL { - var it: Iterator[B] = null + var it: Iterator[B]^{ll, f} = null var itHasNext = false var rest = restRef // var rest = restRef.elem while (!itHasNext && !rest.isEmpty) { @@ -1062,7 +1050,7 @@ object LazyList extends SeqFactory[LazyList] { } } - private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + private def dropImpl[A](ll: LazyList[A]^, n: Int): LazyList[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) var iRef = n // val iRef = new IntRef(n) @@ -1079,7 +1067,7 @@ object LazyList extends SeqFactory[LazyList] { } } - private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { + private def dropWhileImpl[A](ll: LazyList[A]^, p: A => Boolean): LazyList[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE 
HEAD var restRef = ll // val restRef = new ObjectRef(ll) newLL { @@ -1092,7 +1080,7 @@ object LazyList extends SeqFactory[LazyList] { } } - private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + private def takeRightImpl[A](ll: LazyList[A]^, n: Int): LazyList[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) var scoutRef = ll // val scoutRef = new ObjectRef(ll) @@ -1127,31 +1115,29 @@ object LazyList extends SeqFactory[LazyList] { * @param hd The first element of the result lazy list * @param tl The remaining elements of the result lazy list */ - def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) + def apply[A](hd: => A, tl: => LazyList[A]^): LazyList[A]^{hd, tl} = newLL(sCons(hd, newLL(tl.state))) /** Maps a lazy list to its head and tail */ - def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) + def unapply[A](xs: LazyList[A]^): Option[(A, LazyList[A]^{xs})] = #::.unapply(xs) } - implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) - - final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { + extension [A](l: => LazyList[A]) /** Construct a LazyList consisting of a given first element followed by elements * from another LazyList. */ - def #:: [B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) + def #:: [B >: A](elem: => B): LazyList[B]^{elem, l} = newLL(sCons(elem, newLL(l.state))) + /** Construct a LazyList consisting of the concatenation of the given LazyList and * another LazyList. */ - def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() - } + def #:::[B >: A](prefix: LazyList[B]^): LazyList[B]^{prefix, l} = prefix lazyAppendedAll l object #:: { - def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + def unapply[A](s: LazyList[A]^): Option[(A, LazyList[A]^{s})] = if (!s.isEmpty) Some((s.head, s.tail)) else None } - def from[A](coll: collection.IterableOnce[A]): LazyList[A] = coll match { + def from[A](coll: collection.IterableOnce[A]^): LazyList[A]^{coll} = coll match { case lazyList: LazyList[A] => lazyList case _ if coll.knownSize == 0 => empty[A] case _ => newLL(stateFromIterator(coll.iterator)) @@ -1162,12 +1148,12 @@ object LazyList extends SeqFactory[LazyList] { /** Creates a State from an Iterator, with another State appended after the Iterator * is empty. */ - private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = + private def stateFromIteratorConcatSuffix[A](it: Iterator[A]^)(suffix: => State[A]^): State[A]^{it, suffix} = if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) else suffix /** Creates a State from an IterableOnce. 
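 * (The recursive call is wrapped in `newLL`, so the iterator is advanced
 * lazily, one element per evaluated state.)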
*/ - private def stateFromIterator[A](it: Iterator[A]): State[A] = + private def stateFromIterator[A](it: Iterator[A]^): State[A]^{it} = if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) else State.Empty @@ -1175,7 +1161,7 @@ object LazyList extends SeqFactory[LazyList] { if (xss.knownSize == 0) empty else newLL(concatIterator(xss.iterator)) - private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = + private def concatIterator[A](it: Iterator[collection.Iterable[A]]^): State[A]^{it} = if (!it.hasNext) State.Empty else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) @@ -1185,7 +1171,7 @@ object LazyList extends SeqFactory[LazyList] { * @param f the function that's repeatedly applied * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` */ - def iterate[A](start: => A)(f: A => A): LazyList[A] = + def iterate[A](start: => A)(f: A => A): LazyList[A]^{start, f} = newLL { val head = start sCons(head, iterate(f(head))(f)) @@ -1217,20 +1203,20 @@ object LazyList extends SeqFactory[LazyList] { * @param elem the element composing the resulting LazyList * @return the LazyList containing an infinite number of elem */ - def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) + def continually[A](elem: => A): LazyList[A]^{elem} = newLL(sCons(elem, continually(elem))) - override def fill[A](n: Int)(elem: => A): LazyList[A] = + override def fill[A](n: Int)(elem: => A): LazyList[A]^{elem} = if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty - override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { - def at(index: Int): LazyList[A] = + override def tabulate[A](n: Int)(f: Int => A): LazyList[A]^{f} = { + def at(index: Int): LazyList[A]^{f} = if (index < n) newLL(sCons(f(index), at(index + 1))) else empty at(0) } // significantly simpler than the iterator returned by Iterator.unfold - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A]^{f} = newLL { f(init) match { case Some((elem, state)) => sCons(elem, unfold(state)(f)) @@ -1246,7 +1232,7 @@ object LazyList extends SeqFactory[LazyList] { */ def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] - private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) extends AbstractIterator[A] { + private class LazyIterator[+A](private[this] var lazyList: LazyList[A]^) extends AbstractIterator[A] { override def hasNext: Boolean = !lazyList.isEmpty override def next(): A = @@ -1258,8 +1244,9 @@ object LazyList extends SeqFactory[LazyList] { } } - private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) + private class SlidingIterator[A](private[this] var lazyList: LazyList[A]^, size: Int, step: Int) extends AbstractIterator[LazyList[A]] { + this: SlidingIterator[A]^ => private val minLen = size - step max 0 private var first = true @@ -1278,20 +1265,21 @@ object LazyList extends SeqFactory[LazyList] { } } - private final class WithFilter[A] private[LazyList](lazyList: LazyList[A], p: A => Boolean) + private final class WithFilter[A] private[LazyList](lazyList: LazyList[A]^, p: A => Boolean) extends collection.WithFilter[A, LazyList] { + this: WithFilter[A]^ => private[this] val filtered = lazyList.filter(p) - def map[B](f: A => B): LazyList[B] = filtered.map(f) - def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = filtered.flatMap(f) + def map[B](f: A => B): 
LazyList[B]^{this, f} = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]^): LazyList[B]^{this, f} = filtered.flatMap(f) def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): collection.WithFilter[A, LazyList] = new WithFilter(filtered, q) + def withFilter(q: A => Boolean): collection.WithFilter[A, LazyList]^{this, q} = new WithFilter(filtered, q) } private final class LazyBuilder[A] extends ReusableBuilder[A, LazyList[A]] { import LazyBuilder._ - private[this] var next: DeferredState[A] = _ - private[this] var list: LazyList[A] = _ + private[this] var next: DeferredState[A @uncheckedCaptures] = _ + private[this] var list: LazyList[A @uncheckedCaptures] = _ clear() @@ -1314,10 +1302,10 @@ object LazyList extends SeqFactory[LazyList] { } // lazy implementation which doesn't evaluate the collection being added - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { if (xs.knownSize != 0) { val deferred = new DeferredState[A] - next init stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval()) + next.init(stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval())) next = deferred } this @@ -1326,16 +1314,17 @@ object LazyList extends SeqFactory[LazyList] { private object LazyBuilder { final class DeferredState[A] { - private[this] var _state: () => State[A] = _ + this: DeferredState[A]^ => + private[this] var _state: (() => State[A]^) @uncheckedCaptures = _ - def eval(): State[A] = { + def eval(): State[A]^ = { val state = _state if (state == null) throw new IllegalStateException("uninitialized") state() } // racy - def init(state: => State[A]): Unit = { + def init(state: => State[A]^): Unit = { if (_state != null) throw new IllegalStateException("already initialized") _state = () => state } @@ -1348,7 +1337,7 @@ object LazyList extends SeqFactory[LazyList] { * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. 
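 * (On the wire, the already-evaluated prefix is written element by element and
 * terminated by a `SerializeEnd` marker, which `readObject` consumes
 * iteratively while rebuilding the list.)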
 */
 @SerialVersionUID(3L)
-  final class SerializationProxy[A](@transient protected var coll: LazyList[A]) extends Serializable {
+  final class SerializationProxy[A](@transient protected var coll: LazyList[A]^) extends Serializable {

    private[this] def writeObject(out: ObjectOutputStream): Unit = {
      out.defaultWriteObject()
@@ -1363,7 +1352,7 @@
    private[this] def readObject(in: ObjectInputStream): Unit = {
      in.defaultReadObject()
-      val init = new mutable.ListBuffer[A]
+      val init = new mutable.ListBuffer[A @uncheckedCaptures]
      var initRead = false
      while (!initRead) in.readObject match {
        case SerializeEnd => initRead = true

From 0acfb8c0047aacbe69bb14f1ce406ad017471051 Mon Sep 17 00:00:00 2001
From: odersky
Date: Thu, 2 Nov 2023 21:54:43 +0100
Subject: [PATCH 118/216] Rename LazyList to LazyListIterable

---
 .../immutable/{LazyList.scala => LazyListIterable.scala}         | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename tests/pos-special/stdlib/collection/immutable/{LazyList.scala => LazyListIterable.scala} (100%)

diff --git a/tests/pos-special/stdlib/collection/immutable/LazyList.scala b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala
similarity index 100%
rename from tests/pos-special/stdlib/collection/immutable/LazyList.scala
rename to tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala

From fd88dc1d9bf0c8e7460b0f212e7c9571128fdffb Mon Sep 17 00:00:00 2001
From: odersky
Date: Fri, 3 Nov 2023 10:43:21 +0100
Subject: [PATCH 119/216] Allow deep subtypes when compiling stdlib test

---
 .../dotty/tools/dotc/CompilationTests.scala   |   2 +-
 .../immutable/LazyListIterable.scala          | 418 +++++++++---------
 2 files changed, 213 insertions(+), 207 deletions(-)

diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala
index 798e998ef241..fa89c82fc7e7 100644
--- a/compiler/test/dotty/tools/dotc/CompilationTests.scala
+++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala
@@ -44,7 +44,7 @@ class CompilationTests {
     // Run tests for legacy lazy vals
     compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)),
     compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")),
-    compileDir("tests/pos-special/stdlib", defaultOptions),
+    compileDir("tests/pos-special/stdlib", allowDeepSubtypes),
  )

  if scala.util.Properties.isJavaAtLeast("16") then
diff --git a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala
index 711453d904a6..8d804bad13de 100644
--- a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala
+++ b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala
@@ -28,17 +28,23 @@ import annotation.unchecked.uncheckedCaptures
 /** This class implements an immutable linked list. We call it "lazy"
 * because it computes its elements only when they are needed.
 *
+ * The class extends Iterable; it is a replacement for LazyList, which
+ * implemented Seq. The reason is that under capture checking, we
+ * assume that all Seqs are strict, and LazyList broke that assumption.
+ * As a consequence, we declare LazyList deprecated and unsafe for
+ * capture checking, and replace it with the current class, LazyListIterable.
+ *
 * Elements are memoized; that is, the value of each element is computed at most once.
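+ * For instance, in the following illustrative sketch the second element is
+ * computed only on first access:
+ * {{{
+ *   val xs = 1 #:: { println("computing"); 2 } #:: LazyListIterable.empty
+ *   xs.tail.head   // prints "computing", returns 2
+ *   xs.tail.head   // returns 2 without recomputing
+ * }}}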
* * Elements are computed in-order and are never skipped. In other words, * accessing the tail causes the head to be computed first. * - * How lazy is a `LazyList`? When you have a value of type `LazyList`, you + * How lazy is a `LazyListIterable`? When you have a value of type `LazyListIterable`, you * don't know yet whether the list is empty or not. If you learn that it is non-empty, * then you also know that the head has been computed. But the tail is itself - * a `LazyList`, whose emptiness-or-not might remain undetermined. + * a `LazyListIterable`, whose emptiness-or-not might remain undetermined. * - * A `LazyList` may be infinite. For example, `LazyList.from(0)` contains + * A `LazyListIterable` may be infinite. For example, `LazyListIterable.from(0)` contains * all of the natural numbers 0, 1, 2, and so on. For infinite sequences, * some methods (such as `count`, `sum`, `max` or `min`) will not terminate. * @@ -47,7 +53,7 @@ import annotation.unchecked.uncheckedCaptures * {{{ * import scala.math.BigInt * object Main extends App { - * val fibs: LazyList[BigInt] = + * val fibs: LazyListIterable[BigInt] = * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 } * fibs.take(5).foreach(println) * } @@ -67,7 +73,7 @@ import annotation.unchecked.uncheckedCaptures * {{{ * import scala.math.BigInt * object Main extends App { - * val fibs: LazyList[BigInt] = + * val fibs: LazyListIterable[BigInt] = * BigInt(0) #:: BigInt(1) #:: * fibs.zip(fibs.tail).map{ n => * println(s"Adding \${n._1} and \${n._2}") @@ -100,22 +106,22 @@ import annotation.unchecked.uncheckedCaptures * }}} * * Note that the definition of `fibs` uses `val` not `def`. The memoization of the - * `LazyList` requires us to have somewhere to store the information and a `val` + * `LazyListIterable` requires us to have somewhere to store the information and a `val` * allows us to do that. * - * Further remarks about the semantics of `LazyList`: + * Further remarks about the semantics of `LazyListIterable`: * - * - Though the `LazyList` changes as it is accessed, this does not + * - Though the `LazyListIterable` changes as it is accessed, this does not * contradict its immutability. Once the values are memoized they do * not change. Values that have yet to be memoized still "exist", they * simply haven't been computed yet. * * - One must be cautious of memoization; it can eat up memory if you're not - * careful. That's because memoization of the `LazyList` creates a structure much like + * careful. That's because memoization of the `LazyListIterable` creates a structure much like * [[scala.collection.immutable.List]]. As long as something is holding on to * the head, the head holds on to the tail, and so on recursively. * If, on the other hand, there is nothing holding on to the head (e.g. if we used - * `def` to define the `LazyList`) then once it is no longer being used directly, + * `def` to define the `LazyListIterable`) then once it is no longer being used directly, * it disappears. 
* * - Note that some operations, including [[drop]], [[dropWhile]], @@ -135,30 +141,30 @@ import annotation.unchecked.uncheckedCaptures * } * } * - * // Our first LazyList definition will be a val definition - * val lazylist1: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * // Our first LazyListIterable definition will be a val definition + * val lazylist1: LazyListIterable[Int] = { + * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1) * loop(0) * } * * // Because lazylist1 is a val, everything that the iterator produces is held - * // by virtue of the fact that the head of the LazyList is held in lazylist1 + * // by virtue of the fact that the head of the LazyListIterable is held in lazylist1 * val it1 = lazylist1.iterator * loop("Iterator1: ", it1.next(), it1) * - * // We can redefine this LazyList such that all we have is the Iterator left - * // and allow the LazyList to be garbage collected as required. Using a def - * // to provide the LazyList ensures that no val is holding onto the head as + * // We can redefine this LazyListIterable such that all we have is the Iterator left + * // and allow the LazyListIterable to be garbage collected as required. Using a def + * // to provide the LazyListIterable ensures that no val is holding onto the head as * // is the case with lazylist1 - * def lazylist2: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * def lazylist2: LazyListIterable[Int] = { + * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1) * loop(0) * } * val it2 = lazylist2.iterator * loop("Iterator2: ", it2.next(), it2) * - * // And, of course, we don't actually need a LazyList at all for such a simple - * // problem. There's no reason to use a LazyList if you don't actually need + * // And, of course, we don't actually need a LazyListIterable at all for such a simple + * // problem. There's no reason to use a LazyListIterable if you don't actually need * // one. * val it3 = new Iterator[Int] { * var i = -1 @@ -169,7 +175,7 @@ import annotation.unchecked.uncheckedCaptures * }}} * * - In the `fibs` example earlier, the fact that `tail` works at all is of interest. - * `fibs` has an initial `(0, 1, LazyList(...))`, so `tail` is deterministic. + * `fibs` has an initial `(0, 1, LazyListIterable(...))`, so `tail` is deterministic. * If we defined `fibs` such that only `0` were concretely known, then the act * of determining `tail` would require the evaluation of `tail`, so the * computation would be unable to progress, as in this code: @@ -177,7 +183,7 @@ import annotation.unchecked.uncheckedCaptures * // The first time we try to access the tail we're going to need more * // information which will require us to recurse, which will require us to * // recurse, which... 
- * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } + * lazy val sov: LazyListIterable[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } * }}} * * The definition of `fibs` above creates a larger number of objects than @@ -186,8 +192,8 @@ import annotation.unchecked.uncheckedCaptures * fact that it has a more direct route to the numbers themselves: * * {{{ - * lazy val fib: LazyList[Int] = { - * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n) + * lazy val fib: LazyListIterable[Int] = { + * def loop(h: Int, n: Int): LazyListIterable[Int] = h #:: loop(n, h + n) * loop(1, 1) * } * }}} @@ -198,8 +204,8 @@ import annotation.unchecked.uncheckedCaptures * the tails content is deferred until the tails empty status, head or tail is * evaluated. * - * Delaying the evaluation of whether a LazyList is empty or not until it's needed - * allows LazyList to not eagerly evaluate any elements on a call to `filter`. + * Delaying the evaluation of whether a LazyListIterable is empty or not until it's needed + * allows LazyListIterable to not eagerly evaluate any elements on a call to `filter`. * * Only when it's further evaluated (which may be never!) any of the elements gets * forced. @@ -207,24 +213,24 @@ import annotation.unchecked.uncheckedCaptures * for example: * * {{{ - * def tailWithSideEffect: LazyList[Nothing] = { - * println("getting empty LazyList") - * LazyList.empty + * def tailWithSideEffect: LazyListIterable[Nothing] = { + * println("getting empty LazyListIterable") + * LazyListIterable.empty * } * - * val emptyTail = tailWithSideEffect // prints "getting empty LazyList" + * val emptyTail = tailWithSideEffect // prints "getting empty LazyListIterable" * * val suspended = 1 #:: tailWithSideEffect // doesn't print anything * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed * val filtered = tail.filter(_ => false) // still nothing is printed - * filtered.isEmpty // prints "getting empty LazyList" + * filtered.isEmpty // prints "getting empty LazyListIterable" * }}} * * @tparam A the type of the elements contained in this lazy list. * * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] * section on `LazyLists` for more information. - * @define Coll `LazyList` + * @define Coll `LazyListIterable` * @define coll lazy list * @define orderDependent * @define orderDependentFold @@ -239,14 +245,14 @@ import annotation.unchecked.uncheckedCaptures * @define evaluatesAllElements This method evaluates all elements of the collection. 
*/ @SerialVersionUID(3L) -final class LazyList[+A] private(private[this] var lazyState: () => LazyList.State[A]^) +final class LazyListIterable[+A] private(private[this] var lazyState: () => LazyListIterable.State[A]^) extends AbstractIterable[A] with Iterable[A] - with IterableOps[A, LazyList, LazyList[A]] - with IterableFactoryDefaults[A, LazyList] + with IterableOps[A, LazyListIterable, LazyListIterable[A]] + with IterableFactoryDefaults[A, LazyListIterable] with Serializable { - this: LazyList[A]^ => - import LazyList._ + this: LazyListIterable[A]^ => + import LazyListIterable._ @volatile private[this] var stateEvaluated: Boolean = false @inline private def stateDefined: Boolean = stateEvaluated @@ -256,7 +262,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta // if it's already mid-evaluation, we're stuck in an infinite // self-referential loop (also it's empty) if (midEvaluation) { - throw new RuntimeException("self-referential LazyList or a derivation thereof has no more elements") + throw new RuntimeException("self-referential LazyListIterable or a derivation thereof has no more elements") } midEvaluation = true val res = try lazyState() finally midEvaluation = false @@ -267,7 +273,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta res } - override def iterableFactory: IterableFactory[LazyList] = LazyList + override def iterableFactory: IterableFactory[LazyListIterable] = LazyListIterable override def isEmpty: Boolean = state eq State.Empty @@ -279,7 +285,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta override def head: A = state.head - override def tail: LazyList[A]^{this} = state.tail + override def tail: LazyListIterable[A]^{this} = state.tail @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) @@ -290,13 +296,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * elements of the cycle are evaluated. For example: * * {{{ - * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring + * val ring: LazyListIterable[Int] = 1 #:: 2 #:: 3 #:: ring * ring.force * ring.toString * * // prints * // - * // LazyList(1, 2, 3, ...) + * // LazyListIterable(1, 2, 3, ...) * }}} * * This method will *not* terminate for non-cyclic infinite-sized collections. @@ -305,7 +311,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta */ def force: this.type = { // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: LazyList[A]^{this} = this + var these, those: LazyListIterable[A]^{this} = this if (!these.isEmpty) { these = these.tail } @@ -335,9 +341,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * @param f The treatment to apply to each element. * @note Overridden here as final to trigger tail-call optimization, which * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying LazyList as elements + * necessary for allowing the GC to collect the underlying LazyListIterable as elements * are consumed. - * @note This function will force the realization of the entire LazyList + * @note This function will force the realization of the entire LazyListIterable * unless the `f` throws an exception. 
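 * For example, this illustrative sketch never terminates, because it realizes
 * the whole (here infinite) lazy list:
 * {{{
 *   LazyListIterable.from(0).foreach(println)
 * }}}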
*/ @tailrec @@ -348,12 +354,12 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } } - /** LazyList specialization of foldLeft which allows GC to collect along the + /** LazyListIterable specialization of foldLeft which allows GC to collect along the * way. * * @tparam B The type of value being accumulated. * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `LazyList`. + * @param op The operation to perform on successive elements of the `LazyListIterable`. * @return The accumulated value from successive applications of `op`. */ @tailrec @@ -363,9 +369,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta // State.Empty doesn't use the SerializationProxy protected[this] def writeReplace(): AnyRef^{this} = - if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this + if (knownNonEmpty) new LazyListIterable.SerializationProxy[A](this) else this - override protected[this] def className = "LazyList" + override protected[this] def className = "LazyListIterable" /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. * @@ -376,10 +382,10 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * @param suffix The collection that gets appended to this lazy list * @return The lazy list containing elements of this lazy list and the iterable object. */ - def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]^): LazyList[B]^{this, suffix} = + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = newLL { if (isEmpty) suffix match { - case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList + case lazyList: LazyListIterable[B] => lazyList.state // don't recompute the LazyListIterable case coll if coll.knownSize == 0 => State.Empty case coll => stateFromIterator(coll.iterator) } @@ -392,8 +398,8 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $appendStackSafety */ - def appendedAll[B >: A](suffix: IterableOnce[B]^): LazyList[B]^{this, suffix} = - if (knownIsEmpty) LazyList.from(suffix) + def appendedAll[B >: A](suffix: IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = + if (knownIsEmpty) LazyListIterable.from(suffix) else lazyAppendedAll(suffix) /** @inheritdoc @@ -402,16 +408,16 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $appendStackSafety */ - def appended[B >: A](elem: B): LazyList[B]^{this} = - if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) + def appended[B >: A](elem: B): LazyListIterable[B]^{this} = + if (knownIsEmpty) newLL(sCons(elem, LazyListIterable.empty)) else lazyAppendedAll(Iterator.single(elem)) /** @inheritdoc * * $preservesLaziness */ - override def scanLeft[B](z: B)(op: (B, A) => B): LazyList[B]^{this, op} = - if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) + override def scanLeft[B](z: B)(op: (B, A) => B): LazyListIterable[B]^{this, op} = + if (knownIsEmpty) newLL(sCons(z, LazyListIterable.empty)) else newLL(scanLeftState(z)(op)) private def scanLeftState[B](z: B)(op: (B, A) => B): State[B]^{this, op} = @@ -423,18 +429,18 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } ) - /** LazyList specialization of reduceLeft which allows GC to collect + /** LazyListIterable specialization of reduceLeft which allows GC to collect * along the way. 
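 *
 * For example (an illustrative sketch):
 * {{{
 *   LazyListIterable(1, 2, 3, 4).reduceLeft(_ + _)   // 10
 * }}}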
* * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `LazyList`. + * @param f The operation to perform on successive elements of the `LazyListIterable`. * @return The accumulated value from successive applications of `f`. */ override def reduceLeft[B >: A](f: (B, A) => B): B = { if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") else { var reducedRes: B = this.head - var left: LazyList[A]^{this} = this.tail + var left: LazyListIterable[A]^{this} = this.tail while (!left.isEmpty) { reducedRes = f(reducedRes, left.head) left = left.tail @@ -447,13 +453,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def partition(p: A => Boolean): (LazyList[A]^{this, p}, LazyList[A]^{this, p}) = (filter(p), filterNot(p)) + override def partition(p: A => Boolean): (LazyListIterable[A]^{this, p}, LazyListIterable[A]^{this, p}) = (filter(p), filterNot(p)) /** @inheritdoc * * $preservesLaziness */ - override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1]^{this, f}, LazyList[A2]^{this, f}) = { + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyListIterable[A1]^{this, f}, LazyListIterable[A2]^{this, f}) = { val (left, right) = map(f).partition(_.isLeft) (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) } @@ -462,17 +468,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def filter(pred: A => Boolean): LazyList[A]^{this, pred} = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = false) + override def filter(pred: A => Boolean): LazyListIterable[A]^{this, pred} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.filterImpl(this, pred, isFlipped = false) /** @inheritdoc * * $preservesLaziness */ - override def filterNot(pred: A => Boolean): LazyList[A]^{this, pred} = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = true) + override def filterNot(pred: A => Boolean): LazyListIterable[A]^{this, pred} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.filterImpl(this, pred, isFlipped = true) /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. * @@ -482,21 +488,21 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The `collection.WithFilter` returned by this method preserves laziness; elements are * only evaluated individually as needed. 
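 * For example (an illustrative sketch; elements are examined only when the
 * result is forced):
 * {{{
 *   LazyListIterable.from(1).withFilter(_ % 2 == 0).map(_ * 10).take(2).toList
 *   // List(20, 40)
 * }}}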
*/ - override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyList]^{this, p} = - new LazyList.WithFilter(coll, p) + override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, p} = + new LazyListIterable.WithFilter(coll, p) /** @inheritdoc * * $preservesLaziness */ - def prepended[B >: A](elem: B): LazyList[B] = newLL(sCons(elem, this)) + def prepended[B >: A](elem: B): LazyListIterable[B] = newLL(sCons(elem, this)) /** @inheritdoc * * $preservesLaziness */ - def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): LazyList[B]^{this, prefix} = - if (knownIsEmpty) LazyList.from(prefix) + def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): LazyListIterable[B]^{this, prefix} = + if (knownIsEmpty) LazyListIterable.from(prefix) else if (prefix.knownSize == 0) this else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) @@ -504,17 +510,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def map[B](f: A => B): LazyList[B]^{this, f} = - if (knownIsEmpty) LazyList.empty + override def map[B](f: A => B): LazyListIterable[B]^{this, f} = + if (knownIsEmpty) LazyListIterable.empty else (mapImpl(f): @inline) /** @inheritdoc * * $preservesLaziness */ - override def tapEach[U](f: A => U): LazyList[A]^{this, f} = map { a => f(a); a } + override def tapEach[U](f: A => U): LazyListIterable[A]^{this, f} = map { a => f(a); a } - private def mapImpl[B](f: A => B): LazyList[B]^{this, f} = + private def mapImpl[B](f: A => B): LazyListIterable[B]^{this, f} = newLL { if (isEmpty) State.Empty else sCons(f(head), tail.mapImpl(f)) @@ -524,9 +530,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def collect[B](pf: PartialFunction[A, B]^): LazyList[B]^{this, pf} = - if (knownIsEmpty) LazyList.empty - else LazyList.collectImpl(this, pf) + override def collect[B](pf: PartialFunction[A, B]^): LazyListIterable[B]^{this, pf} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.collectImpl(this, pf) /** @inheritdoc * @@ -537,7 +543,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = if (isEmpty) None else { - val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) + val res = pf.applyOrElse(head, LazyListIterable.anyToMarker.asInstanceOf[A => B]) if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) else Some(res) } @@ -562,22 +568,22 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta */ // optimisations are not for speed, but for functionality // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - override def flatMap[B](f: A => IterableOnce[B]^): LazyList[B]^{this, f} = - if (knownIsEmpty) LazyList.empty - else LazyList.flatMapImpl(this, f) + override def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.flatMapImpl(this, f) /** @inheritdoc * * $preservesLaziness */ - override def flatten[B](implicit asIterable: A -> IterableOnce[B]): LazyList[B]^{this} = flatMap(asIterable) + override def flatten[B](implicit asIterable: A -> IterableOnce[B]): LazyListIterable[B]^{this} = flatMap(asIterable) /** @inheritdoc * * $preservesLaziness */ - override def zip[B](that: 
collection.IterableOnce[B]^): LazyList[(A, B)]^{this, that} = - if (this.knownIsEmpty || that.knownSize == 0) LazyList.empty + override def zip[B](that: collection.IterableOnce[B]^): LazyListIterable[(A, B)]^{this, that} = + if (this.knownIsEmpty || that.knownSize == 0) LazyListIterable.empty else newLL(zipState(that.iterator)) private def zipState[B](it: Iterator[B]^): State[(A, B)]^{this, it} = @@ -588,29 +594,29 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def zipWithIndex: LazyList[(A, Int)]^{this} = this zip LazyList.from(0) + override def zipWithIndex: LazyListIterable[(A, Int)]^{this} = this zip LazyListIterable.from(0) /** @inheritdoc * * $preservesLaziness */ - override def zipAll[A1 >: A, B](that: collection.Iterable[B]^, thisElem: A1, thatElem: B): LazyList[(A1, B)]^{this, that} = { + override def zipAll[A1 >: A, B](that: collection.Iterable[B]^, thisElem: A1, thatElem: B): LazyListIterable[(A1, B)]^{this, that} = { if (this.knownIsEmpty) { - if (that.knownSize == 0) LazyList.empty - else LazyList.continually(thisElem) zip that + if (that.knownSize == 0) LazyListIterable.empty + else LazyListIterable.continually(thisElem) zip that } else { - if (that.knownSize == 0) zip(LazyList.continually(thatElem)) + if (that.knownSize == 0) zip(LazyListIterable.continually(thatElem)) else newLL(zipAllState(that.iterator, thisElem, thatElem)) } } private def zipAllState[A1 >: A, B](it: Iterator[B]^, thisElem: A1, thatElem: B): State[(A1, B)]^{this, it} = { if (it.hasNext) { - if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyList.continually(thisElem) zipState it }) + if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyListIterable.continually(thisElem) zipState it }) else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) } else { if (this.isEmpty) State.Empty - else sCons((this.head, thatElem), this.tail zip LazyList.continually(thatElem)) + else sCons((this.head, thatElem), this.tail zip LazyListIterable.continually(thatElem)) } } @@ -623,21 +629,21 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * only evaluated individually as needed. */ // just in case it can be meaningfully overridden at some point - override def lazyZip[B](that: collection.Iterable[B]^): LazyZip2[A, B, LazyList.this.type]^{this, that} = + override def lazyZip[B](that: collection.Iterable[B]^): LazyZip2[A, B, LazyListIterable.this.type]^{this, that} = super.lazyZip(that) /** @inheritdoc * * $preservesLaziness */ - override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (LazyList[A1]^{this}, LazyList[A2]^{this}) = + override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}) = (map(asPair(_)._1), map(asPair(_)._2)) /** @inheritdoc * * $preservesLaziness */ - override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (LazyList[A1]^{this}, LazyList[A2]^{this}, LazyList[A3]^{this}) = + override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}, LazyListIterable[A3]^{this}) = (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) /** @inheritdoc @@ -645,27 +651,27 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * $initiallyLazy * Additionally, it preserves laziness for all except the first `n` elements. 
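 * For example (an illustrative sketch):
 * {{{
 *   LazyListIterable.from(0).drop(2).head   // 2
 * }}}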
*/ - override def drop(n: Int): LazyList[A]^{this} = + override def drop(n: Int): LazyListIterable[A]^{this} = if (n <= 0) this - else if (knownIsEmpty) LazyList.empty - else LazyList.dropImpl(this, n) + else if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.dropImpl(this, n) /** @inheritdoc * * $initiallyLazy * Additionally, it preserves laziness for all elements after the predicate returns `false`. */ - override def dropWhile(p: A => Boolean): LazyList[A]^{this, p} = - if (knownIsEmpty) LazyList.empty - else LazyList.dropWhileImpl(this, p) + override def dropWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.dropWhileImpl(this, p) /** @inheritdoc * * $initiallyLazy */ - override def dropRight(n: Int): LazyList[A]^{this} = { + override def dropRight(n: Int): LazyListIterable[A]^{this} = { if (n <= 0) this - else if (knownIsEmpty) LazyList.empty + else if (knownIsEmpty) LazyListIterable.empty else newLL { var scout = this var remaining = n @@ -678,7 +684,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } } - private def dropRightState(scout: LazyList[_]^): State[A]^{this, scout} = + private def dropRightState(scout: LazyListIterable[_]^): State[A]^{this, scout} = if (scout.isEmpty) State.Empty else sCons(head, newLL(tail.dropRightState(scout.tail))) @@ -686,12 +692,12 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def take(n: Int): LazyList[A] = - if (knownIsEmpty) LazyList.empty + override def take(n: Int): LazyListIterable[A] = + if (knownIsEmpty) LazyListIterable.empty else (takeImpl(n): @inline) - private def takeImpl(n: Int): LazyList[A] = { - if (n <= 0) LazyList.empty + private def takeImpl(n: Int): LazyListIterable[A] = { + if (n <= 0) LazyListIterable.empty else newLL { if (isEmpty) State.Empty else sCons(head, tail.takeImpl(n - 1)) @@ -702,11 +708,11 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def takeWhile(p: A => Boolean): LazyList[A]^{this, p} = - if (knownIsEmpty) LazyList.empty + override def takeWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = + if (knownIsEmpty) LazyListIterable.empty else (takeWhileImpl(p): @inline) - private def takeWhileImpl(p: A => Boolean): LazyList[A]^{this, p} = + private def takeWhileImpl(p: A => Boolean): LazyListIterable[A]^{this, p} = newLL { if (isEmpty || !p(head)) State.Empty else sCons(head, tail.takeWhileImpl(p)) @@ -716,26 +722,26 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $initiallyLazy */ - override def takeRight(n: Int): LazyList[A]^{this} = - if (n <= 0 || knownIsEmpty) LazyList.empty - else LazyList.takeRightImpl(this, n) + override def takeRight(n: Int): LazyListIterable[A]^{this} = + if (n <= 0 || knownIsEmpty) LazyListIterable.empty + else LazyListIterable.takeRightImpl(this, n) /** @inheritdoc * * $initiallyLazy * Additionally, it preserves laziness for all but the first `from` elements. 
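 * For example (an illustrative sketch):
 * {{{
 *   val s = LazyListIterable.from(0).slice(2, 5)   // nothing evaluated yet
 *   s.head                                         // 2
 * }}}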
*/ - override def slice(from: Int, until: Int): LazyList[A]^{this} = take(until).drop(from) + override def slice(from: Int, until: Int): LazyListIterable[A]^{this} = take(until).drop(from) /** @inheritdoc * * $evaluatesAllElements */ - def reverse: LazyList[A] = reverseOnto(LazyList.empty) + def reverse: LazyListIterable[A] = reverseOnto(LazyListIterable.empty) - // need contravariant type B to make the compiler happy - still returns LazyList[A] + // need contravariant type B to make the compiler happy - still returns LazyListIterable[A] @tailrec - private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = + private def reverseOnto[B >: A](tl: LazyListIterable[B]): LazyListIterable[B] = if (isEmpty) tl else tail.reverseOnto(newLL(sCons(head, tl))) @@ -750,7 +756,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The iterator returned by this method mostly preserves laziness; * a single element ahead of the iterator is evaluated. */ - override def grouped(size: Int): Iterator[LazyList[A]] = { + override def grouped(size: Int): Iterator[LazyListIterable[A]] = { require(size > 0, "size must be positive, but was " + size) slidingImpl(size = size, step = size) } @@ -760,12 +766,12 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The iterator returned by this method mostly preserves laziness; * `size - step max 1` elements ahead of the iterator are evaluated. */ - override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { + override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]] = { require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") slidingImpl(size = size, step = step) } - @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]] = if (knownIsEmpty) Iterator.empty else new SlidingIterator[A](this, size = size, step = step) @@ -773,10 +779,10 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - def padTo[B >: A](len: Int, elem: B): LazyList[B]^{this} = { + def padTo[B >: A](len: Int, elem: B): LazyListIterable[B]^{this} = { if (len <= 0) this else newLL { - if (isEmpty) LazyList.fill(len)(elem).state + if (isEmpty) LazyListIterable.fill(len)(elem).state else sCons(head, tail.padTo(len - 1, elem)) } } @@ -785,13 +791,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyList[B]^{this, other} = - if (knownIsEmpty) LazyList from other + def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = + if (knownIsEmpty) LazyListIterable from other else patchImpl(from, other, replaced) - private def patchImpl[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyList[B]^{this, other} = + private def patchImpl[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = newLL { - if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyList.dropImpl(this, replaced).state) + if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyListIterable.dropImpl(this, replaced).state) else if (isEmpty) stateFromIterator(other.iterator) else sCons(head, tail.patchImpl(from - 1, other, replaced)) } @@ -801,17 +807,17 @@ final class LazyList[+A] private(private[this] var 
lazyState: () => LazyList.Sta * $evaluatesAllElements */ // overridden just in case a lazy implementation is developed at some point - override def transpose[B](implicit asIterable: A -> collection.Iterable[B]): LazyList[LazyList[B]]^{this} = super.transpose + override def transpose[B](implicit asIterable: A -> collection.Iterable[B]): LazyListIterable[LazyListIterable[B]]^{this} = super.transpose /** @inheritdoc * * $preservesLaziness */ - def updated[B >: A](index: Int, elem: B): LazyList[B]^{this} = + def updated[B >: A](index: Int, elem: B): LazyListIterable[B]^{this} = if (index < 0) throw new IndexOutOfBoundsException(s"$index") else updatedImpl(index, elem, index) - private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B]^{this} = { + private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyListIterable[B]^{this} = { newLL { if (index <= 0) sCons(elem, tail) else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) @@ -870,7 +876,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta // if cursor (eq scout) has state defined, it is empty; else unknown state if (!cursor.stateDefined) b.append(sep).append("") } else { - @inline def same(a: LazyList[A]^, b: LazyList[A]^): Boolean = (a eq b) || (a.state eq b.state) + @inline def same(a: LazyListIterable[A]^, b: LazyListIterable[A]^): Boolean = (a eq b) || (a.state eq b.state) // Cycle. // If we have a prefix of length P followed by a cycle of length C, // the scout will be at position (P%C) in the cycle when the cursor @@ -913,9 +919,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * Examples: * - * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ; - * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; - * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains + * - `"LazyListIterable(4, <not computed>)"`, a non-empty lazy list ; + * - `"LazyListIterable(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; + * - `"LazyListIterable(1, 2, 3, <cycle>)"`, an infinite lazy list that contains * a cycle at the fourth element. */ override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString @@ -950,47 +956,47 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta /** * $factoryInfo * @define coll lazy list - * @define Coll `LazyList` + * @define Coll `LazyListIterable` */ @SerialVersionUID(3L) -object LazyList extends IterableFactory[LazyList] { +object LazyListIterable extends IterableFactory[LazyListIterable] { // Eagerly evaluate cached empty instance private[this] val _empty = newLL(State.Empty).force private sealed trait State[+A] extends Serializable { this: State[A]^ => def head: A - def tail: LazyList[A]^ + def tail: LazyListIterable[A]^ } private object State { @SerialVersionUID(3L) object Empty extends State[Nothing] { def head: Nothing = throw new NoSuchElementException("head of empty lazy list") - def tail: LazyList[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") + def tail: LazyListIterable[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") } @SerialVersionUID(3L) - final class Cons[A](val head: A, val tail: LazyList[A]^) extends State[A] + final class Cons[A](val head: A, val tail: LazyListIterable[A]^) extends State[A] } - /** Creates a new LazyList. 
*/ - @inline private def newLL[A](state: => State[A]^): LazyList[A]^{state} = new LazyList[A](() => state) + /** Creates a new LazyListIterable. */ + @inline private def newLL[A](state: => State[A]^): LazyListIterable[A]^{state} = new LazyListIterable[A](() => state) /** Creates a new State.Cons. */ - @inline private def sCons[A](hd: A, tl: LazyList[A]^): State[A]^{tl} = new State.Cons[A](hd, tl) + @inline private def sCons[A](hd: A, tl: LazyListIterable[A]^): State[A]^{tl} = new State.Cons[A](hd, tl) private val anyToMarker: Any => Any = _ => Statics.pfMarker /* All of the following `Impl` methods are carefully written so as not to - * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into + * leak the beginning of the `LazyListIterable`. They copy the initial `LazyListIterable` (`ll`) into * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently - * leaking the head of the `LazyList`. Additionally, the methods are written so that, should - * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they + * leaking the head of the `LazyListIterable`. Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyListIterable` or any supplied function, they * can continue their execution where they left off. */ - private def filterImpl[A](ll: LazyList[A]^, p: A => Boolean, isFlipped: Boolean): LazyList[A]^{ll, p} = { + private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) newLL { @@ -1007,7 +1013,7 @@ object LazyList extends IterableFactory[LazyList] { } } - private def collectImpl[A, B](ll: LazyList[A]^, pf: PartialFunction[A, B]^): LazyList[B]^{ll, pf} = { + private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) newLL { @@ -1026,7 +1032,7 @@ object LazyList extends IterableFactory[LazyList] { } } - private def flatMapImpl[A, B](ll: LazyList[A]^, f: A => IterableOnce[B]^): LazyList[B]^{ll, f} = { + private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) newLL { @@ -1050,7 +1056,7 @@ object LazyList extends IterableFactory[LazyList] { } } - private def dropImpl[A](ll: LazyList[A]^, n: Int): LazyList[A]^{ll} = { + private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) var iRef = n // val iRef = new IntRef(n) @@ -1067,7 +1073,7 @@ object LazyList extends IterableFactory[LazyList] { } } - private def dropWhileImpl[A](ll: LazyList[A]^, p: A => Boolean): LazyList[A]^{ll, p} = { + private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) newLL { @@ -1080,7 +1086,7 @@ object LazyList extends IterableFactory[LazyList] { } } - private def takeRightImpl[A](ll: LazyList[A]^, n: Int): LazyList[A]^{ll} = { + private def 
takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD var restRef = ll // val restRef = new ObjectRef(ll) var scoutRef = ll // val scoutRef = new ObjectRef(ll) @@ -1108,42 +1114,42 @@ object LazyList extends IterableFactory[LazyList] { } } - /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). + /** An alternative way of building and matching lazy lists using LazyListIterable.cons(hd, tl). */ object cons { /** A lazy list consisting of a given first element and remaining elements * @param hd The first element of the result lazy list * @param tl The remaining elements of the result lazy list */ - def apply[A](hd: => A, tl: => LazyList[A]^): LazyList[A]^{hd, tl} = newLL(sCons(hd, newLL(tl.state))) + def apply[A](hd: => A, tl: => LazyListIterable[A]^): LazyListIterable[A]^{hd, tl} = newLL(sCons(hd, newLL(tl.state))) /** Maps a lazy list to its head and tail */ - def unapply[A](xs: LazyList[A]^): Option[(A, LazyList[A]^{xs})] = #::.unapply(xs) + def unapply[A](xs: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{xs})] = #::.unapply(xs) } - extension [A](l: => LazyList[A]) - /** Construct a LazyList consisting of a given first element followed by elements - * from another LazyList. + extension [A](l: => LazyListIterable[A]) + /** Construct a LazyListIterable consisting of a given first element followed by elements + * from another LazyListIterable. */ - def #:: [B >: A](elem: => B): LazyList[B]^{elem, l} = newLL(sCons(elem, newLL(l.state))) + def #:: [B >: A](elem: => B): LazyListIterable[B]^{elem, l} = newLL(sCons(elem, newLL(l.state))) - /** Construct a LazyList consisting of the concatenation of the given LazyList and - * another LazyList. + /** Construct a LazyListIterable consisting of the concatenation of the given LazyListIterable and + * another LazyListIterable. */ - def #:::[B >: A](prefix: LazyList[B]^): LazyList[B]^{prefix, l} = prefix lazyAppendedAll l + def #:::[B >: A](prefix: LazyListIterable[B]^): LazyListIterable[B]^{prefix, l} = prefix lazyAppendedAll l object #:: { - def unapply[A](s: LazyList[A]^): Option[(A, LazyList[A]^{s})] = + def unapply[A](s: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{s})] = if (!s.isEmpty) Some((s.head, s.tail)) else None } - def from[A](coll: collection.IterableOnce[A]^): LazyList[A]^{coll} = coll match { - case lazyList: LazyList[A] => lazyList + def from[A](coll: collection.IterableOnce[A]^): LazyListIterable[A]^{coll} = coll match { + case lazyList: LazyListIterable[A] => lazyList case _ if coll.knownSize == 0 => empty[A] case _ => newLL(stateFromIterator(coll.iterator)) } - def empty[A]: LazyList[A] = _empty + def empty[A]: LazyListIterable[A] = _empty /** Creates a State from an Iterator, with another State appended after the Iterator * is empty. @@ -1157,7 +1163,7 @@ object LazyList extends IterableFactory[LazyList] { if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) else State.Empty - override def concat[A](xss: collection.Iterable[A]*): LazyList[A] = + override def concat[A](xss: collection.Iterable[A]*): LazyListIterable[A] = if (xss.knownSize == 0) empty else newLL(concatIterator(xss.iterator)) @@ -1165,58 +1171,58 @@ object LazyList extends IterableFactory[LazyList] { if (!it.hasNext) State.Empty else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) - /** An infinite LazyList that repeatedly applies a given function to a start value. 
+ /** An infinite LazyListIterable that repeatedly applies a given function to a start value. * - * @param start the start value of the LazyList + * @param start the start value of the LazyListIterable * @param f the function that's repeatedly applied - * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` + * @return the LazyListIterable returning the infinite sequence of values `start, f(start), f(f(start)), ...` */ - def iterate[A](start: => A)(f: A => A): LazyList[A]^{start, f} = + def iterate[A](start: => A)(f: A => A): LazyListIterable[A]^{start, f} = newLL { val head = start sCons(head, iterate(f(head))(f)) } /** - * Create an infinite LazyList starting at `start` and incrementing by + * Create an infinite LazyListIterable starting at `start` and incrementing by * step `step`. * - * @param start the start value of the LazyList - * @param step the increment value of the LazyList - * @return the LazyList starting at value `start`. + * @param start the start value of the LazyListIterable + * @param step the increment value of the LazyListIterable + * @return the LazyListIterable starting at value `start`. */ - def from(start: Int, step: Int): LazyList[Int] = + def from(start: Int, step: Int): LazyListIterable[Int] = newLL(sCons(start, from(start + step, step))) /** - * Create an infinite LazyList starting at `start` and incrementing by `1`. + * Create an infinite LazyListIterable starting at `start` and incrementing by `1`. * - * @param start the start value of the LazyList - * @return the LazyList starting at value `start`. + * @param start the start value of the LazyListIterable + * @return the LazyListIterable starting at value `start`. */ - def from(start: Int): LazyList[Int] = from(start, 1) + def from(start: Int): LazyListIterable[Int] = from(start, 1) /** - * Create an infinite LazyList containing the given element expression (which + * Create an infinite LazyListIterable containing the given element expression (which * is computed for each occurrence). * - * @param elem the element composing the resulting LazyList - * @return the LazyList containing an infinite number of elem + * @param elem the element composing the resulting LazyListIterable + * @return the LazyListIterable containing an infinite number of elem */ - def continually[A](elem: => A): LazyList[A]^{elem} = newLL(sCons(elem, continually(elem))) + def continually[A](elem: => A): LazyListIterable[A]^{elem} = newLL(sCons(elem, continually(elem))) - override def fill[A](n: Int)(elem: => A): LazyList[A]^{elem} = + override def fill[A](n: Int)(elem: => A): LazyListIterable[A]^{elem} = if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty - override def tabulate[A](n: Int)(f: Int => A): LazyList[A]^{f} = { - def at(index: Int): LazyList[A]^{f} = + override def tabulate[A](n: Int)(f: Int => A): LazyListIterable[A]^{f} = { + def at(index: Int): LazyListIterable[A]^{f} = if (index < n) newLL(sCons(f(index), at(index + 1))) else empty at(0) } // significantly simpler than the iterator returned by Iterator.unfold - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A]^{f} = + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyListIterable[A]^{f} = newLL { f(init) match { case Some((elem, state)) => sCons(elem, unfold(state)(f)) @@ -1230,9 +1236,9 @@ object LazyList extends IterableFactory[LazyList] { * @tparam A the type of the ${coll}’s elements * @return A builder for $Coll objects. 
*/ - def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] + def newBuilder[A]: Builder[A, LazyListIterable[A]] = new LazyBuilder[A] - private class LazyIterator[+A](private[this] var lazyList: LazyList[A]^) extends AbstractIterator[A] { + private class LazyIterator[+A](private[this] var lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { override def hasNext: Boolean = !lazyList.isEmpty override def next(): A = @@ -1244,8 +1250,8 @@ object LazyList extends IterableFactory[LazyList] { } } - private class SlidingIterator[A](private[this] var lazyList: LazyList[A]^, size: Int, step: Int) - extends AbstractIterator[LazyList[A]] { + private class SlidingIterator[A](private[this] var lazyList: LazyListIterable[A]^, size: Int, step: Int) + extends AbstractIterator[LazyListIterable[A]] { this: SlidingIterator[A]^ => private val minLen = size - step max 0 private var first = true @@ -1254,7 +1260,7 @@ object LazyList extends IterableFactory[LazyList] { if (first) !lazyList.isEmpty else lazyList.lengthGt(minLen) - def next(): LazyList[A] = { + def next(): LazyListIterable[A] = { if (!hasNext) Iterator.empty.next() else { first = false @@ -1265,21 +1271,21 @@ object LazyList extends IterableFactory[LazyList] { } } - private final class WithFilter[A] private[LazyList](lazyList: LazyList[A]^, p: A => Boolean) - extends collection.WithFilter[A, LazyList] { + private final class WithFilter[A] private[LazyListIterable](lazyList: LazyListIterable[A]^, p: A => Boolean) + extends collection.WithFilter[A, LazyListIterable] { this: WithFilter[A]^ => private[this] val filtered = lazyList.filter(p) - def map[B](f: A => B): LazyList[B]^{this, f} = filtered.map(f) - def flatMap[B](f: A => IterableOnce[B]^): LazyList[B]^{this, f} = filtered.flatMap(f) + def map[B](f: A => B): LazyListIterable[B]^{this, f} = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = filtered.flatMap(f) def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): collection.WithFilter[A, LazyList]^{this, q} = new WithFilter(filtered, q) + def withFilter(q: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, q} = new WithFilter(filtered, q) } - private final class LazyBuilder[A] extends ReusableBuilder[A, LazyList[A]] { + private final class LazyBuilder[A] extends ReusableBuilder[A, LazyListIterable[A]] { import LazyBuilder._ private[this] var next: DeferredState[A @uncheckedCaptures] = _ - private[this] var list: LazyList[A @uncheckedCaptures] = _ + private[this] var list: LazyListIterable[A @uncheckedCaptures] = _ clear() @@ -1289,7 +1295,7 @@ object LazyList extends IterableFactory[LazyList] { next = deferred } - override def result(): LazyList[A] = { + override def result(): LazyListIterable[A] = { next init State.Empty list } @@ -1337,7 +1343,7 @@ object LazyList extends IterableFactory[LazyList] { * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. 
*/ @SerialVersionUID(3L) - final class SerializationProxy[A](@transient protected var coll: LazyList[A]^) extends Serializable { + final class SerializationProxy[A](@transient protected var coll: LazyListIterable[A]^) extends Serializable { private[this] def writeObject(out: ObjectOutputStream): Unit = { out.defaultWriteObject() @@ -1358,9 +1364,9 @@ object LazyList extends IterableFactory[LazyList] { case SerializeEnd => initRead = true case a => init += a.asInstanceOf[A] } - val tail = in.readObject().asInstanceOf[LazyList[A]] + val tail = in.readObject().asInstanceOf[LazyListIterable[A]] // scala/scala#10118: caution that no code path can evaluate `tail.state` - // before the resulting LazyList is returned + // before the resulting LazyListIterable is returned val it = init.toList.iterator coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state)) } From 9ae3ee71797d80f1261a1a548519ab4869953a3c Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 3 Nov 2023 10:44:50 +0100 Subject: [PATCH 120/216] Add generic/IsSeq to stdlib --- .../stdlib/collection/generic/IsSeq.scala | 117 ++++++++++++++++++ 1 file changed, 117 insertions(+) create mode 100644 tests/pos-special/stdlib/collection/generic/IsSeq.scala diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala new file mode 100644 index 000000000000..84fcd7be14df --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsSeq.scala @@ -0,0 +1,117 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +import scala.reflect.ClassTag + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for + * some types `A` and `C`. + * + * This type enables simple enrichment of `Seq`s with extension methods which + * can make full use of the mechanics of the Scala collections framework in + * their implementation. + * + * @see [[scala.collection.generic.IsIterable]] + */ +trait IsSeq[Repr] extends IsIterable[Repr] { + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => SeqOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `SeqOps[A, Iterable, C]` + * + * @note The second type parameter of the returned `SeqOps` value is + * still `Iterable` (and not `Seq`) because `SeqView[A]` only + * extends `SeqOps[A, View, View[A]]`. + */ + def apply(coll: Repr): SeqOps[A, Iterable, C] +} + +object IsSeq { + + private val seqOpsIsSeqVal: IsSeq[Seq[Any]] = + new IsSeq[Seq[Any]] { + type A = Any + type C = Any + def apply(coll: Seq[Any]): SeqOps[Any, Iterable, Any] = coll + } + + implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = + seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] + +/* Under cc, views are not Seqs and can't use SeqOps. 
+ + implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsSeq[CC0[A0]] { type A = A0; type C = View[A0] } = + new IsSeq[CC0[A0]] { + type A = A0 + type C = View[A] + def apply(coll: CC0[A0]): SeqOps[A0, View, View[A0]] = coll + } + + implicit val stringViewIsSeq: IsSeq[StringView] { type A = Char; type C = View[Char] } = + new IsSeq[StringView] { + type A = Char + type C = View[Char] + def apply(coll: StringView): SeqOps[Char, View, View[Char]] = coll + } + +*/ + implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = + new IsSeq[String] { + type A = Char + type C = String + def apply(s: String): SeqOps[Char, immutable.IndexedSeq, String] = + new SeqOps[Char, immutable.ArraySeq, String] { + def length: Int = s.length + def apply(i: Int): Char = s.charAt(i) + def toIterable: Iterable[Char] = new immutable.WrappedString(s) + protected[this] def coll: String = s + protected[this] def fromSpecific(coll: IterableOnce[Char]): String = coll.iterator.mkString + def iterableFactory: FreeSeqFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged + override def empty: String = "" + protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder + def iterator: Iterator[Char] = s.iterator + } + } + + implicit def arrayIsSeq[A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = + new IsSeq[Array[A0]] { + type A = A0 + type C = Array[A0] + def apply(a: Array[A0]): SeqOps[A0, Seq, Array[A0]] = + new SeqOps[A, mutable.ArraySeq, Array[A]] { + def apply(i: Int): A = a(i) + def length: Int = a.length + def toIterable: Iterable[A] = mutable.ArraySeq.make(a) + protected def coll: Array[A] = a + protected def fromSpecific(coll: IterableOnce[A]): Array[A] = Array.from(coll) + def iterableFactory: FreeSeqFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged + override def empty: Array[A] = Array.empty[A] + protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder + def iterator: Iterator[A] = a.iterator + } + } + + // `Range` can not be unified with the `CC0` parameter of the + // `seqOpsIsSeq` definition because it does not take a type parameter. 
+ // Hence the need for a separate case: + implicit def rangeIsSeq[C0 <: Range]: IsSeq[C0] { type A = Int; type C = immutable.IndexedSeq[Int] } = + new IsSeq[C0] { + type A = Int + type C = immutable.IndexedSeq[Int] + def apply(coll: C0): SeqOps[Int, Seq, immutable.IndexedSeq[Int]] = coll + } + +} From f8f20508c1b225a55693262e7d3cac37b945d09c Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 3 Nov 2023 11:08:46 +0100 Subject: [PATCH 121/216] Make SubstRecThis typemap idempotent --- compiler/src/dotty/tools/dotc/core/Substituters.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index 5a641416b3e1..bd30177adcb4 100644 --- a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -189,7 +189,7 @@ object Substituters: def apply(tp: Type): Type = substThis(tp, from, to, this)(using mapCtx) } - final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap { + final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap { def apply(tp: Type): Type = substRecThis(tp, from, to, this)(using mapCtx) } From b19f981675f537c9c7a4289abd5e731eeac26528 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 3 Nov 2023 11:09:15 +0100 Subject: [PATCH 122/216] Capturecheck all files in generic --- .../collection/generic/BitOperations.scala | 51 ++++++ .../generic/DefaultSerializationProxy.scala | 90 ++++++++++ .../collection/generic/IsIterable.scala | 165 ++++++++++++++++++ .../collection/generic/IsIterableOnce.scala | 72 ++++++++ .../stdlib/collection/generic/IsMap.scala | 115 ++++++++++++ .../stdlib/collection/generic/IsSeq.scala | 32 ++-- .../collection/generic/Subtractable.scala | 63 +++++++ .../stdlib/collection/generic/package.scala | 35 ++++ 8 files changed, 610 insertions(+), 13 deletions(-) create mode 100644 tests/pos-special/stdlib/collection/generic/BitOperations.scala create mode 100644 tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsIterable.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsMap.scala create mode 100644 tests/pos-special/stdlib/collection/generic/Subtractable.scala create mode 100644 tests/pos-special/stdlib/collection/generic/package.scala diff --git a/tests/pos-special/stdlib/collection/generic/BitOperations.scala b/tests/pos-special/stdlib/collection/generic/BitOperations.scala new file mode 100644 index 000000000000..f76619a004fa --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/BitOperations.scala @@ -0,0 +1,51 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic +import language.experimental.captureChecking + + +/** Some bit operations. + * + * See [[https://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for + * an explanation of unsignedCompare. 
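+ *
+ *  A worked example, added here for illustration: `unsignedCompare(-1, 1)`
+ *  should be `false`, because `-1` reads as the unsigned value `0xFFFFFFFF`.
+ *  Indeed, `(-1 < 1) ^ (-1 < 0) ^ (1 < 0)` is `true ^ true ^ false`, which
+ *  evaluates to `false`.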
+ */ +private[collection] object BitOperations { + trait Int { + type Int = scala.Int + def zero(i: Int, mask: Int) = (i & mask) == 0 + def mask(i: Int, mask: Int) = i & (complement(mask - 1) ^ mask) + def hasMatch(key: Int, prefix: Int, m: Int) = mask(key, m) == prefix + def unsignedCompare(i: Int, j: Int) = (i < j) ^ (i < 0) ^ (j < 0) + def shorter(m1: Int, m2: Int) = unsignedCompare(m2, m1) + def complement(i: Int) = (-1) ^ i + def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0) + def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep + def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j) + } + object Int extends Int + + trait Long { + type Long = scala.Long + def zero(i: Long, mask: Long) = (i & mask) == 0L + def mask(i: Long, mask: Long) = i & (complement(mask - 1) ^ mask) + def hasMatch(key: Long, prefix: Long, m: Long) = mask(key, m) == prefix + def unsignedCompare(i: Long, j: Long) = (i < j) ^ (i < 0L) ^ (j < 0L) + def shorter(m1: Long, m2: Long) = unsignedCompare(m2, m1) + def complement(i: Long) = (-1L) ^ i + def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L) + def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep + def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j) + } + object Long extends Long +} diff --git a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala new file mode 100644 index 000000000000..7eba9433b8d5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala @@ -0,0 +1,90 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.generic + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.collection.{Factory, Iterable} +import scala.collection.mutable.Builder +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures + +/** The default serialization proxy for collection implementations. + * + * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` + * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed + * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any + * additional state required to create the proper `Builder` needs to be captured by the `factory`. 
+ */ +@SerialVersionUID(3L) +final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { + + @transient protected var builder: Builder[A @uncheckedCaptures, Any] = _ + // @uncheckedCaptures OK since builder is used only locally when reading objects + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val k = coll.knownSize + out.writeInt(k) + var count = 0 + coll.foreach { x => + out.writeObject(x) + count += 1 + } + if(k >= 0) { + if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") + } else out.writeObject(SerializeEnd) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + builder = factory.newBuilder + val k = in.readInt() + if(k >= 0) { + builder.sizeHint(k) + var count = 0 + while(count < k) { + builder += in.readObject().asInstanceOf[A] + count += 1 + } + } else { + while (true) in.readObject match { + case SerializeEnd => return + case a => builder += a.asInstanceOf[A] + } + } + } + + protected[this] def readResolve(): Any = builder.result() +} + +@SerialVersionUID(3L) +private[collection] case object SerializeEnd + +/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type + * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or + * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement + * it directly without using this trait if you need a non-standard factory or if you want to use a different + * serialization scheme. + */ +trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => + protected[this] def writeReplace(): AnyRef = { + val f: Factory[Any, Any] = this match { + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]]) + case it => it.iterableFactory.iterableFactory + } + new DefaultSerializationProxy(f, this) + } +} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterable.scala b/tests/pos-special/stdlib/collection/generic/IsIterable.scala new file mode 100644 index 000000000000..c309299b615b --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsIterable.scala @@ -0,0 +1,165 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic +import language.experimental.captureChecking + +/** A trait which can be used to avoid code duplication when defining extension + * methods that should be applicable both to existing Scala collections (i.e., + * types extending `Iterable`) as well as other (potentially user-defined) + * types that could be converted to a Scala collection type. This trait + * makes it possible to treat Scala collections and types that can be implicitly + * converted to a collection type uniformly. 
For example, one can provide + * extension methods that work both on collection types and on `String`s (`String`s + * do not extend `Iterable`, but can be converted to `Iterable`) + * + * `IsIterable` provides three members: + * + * 1. type member `A`, which represents the element type of the target `Iterable[A]` + * 1. type member `C`, which represents the type returned by transformation operations that preserve the collection’s elements type + * 1. method `apply`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `IterableOps[A, Iterable, C]`. + * + * ===Usage=== + * + * One must provide `IsIterable` as an implicit parameter type of an implicit + * conversion. Its usage is shown below. Our objective in the following example + * is to provide a generic extension method `mapReduce` to any type that extends + * or can be converted to `Iterable`. In our example, this includes + * `String`. + * + * {{{ + * import scala.collection.{Iterable, IterableOps} + * import scala.collection.generic.IsIterable + * + * class ExtensionMethods[Repr, I <: IsIterable[Repr]](coll: Repr, it: I) { + * def mapReduce[B](mapper: it.A => B)(reducer: (B, B) => B): B = { + * val iter = it(coll).iterator + * var res = mapper(iter.next()) + * while (iter.hasNext) + * res = reducer(res, mapper(iter.next())) + * res + * } + * } + * + * implicit def withExtensions[Repr](coll: Repr)(implicit it: IsIterable[Repr]): ExtensionMethods[Repr, it.type] = + * new ExtensionMethods(coll, it) + * + * // See it in action! + * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12 + * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59 + *}}} + * + * Here, we begin by creating a class `ExtensionMethods` which contains our + * `mapReduce` extension method. + * + * Note that `ExtensionMethods` takes a constructor argument `coll` of type `Repr`, where + * `Repr` represents (typically) the collection type, and an argument `it` of a subtype of `IsIterable[Repr]`. + * The body of the method starts by converting the `coll` argument to an `IterableOps` in order to + * call the `iterator` method on it. + * The remaining of the implementation is straightforward. + * + * The `withExtensions` implicit conversion makes the `mapReduce` operation available + * on any type `Repr` for which it exists an implicit `IsIterable[Repr]` instance. + * Note how we keep track of the precise type of the implicit `it` argument by using the + * `it.type` singleton type, rather than the wider `IsIterable[Repr]` type. We do that + * so that the information carried by the type members `A` and `C` of the `it` argument + * is not lost. + * + * When the `mapReduce` method is called on some type of which it is not + * a member, implicit search is triggered. Because implicit conversion + * `withExtensions` is generic, it will be applied as long as an implicit + * value of type `IsIterable[Repr]` can be found. Given that the + * `IsIterable` companion object contains implicit members that return values of type + * `IsIterable`, this requirement is typically satisfied, and the chain + * of interactions described in the previous paragraph is set into action. + * (See the `IsIterable` companion object, which contains a precise + * specification of the available implicits.) + * + * ''Note'': Currently, it's not possible to combine the implicit conversion and + * the class with the extension methods into an implicit class due to + * limitations of type inference. 
+ * + * ===Implementing `IsIterable` for New Types=== + * + * One must simply provide an implicit value of type `IsIterable` + * specific to the new type, or an implicit conversion which returns an + * instance of `IsIterable` specific to the new type. + * + * Below is an example of an implementation of the `IsIterable` trait + * where the `Repr` type is `Range`. + * + *{{{ + * implicit val rangeRepr: IsIterable[Range] { type A = Int; type C = IndexedSeq[Int] } = + * new IsIterable[Range] { + * type A = Int + * type C = IndexedSeq[Int] + * def apply(coll: Range): IterableOps[Int, IndexedSeq, IndexedSeq[Int]] = coll + * } + *}}} + * + * (Note that in practice the `IsIterable[Range]` instance is already provided by + * the standard library, and it is defined as an `IsSeq[Range]` instance) + */ +trait IsIterable[Repr] extends IsIterableOnce[Repr] { + + /** The type returned by transformation operations that preserve the same elements + * type (e.g. `filter`, `take`). + * + * In practice, this type is often `Repr` itself, excepted in the case + * of `SeqView[A]` (and other `View[A]` subclasses), where it is “only” `View[A]`. + */ + type C + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => IterableOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `IterableOps[A, Iterable, C]` */ + def apply(coll: Repr): IterableOps[A, Iterable, C] + +} + +object IsIterable extends IsIterableLowPriority { + + // Straightforward case: IterableOps subclasses + implicit def iterableOpsIsIterable[A0, CC0[X] <: IterableOps[X, Iterable, CC0[X]]]: IsIterable[CC0[A0]] { type A = A0; type C = CC0[A0] } = + new IsIterable[CC0[A0]] { + type A = A0 + type C = CC0[A0] + def apply(coll: CC0[A]): IterableOps[A, Iterable, C] = coll + } + + // The `BitSet` type can not be unified with the `CC0` parameter of + // the above definition because it does not take a type parameter. + // Hence the need for a separate case: + implicit def bitSetOpsIsIterable[C0 <: BitSet with BitSetOps[C0]]: IsIterable[C0] { type A = Int; type C = C0 } = + new IsIterable[C0] { + type A = Int + type C = C0 + def apply(coll: C0): IterableOps[Int, Iterable, C0] = coll + } + +} + +trait IsIterableLowPriority { + + // Makes `IsSeq` instances visible in `IsIterable` companion + implicit def isSeqLikeIsIterable[Repr](implicit + isSeqLike: IsSeq[Repr] + ): IsIterable[Repr] { type A = isSeqLike.A; type C = isSeqLike.C } = isSeqLike + + // Makes `IsMap` instances visible in `IsIterable` companion + implicit def isMapLikeIsIterable[Repr](implicit + isMapLike: IsMap[Repr] + ): IsIterable[Repr] { type A = isMapLike.A; type C = isMapLike.C } = isMapLike + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala new file mode 100644 index 000000000000..2836ca2bb520 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala @@ -0,0 +1,72 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package generic +import language.experimental.captureChecking + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `IterableOnce[A]`. + * + * This type enables simple enrichment of `IterableOnce`s with extension + * methods which can make full use of the mechanics of the Scala collections + * framework in their implementation. + * + * Example usage, + * {{{ + * class FilterMapImpl[Repr, I <: IsIterableOnce[Repr]](coll: Repr, it: I) { + * final def filterMap[B, That](f: it.A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = { + * val b = bf.newBuilder(coll) + * for(e <- it(coll).iterator) f(e) foreach (b +=) + * b.result() + * } + * } + * implicit def filterMap[Repr](coll: Repr)(implicit it: IsIterableOnce[Repr]): FilterMapImpl[Repr, it.type] = + * new FilterMapImpl(coll, it) + * + * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) + * // == List(2, 4) + * }}} + */ +trait IsIterableOnce[Repr] { + + /** The type of elements we can traverse over (e.g. `Int`). */ + type A + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + val conversion: Repr => IterableOnce[A] = apply(_) + + /** A conversion from the representation type `Repr` to a `IterableOnce[A]`. */ + def apply(coll: Repr): IterableOnce[A] + +} + +object IsIterableOnce extends IsIterableOnceLowPriority { + + // Straightforward case: IterableOnce subclasses + implicit def iterableOnceIsIterableOnce[CC0[A] <: IterableOnce[A], A0]: IsIterableOnce[CC0[A0]] { type A = A0 } = + new IsIterableOnce[CC0[A0]] { + type A = A0 + def apply(coll: CC0[A0]): IterableOnce[A0] = coll + } + +} + +trait IsIterableOnceLowPriority { + + // Makes `IsIterable` instance visible in `IsIterableOnce` companion + implicit def isIterableLikeIsIterableOnce[Repr](implicit + isIterableLike: IsIterable[Repr] + ): IsIterableOnce[Repr] { type A = isIterableLike.A } = isIterableLike + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsMap.scala b/tests/pos-special/stdlib/collection/generic/IsMap.scala new file mode 100644 index 000000000000..ad7254d2dd61 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsMap.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +import IsMap.Tupled +import scala.collection.immutable.{IntMap, LongMap} +import language.experimental.captureChecking + +/** + * Type class witnessing that a collection type `Repr` + * has keys of type `K`, values of type `V` and has a conversion to + * `MapOps[K, V, Iterable, C]`, for some types `K`, `V` and `C`. + * + * This type enables simple enrichment of `Map`s with extension methods. + * + * @see [[scala.collection.generic.IsIterable]] + * @tparam Repr Collection type (e.g. `Map[Int, String]`) + */ +trait IsMap[Repr] extends IsIterable[Repr] { + + /** The type of keys */ + type K + + /** The type of values */ + type V + + type A = (K, V) + + /** A conversion from the type `Repr` to `MapOps[K, V, Iterable, C]` + * + * @note The third type parameter of the returned `MapOps` value is + * still `Iterable` (and not `Map`) because `MapView[K, V]` only + * extends `MapOps[K, V, View, View[A]]`. 
+ */ + override def apply(c: Repr): MapOps[K, V, Tupled[Iterable]#Ap, C] + +} + +object IsMap { + + /** Convenient type level function that takes a unary type constructor `F[_]` + * and returns a binary type constructor that tuples its parameters and passes + * them to `F`. + * + * `Tupled[F]#Ap` is equivalent to `({ type Ap[X, +Y] = F[(X, Y)] })#Ap`. + */ + type Tupled[F[+_]] = { type Ap[X, Y] = F[(X, Y)] } + + // Map collections + implicit def mapOpsIsMap[CC0[X, Y] <: MapOps[X, Y, Tupled[Iterable]#Ap, CC0[X, Y]], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = CC0[K, V] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = CC0[K0, V0] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, C] = c + } + + // MapView + implicit def mapViewIsMap[CC0[X, Y] <: MapView[X, Y], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = View[(K0, V0)] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = View[(K, V)] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, View[(K0, V0)]] = c + } + + // AnyRefMap has stricter bounds than the ones used by the mapOpsIsMap definition + implicit def anyRefMapIsMap[K0 <: AnyRef, V0]: IsMap[mutable.AnyRefMap[K0, V0]] { type K = K0; type V = V0; type C = mutable.AnyRefMap[K0, V0] } = + new IsMap[mutable.AnyRefMap[K0, V0]] { + type K = K0 + type V = V0 + type C = mutable.AnyRefMap[K0, V0] + def apply(c: mutable.AnyRefMap[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, mutable.AnyRefMap[K0, V0]] = c + } + + // IntMap takes one type parameter only whereas mapOpsIsMap uses a parameter CC0 with two type parameters + implicit def intMapIsMap[V0]: IsMap[IntMap[V0]] { type K = Int; type V = V0; type C = IntMap[V0] } = + new IsMap[IntMap[V0]] { + type K = Int + type V = V0 + type C = IntMap[V0] + def apply(c: IntMap[V0]): MapOps[Int, V0, Tupled[Iterable]#Ap, IntMap[V0]] = c + } + + // LongMap is in a similar situation as IntMap + implicit def longMapIsMap[V0]: IsMap[LongMap[V0]] { type K = Long; type V = V0; type C = LongMap[V0] } = + new IsMap[LongMap[V0]] { + type K = Long + type V = V0 + type C = LongMap[V0] + def apply(c: LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, LongMap[V0]] = c + } + + // mutable.LongMap is in a similar situation as LongMap and IntMap + implicit def mutableLongMapIsMap[V0]: IsMap[mutable.LongMap[V0]] { type K = Long; type V = V0; type C = mutable.LongMap[V0] } = + new IsMap[mutable.LongMap[V0]] { + type K = Long + type V = V0 + type C = mutable.LongMap[V0] + def apply(c: mutable.LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, mutable.LongMap[V0]] = c + } + + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala index 84fcd7be14df..8ad344c4d4fc 100644 --- a/tests/pos-special/stdlib/collection/generic/IsSeq.scala +++ b/tests/pos-special/stdlib/collection/generic/IsSeq.scala @@ -14,6 +14,9 @@ package scala.collection package generic import scala.reflect.ClassTag +import language.experimental.captureChecking +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** Type class witnessing that a collection representation type `Repr` has * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for @@ -51,23 +54,26 @@ object IsSeq { implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] -/* 
Under cc, views are not Seqs and can't use SeqOps. - - implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsSeq[CC0[A0]] { type A = A0; type C = View[A0] } = - new IsSeq[CC0[A0]] { + /** !!! Under cc, views are not Seqs and can't use SeqOps. + * So this should be renamed to seqViewIsIterable + */ + implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsIterable[CC0[A0]] { type A = A0; type C = View[A0] } = + new IsIterable[CC0[A0]] { type A = A0 type C = View[A] - def apply(coll: CC0[A0]): SeqOps[A0, View, View[A0]] = coll + def apply(coll: CC0[A0]): IterableOps[A0, View, View[A0]] = coll } - implicit val stringViewIsSeq: IsSeq[StringView] { type A = Char; type C = View[Char] } = - new IsSeq[StringView] { + /** !!! Under cc, views are not Seqs and can't use SeqOps. + * So this should be renamed to stringViewIsIterable + */ + implicit val stringViewIsSeq: IsIterable[StringView] { type A = Char; type C = View[Char] } = + new IsIterable[StringView] { type A = Char type C = View[Char] - def apply(coll: StringView): SeqOps[Char, View, View[Char]] = coll + def apply(coll: StringView): IterableOps[Char, View, View[Char]] = coll } -*/ implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = new IsSeq[String] { type A = Char @@ -78,7 +84,7 @@ object IsSeq { def apply(i: Int): Char = s.charAt(i) def toIterable: Iterable[Char] = new immutable.WrappedString(s) protected[this] def coll: String = s - protected[this] def fromSpecific(coll: IterableOnce[Char]): String = coll.iterator.mkString + protected[this] def fromSpecific(coll: IterableOnce[Char]^): String = coll.iterator.mkString def iterableFactory: FreeSeqFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged override def empty: String = "" protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder @@ -86,7 +92,7 @@ object IsSeq { } } - implicit def arrayIsSeq[A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = + implicit def arrayIsSeq[sealed A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = new IsSeq[Array[A0]] { type A = A0 type C = Array[A0] @@ -94,9 +100,9 @@ object IsSeq { new SeqOps[A, mutable.ArraySeq, Array[A]] { def apply(i: Int): A = a(i) def length: Int = a.length - def toIterable: Iterable[A] = mutable.ArraySeq.make(a) + def toIterable: Iterable[A] = mutable.ArraySeq.make[A @uncheckedCaptures](a) protected def coll: Array[A] = a - protected def fromSpecific(coll: IterableOnce[A]): Array[A] = Array.from(coll) + protected def fromSpecific(coll: IterableOnce[A]^): Array[A] = Array.from(coll) def iterableFactory: FreeSeqFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged override def empty: Array[A] = Array.empty[A] protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder diff --git a/tests/pos-special/stdlib/collection/generic/Subtractable.scala b/tests/pos-special/stdlib/collection/generic/Subtractable.scala new file mode 100644 index 000000000000..2c0967dbaf4b --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/Subtractable.scala @@ -0,0 +1,63 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package generic +import language.experimental.captureChecking + +/** This trait represents collection-like objects that can be reduced + * using a '+' operator. It defines variants of `-` and `--` + * as convenience methods in terms of single-element removal `-`. + * + * @tparam A the type of the elements of the $coll. + * @tparam Repr the type of the $coll itself + * @define coll collection + * @define Coll Subtractable + */ +@deprecated("Subtractable is deprecated. This is now implemented as part of SetOps, MapOps, etc.", "2.13.0") +trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self => + + /** The representation object of type `Repr` which contains the collection's elements + */ + protected def repr: Repr + + /** Creates a new $coll from this $coll with an element removed. + * @param elem the element to remove + * @return a new collection that contains all elements of the current $coll + * except one less occurrence of `elem`. + */ + def -(elem: A): Repr + + /** Creates a new $coll from this $coll with some elements removed. + * + * This method takes two or more elements to be removed. Another overloaded + * variant of this method handles the case where a single element is + * removed. + * @param elem1 the first element to remove. + * @param elem2 the second element to remove. + * @param elems the remaining elements to remove. + * @return a new $coll that contains all elements of the current $coll + * except one less occurrence of each of the given elements. + */ + def -(elem1: A, elem2: A, elems: A*): Repr = + this - elem1 - elem2 -- elems + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * @param xs the collection containing the removed elements. + * @return a new $coll that contains all elements of the current $coll + * except one less occurrence of each of the elements of `elems`. + */ + def --(xs: IterableOnce[A]): Repr = (repr /: xs.iterator) (_ - _) +} diff --git a/tests/pos-special/stdlib/collection/generic/package.scala b/tests/pos-special/stdlib/collection/generic/package.scala new file mode 100644 index 000000000000..0ba67c1bf76e --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/package.scala @@ -0,0 +1,35 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +import language.experimental.captureChecking + + +package object generic { + @deprecated("Clearable was moved from collection.generic to collection.mutable", "2.13.0") + type Clearable = scala.collection.mutable.Clearable + + @deprecated("Use scala.collection.BuildFrom instead", "2.13.0") + type CanBuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C] + + @deprecated("Growable was moved from collection.generic to collection.mutable", "2.13.0") + type Growable[-A] = scala.collection.mutable.Growable[A] + + @deprecated("Shrinkable was moved from collection.generic to collection.mutable", "2.13.0") + type Shrinkable[-A] = scala.collection.mutable.Shrinkable[A] + + @deprecated("Use IsIterable instead", "2.13.0") + type IsTraversableLike[Repr] = IsIterable[Repr] + + @deprecated("Use IsIterableOnce instead", "2.13.0") + type IsTraversableOnce[Repr] = IsIterableOnce[Repr] +} From 868d65bf0e27e614dafcd06ec5de3eab52a1d937 Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 3 Nov 2023 11:14:12 +0100 Subject: [PATCH 123/216] Drop an unsafeAssumePure in Iterator --- tests/pos-special/stdlib/collection/Iterator.scala | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index cf722235008c..993e2fc0cfea 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -17,7 +17,6 @@ import scala.annotation.tailrec import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import scala.runtime.Statics import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure import annotation.unchecked.uncheckedCaptures @@ -1146,9 +1145,7 @@ object Iterator extends IterableFactory[Iterator] { * Nested ConcatIterators are merged to avoid blowing the stack. */ private final class ConcatIterator[+A](val from: Iterator[A]^) extends AbstractIterator[A] { - private var current: Iterator[A @uncheckedCaptures] = from.unsafeAssumePure - // This should be Iteratpr[A]^, but fails since mutable variables can't capture cap. - // To do better we'd need to track nesting levels for universal capabiltities. 
+ private var current: Iterator[A @uncheckedCaptures]^{cap[ConcatIterator]} = from private var tail: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null private var last: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null private var currentHasNextChecked = false From f4066c02a14fabd41c2356deaf9e2bcaaaf6f0ce Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 3 Nov 2023 17:52:22 +0100 Subject: [PATCH 124/216] Better error message for capture errors involving self types --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index ae9a6eaff83e..ac6bf0252e47 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -263,11 +263,12 @@ class CheckCaptures extends Recheck, SymTransformer: pos, provenance) /** Check subcapturing `cs1 <: cs2`, report error on failure */ - def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context) = + def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, + provenance: => String = "", cs1description: String = "")(using Context) = checkOK( cs1.subCaptures(cs2, frozen = false), - if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head} is not" - else i"references $cs1 are not all", + if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head}$cs1description is not" + else i"references $cs1$cs1description are not all", pos, provenance) /** The current environment */ @@ -683,9 +684,15 @@ class CheckCaptures extends Recheck, SymTransformer: if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) for pureBase <- cls.pureBaseClass do // (4) + def selfType = impl.body + .collect: + case TypeDef(tpnme.SELF, rhs) => rhs + .headOption + .getOrElse(tree) + .orElse(tree) checkSubset(thisSet, CaptureSet.empty.withDescription(i"of pure base class $pureBase"), - tree.srcPos) + selfType.srcPos, cs1description = " captured by this self type") super.recheckClassDef(tree, impl, cls) finally curEnv = saved From e22744de00e65fa33622f3cf09b09b4a0f8dd174 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 4 Nov 2023 10:54:57 +0100 Subject: [PATCH 125/216] Add sealed refs test and fix check files of other tests --- tests/neg-custom-args/captures/cc-this.check | 2 +- tests/neg-custom-args/captures/cc-this2.check | 14 +++++-- .../captures/cc-this2/D_2.scala | 2 +- .../captures/exception-definitions.check | 9 ++-- .../captures/exception-definitions.scala | 4 +- .../captures/sealed-refs.scala | 42 +++++++++++++++++++ 6 files changed, 60 insertions(+), 13 deletions(-) create mode 100644 tests/neg-custom-args/captures/sealed-refs.scala diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check index 335302c5c259..070e815d6d45 100644 --- a/tests/neg-custom-args/captures/cc-this.check +++ b/tests/neg-custom-args/captures/cc-this.check @@ -12,4 +12,4 @@ -- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------ 17 | class C4(val f: () => Int) extends C3 // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | reference (C4.this.f : () => Int) is not included in the allowed capture set {} of pure base class class C3 + |reference (C4.this.f : () => Int) captured by this self type is 
not included in the allowed capture set {} of pure base class class C3 diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index 5e43a45b67f5..bd9a1085d262 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -1,6 +1,12 @@ --- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -------------------------------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:3:8 -------------------------------------------------------- +3 | this: D^ => // error + | ^^ + |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class C +-- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- 2 |class D extends C: // error - |^ - |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class C -3 | this: D^ => + | ^ + | illegal inheritance: self type D^ of class D does not conform to self type C + | of parent class C + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/cc-this2/D_2.scala b/tests/neg-custom-args/captures/cc-this2/D_2.scala index b22e5e456092..de1a722f73a9 100644 --- a/tests/neg-custom-args/captures/cc-this2/D_2.scala +++ b/tests/neg-custom-args/captures/cc-this2/D_2.scala @@ -1,3 +1,3 @@ class D extends C: // error - this: D^ => + this: D^ => // error diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 4b1fe0273f52..72b88f252e59 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -1,8 +1,7 @@ --- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 ----------------------------------------------- -2 |class Err extends Exception: // error - |^ - |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class Throwable -3 | self: Err^ => +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:3:8 ----------------------------------------------- +3 | self: Err^ => // error + | ^^^^ + |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala index a19b751825b8..fbc9f3fd1d33 100644 --- a/tests/neg-custom-args/captures/exception-definitions.scala +++ b/tests/neg-custom-args/captures/exception-definitions.scala @@ -1,6 +1,6 @@ -class Err extends Exception: // error - self: Err^ => +class Err extends Exception: + self: Err^ => // error def test(c: Any^) = class Err2 extends Exception: diff --git a/tests/neg-custom-args/captures/sealed-refs.scala b/tests/neg-custom-args/captures/sealed-refs.scala new file mode 100644 index 000000000000..05fa483acf28 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-refs.scala @@ -0,0 +1,42 @@ +class Ref[sealed A](init: A): + this: Ref[A]^ => + private var x: A = init + def get: A = x + def set(x: A): Unit = this.x = x + +class It[X]: + this: It[X]^ => + +def f1[B1](x: B1, next: 
B1 -> B1) = + var r = x // ok + r = next(x) + r + +def f2[B2](x: B2, next: B2 -> B2) = + val r = Ref[B2](x) // error + r.set(next(x)) + r.get + +def g[sealed B](x: B, next: B -> B) = + val r = Ref[B](x) // ok + r.set(next(x)) + r.get + +import annotation.unchecked.uncheckedCaptures + +def h[B](x: B, next: B -> B) = + val r = Ref[B @uncheckedCaptures](x) // ok + r.set(next(x)) + r.get + +def f3[B](x: B, next: B -> B) = + val r: Ref[B^{cap[f3]}] = Ref[B^{cap[f3]}](x) // error + r.set(next(x)) + val y = r.get + () + +def f4[B](x: B, next: B -> B) = + val r: Ref[B]^{cap[f4]} = Ref[B](x) // error + r.set(next(x)) + val y = r.get + () \ No newline at end of file From cfec1d0a2a92b5a23b38f1f6219e215cf0c19d58 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 4 Nov 2023 11:29:22 +0100 Subject: [PATCH 126/216] Simplify CapturedVars phase No need for a separate entry in the context's store; we can keep everything in the phase itself, which is more efficient and modular. --- .../tools/dotc/transform/CapturedVars.scala | 55 +++++++------------ 1 file changed, 20 insertions(+), 35 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index a018bbd1a3ac..202e3d72fa25 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -13,25 +13,20 @@ import core.NameKinds.TempResultName import core.Constants._ import util.Store import dotty.tools.uncheckedNN - -import scala.compiletime.uninitialized +import ast.tpd.* +import compiletime.uninitialized /** This phase translates variables that are captured in closures to * heap-allocated refs. */ class CapturedVars extends MiniPhase with IdentityDenotTransformer: thisPhase => - import ast.tpd._ override def phaseName: String = CapturedVars.name override def description: String = CapturedVars.description - private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = uninitialized - private def captured(using Context) = ctx.store(Captured) - - override def initContext(ctx: FreshContext): Unit = - Captured = ctx.addLocation(util.ReadOnlySet.empty) + private val captured = util.HashSet[Symbol]() private class RefInfo(using Context) { /** The classes for which a Ref type exists. 
 */
@@ -57,33 +52,10 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer:
     myRefInfo.uncheckedNN
   }

-  private class CollectCaptured extends TreeTraverser {
-    private val captured = util.HashSet[Symbol]()
-    def traverse(tree: Tree)(using Context) = tree match {
-      case id: Ident =>
-        val sym = id.symbol
-        if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm) {
-          val enclMeth = ctx.owner.enclosingMethod
-          if (sym.enclosingMethod != enclMeth) {
-            report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth")
-            captured += sym
-          }
-        }
-      case _ =>
-        traverseChildren(tree)
-    }
-    def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = {
-      traverse(tree)
-      captured
-    }
-  }
-
-  override def prepareForUnit(tree: Tree)(using Context): Context = {
-    val captured = atPhase(thisPhase) {
-      CollectCaptured().runOver(ctx.compilationUnit.tpdTree)
-    }
-    ctx.fresh.updateStore(Captured, captured)
-  }
+  override def prepareForUnit(tree: Tree)(using Context): Context =
+    captured.clear()
+    atPhase(thisPhase)(CapturedVars.collect(captured)).traverse(tree)
+    ctx

   /** The {Volatile|}{Int|Double|...|Object}Ref class corresponding to the class `cls`,
    *  depending on whether the reference should be @volatile
@@ -143,3 +115,16 @@ object CapturedVars:
   val name: String = "capturedVars"
   val description: String = "represent vars captured by closures as heap objects"
+
+  def collect(captured: util.HashSet[Symbol]): TreeTraverser = new:
+    def traverse(tree: Tree)(using Context) = tree match
+      case id: Ident =>
+        val sym = id.symbol
+        if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then
+          val enclMeth = ctx.owner.enclosingMethod
+          if sym.enclosingMethod != enclMeth then
+            report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth")
+            captured += sym
+      case _ =>
+        traverseChildren(tree)
+end CapturedVars

From 321d7e0bb2cd7323d8713e28fa65a46c9068278a Mon Sep 17 00:00:00 2001
From: EnzeXing
Date: Sat, 4 Nov 2023 09:42:23 -0400
Subject: [PATCH 127/216] Modify test

---
 tests/init-global/pos/i18629.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/init-global/pos/i18629.scala b/tests/init-global/pos/i18629.scala
index 029319a5785b..f97c21ee918d 100644
--- a/tests/init-global/pos/i18629.scala
+++ b/tests/init-global/pos/i18629.scala
@@ -1,5 +1,6 @@
 object Foo {
   val bar = List() match {
     case List() => ???
+    case _ => ???
   }
 }

From 5e49b124f5e735ecfbbaf48963d184d09c090194 Mon Sep 17 00:00:00 2001
From: odersky
Date: Sat, 4 Nov 2023 12:20:15 +0100
Subject: [PATCH 128/216] Refine isParametric tests

Mutable variables can appeal to parametricity only if they are not captured.
We use "not captured by any closure" as a sound approximation for that;
variables themselves are currently not tracked, so we cannot use something
more fine-grained.
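
For example (an illustrative sketch in the spirit of the sealed-refs test added
earlier in this series; `ok`, `leaky` and `upd` are names made up for this
message, not code from the diff):

    def ok[T](x: T, next: T -> T) =
      var r = x        // ok: r is used only within ok itself
      r = next(x)
      r

    def leaky[T](x: T, next: T -> T) =
      var r = x                      // flagged: T is not sealed, and r no
      val upd = () => r = next(x)    // longer counts as local because it is
      upd()                          // captured by the closure upd
      r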
--- .../dotty/tools/dotc/cc/CheckCaptures.scala | 89 +++++++++++++++---- tests/neg-custom-args/captures/buffers.check | 6 +- tests/neg-custom-args/captures/levels.check | 4 +- .../captures/sealed-leaks.check | 50 +++++++++++ .../captures/sealed-leaks.scala | 32 ++++++- .../stdlib/collection/Iterator.scala | 2 +- .../immutable/LazyListIterable.scala | 20 ++--- .../collection/immutable/TreeSeqMap.scala | 2 +- .../stdlib/collection/immutable/Vector.scala | 2 +- 9 files changed, 173 insertions(+), 34 deletions(-) create mode 100644 tests/neg-custom-args/captures/sealed-leaks.check diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index ac6bf0252e47..a49bd9f79351 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -16,7 +16,7 @@ import typer.ErrorReporting.{Addenda, err} import typer.ProtoTypes.{AnySelectionProto, LhsProto} import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.SymUtils.* -import transform.{Recheck, PreRecheck} +import transform.{Recheck, PreRecheck, CapturedVars} import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} @@ -149,15 +149,25 @@ object CheckCaptures: private val seen = new EqHashSet[TypeRef] + /** Check that there is at least one method containing carrier and defined + * in the scope of tparam. E.g. this is OK: + * def f[T] = { ... var x: T ... } + * So is this: + * class C[T] { def f() = { class D { var x: T }}} + * But this is not OK: + * class C[T] { object o { var x: T }} + */ extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = - val encl = carrier.maybeOwner.enclosingMethodOrClass - if encl.isClass then tparam.isParametricIn(encl) - else - def recur(encl: Symbol): Boolean = - if tparam.owner == encl then true - else if encl.isStatic || !encl.exists then false - else recur(encl.owner.enclosingMethodOrClass) - recur(encl) + carrier.exists && { + val encl = carrier.owner.enclosingMethodOrClass + if encl.isClass then tparam.isParametricIn(encl) + else + def recur(encl: Symbol): Boolean = + if tparam.owner == encl then true + else if encl.isStatic || !encl.exists then false + else recur(encl.owner.enclosingMethodOrClass) + recur(encl) + } def traverse(t: Type) = t.dealiasKeepAnnots match @@ -168,9 +178,12 @@ object CheckCaptures: t.info match case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => if hi.isAny then + val detailStr = + if t eq tp then "variable" + else i"refers to the type variable $t, which" report.error( em"""$what cannot $have $tp since - |that type refers to the type variable $t, which is not sealed. + |that type $detailStr is not sealed. 
|$addendum""", pos) else @@ -549,7 +562,7 @@ class CheckCaptures extends Recheck, SymTransformer: for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do if formal.isSealed then def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" - disallowRootCapabilitiesIn(arg.knownType, fn.symbol, + disallowRootCapabilitiesIn(arg.knownType, NoSymbol, i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) @@ -590,13 +603,58 @@ class CheckCaptures extends Recheck, SymTransformer: openClosures = openClosures.tail end recheckClosureBlock + /** Maps mutable variables to the symbols that capture them (in the + * CheckCaptures sense, i.e. symbol is referred to from a different method + * than the one it is defined in). + */ + private val capturedBy = util.HashMap[Symbol, Symbol]() + + /** Maps anonymous functions appearing as function arguments to + * the function that is called. + */ + private val anonFunCallee = util.HashMap[Symbol, Symbol]() + + /** Populates `capturedBy` and `anonFunCallee`. Called by `checkUnit`. + */ + private def collectCapturedMutVars(using Context) = new TreeTraverser: + def traverse(tree: Tree)(using Context) = tree match + case id: Ident => + val sym = id.symbol + if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then + val enclMeth = ctx.owner.enclosingMethod + if sym.enclosingMethod != enclMeth then + capturedBy(sym) = enclMeth + case Apply(fn, args) => + for case closureDef(mdef) <- args do + anonFunCallee(mdef.symbol) = fn.symbol + traverseChildren(tree) + case Inlined(_, bindings, expansion) => + traverse(bindings) + traverse(expansion) + case mdef: DefDef => + if !mdef.symbol.isInlineMethod then traverseChildren(tree) + case _ => + traverseChildren(tree) + override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Type = try if sym.is(Module) then sym.info // Modules are checked by checking the module class else if sym.is(Mutable) && !sym.hasAnnotation(defn.UncheckedCapturesAnnot) then - disallowRootCapabilitiesIn(tree.tpt.knownType, sym, - i"mutable $sym", "have type", "", sym.srcPos) + val (carrier, addendum) = capturedBy.get(sym) match + case Some(encl) => + val enclStr = + if encl.isAnonymousFunction then + val location = anonFunCallee.get(encl) match + case Some(meth) if meth.exists => i" argument in a call to $meth" + case _ => "" + s"an anonymous function$location" + else encl.show + (NoSymbol, i"\nNote that $sym does not count as local since it is captured by $enclStr") + case _ => + (sym, "") + disallowRootCapabilitiesIn( + tree.tpt.knownType, carrier, i"Mutable $sym", "have type", addendum, sym.srcPos) checkInferredResult(super.recheckValDef(tree, sym), tree) finally if !sym.is(Param) then @@ -1170,11 +1228,12 @@ class CheckCaptures extends Recheck, SymTransformer: private val setup: SetupAPI = thisPhase.prev.asInstanceOf[Setup] override def checkUnit(unit: CompilationUnit)(using Context): Unit = - setup.setupUnit(ctx.compilationUnit.tpdTree, completeDef) + setup.setupUnit(unit.tpdTree, completeDef) + collectCapturedMutVars.traverse(unit.tpdTree) if ctx.settings.YccPrintSetup.value then val echoHeader = "[[syntax tree at end of cc setup]]" - val treeString = show(ctx.compilationUnit.tpdTree) + val treeString = show(unit.tpdTree) report.echo(s"$echoHeader\n$treeString\n") withCaptureSetsExplained: diff --git a/tests/neg-custom-args/captures/buffers.check 
b/tests/neg-custom-args/captures/buffers.check index cdb7baa852fb..07acea3c48e3 100644 --- a/tests/neg-custom-args/captures/buffers.check +++ b/tests/neg-custom-args/captures/buffers.check @@ -1,7 +1,7 @@ -- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------ 11 | var elems: Array[A] = new Array[A](10) // error // error | ^ - | mutable variable elems cannot have type Array[A] since + | Mutable variable elems cannot have type Array[A] since | that type refers to the type variable A, which is not sealed. -- Error: tests/neg-custom-args/captures/buffers.scala:16:38 ----------------------------------------------------------- 16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error @@ -14,13 +14,13 @@ 11 | var elems: Array[A] = new Array[A](10) // error // error | ^^^^^^^^ | Array cannot have element type A since - | that type refers to the type variable A, which is not sealed. + | that type variable is not sealed. | Since arrays are mutable, they have to be treated like variables, | so their element type must be sealed. -- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------ 22 | val x: Array[A] = new Array[A](10) // error | ^^^^^^^^ | Array cannot have element type A since - | that type refers to the type variable A, which is not sealed. + | that type variable is not sealed. | Since arrays are mutable, they have to be treated like variables, | so their element type must be sealed. diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check index f91f90fb652f..c0cc7f0a759c 100644 --- a/tests/neg-custom-args/captures/levels.check +++ b/tests/neg-custom-args/captures/levels.check @@ -1,8 +1,8 @@ -- Error: tests/neg-custom-args/captures/levels.scala:6:16 ------------------------------------------------------------- 6 | private var v: T = init // error | ^ - | mutable variable v cannot have type T since - | that type refers to the type variable T, which is not sealed. + | Mutable variable v cannot have type T since + | that type variable is not sealed. -- Error: tests/neg-custom-args/captures/levels.scala:17:13 ------------------------------------------------------------ 17 | val _ = Ref[String => String]((x: String) => x) // error | ^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-custom-args/captures/sealed-leaks.check b/tests/neg-custom-args/captures/sealed-leaks.check new file mode 100644 index 000000000000..f7098eba32b6 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-leaks.check @@ -0,0 +1,50 @@ +-- [E129] Potential Issue Warning: tests/neg-custom-args/captures/sealed-leaks.scala:31:6 ------------------------------ +31 | () + | ^^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:12:27 ------------------------------------------------------ +12 | val later2 = usingLogFile[(() => Unit) | Null] { f => () => f.write(0) } // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Sealed type variable T cannot be instantiated to (() => Unit) | Null since + | that type captures the root capability `cap`. + | This is often caused by a local capability in an argument of method usingLogFile + | leaking as part of its result. 
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/sealed-leaks.scala:19:26 --------------------------------- +19 | usingLogFile { f => x = f } // error + | ^ + | Found: (f : java.io.FileOutputStream^) + | Required: (java.io.FileOutputStream | Null)^{cap[Test2]} + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:30:10 ------------------------------------------------------ +30 | var x: T = y // error + | ^ + | Mutable variable x cannot have type T since + | that type variable is not sealed. +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:39:8 ------------------------------------------------------- +39 | var x: T = y // error + | ^ + | Mutable variable x cannot have type T since + | that type variable is not sealed. + | + | Note that variable x does not count as local since it is captured by an anonymous function +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:43:8 ------------------------------------------------------- +43 | var x: T = y // error + | ^ + |Mutable variable x cannot have type T since + |that type variable is not sealed. + | + |Note that variable x does not count as local since it is captured by an anonymous function argument in a call to method identity +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:47:8 ------------------------------------------------------- +47 | var x: T = y // error + | ^ + | Mutable variable x cannot have type T since + | that type variable is not sealed. + | + | Note that variable x does not count as local since it is captured by method foo +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:11:14 ------------------------------------------------------ +11 | val later = usingLogFile { f => () => f.write(0) } // error + | ^^^^^^^^^^^^ + | local reference f leaks into outer capture set of type parameter T of method usingLogFile diff --git a/tests/neg-custom-args/captures/sealed-leaks.scala b/tests/neg-custom-args/captures/sealed-leaks.scala index a7acf77b5678..2555ba8a3e07 100644 --- a/tests/neg-custom-args/captures/sealed-leaks.scala +++ b/tests/neg-custom-args/captures/sealed-leaks.scala @@ -18,4 +18,34 @@ def Test2 = usingLogFile { f => x = f } // error - later() \ No newline at end of file + later() + +def Test3 = + def f[T](y: T) = + var x: T = y + () + + class C[T](y: T): + object o: + var x: T = y // error + () + + class C2[T](y: T): + def f = + var x: T = y // ok + () + + def g1[T](y: T): T => Unit = + var x: T = y // error + y => x = y + + def g2[T](y: T): T => Unit = + var x: T = y // error + identity(y => x = y) + + def g3[T](y: T): Unit = + var x: T = y // error + def foo = + x = y + () + diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index 993e2fc0cfea..90fd387069b0 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -868,7 +868,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = { val gap = new scala.collection.mutable.Queue[A @uncheckedCaptures] - var ahead: Iterator[A] = null + var ahead: Iterator[A @uncheckedCaptures] = null // ahead is captured by Partner, so A is not recognized as parametric class Partner extends AbstractIterator[A] { override def knownSize: Int = self.synchronized { val thisSize = self.knownSize diff --git 
a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala index 8d804bad13de..5684130b6048 100644 --- a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala +++ b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala @@ -852,9 +852,9 @@ final class LazyListIterable[+A] private(private[this] var lazyState: () => Lazy else if (!isEmpty) { b.append(head) var cursor = this - @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) + inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) var scout = tail - @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty + inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) { cursor = scout if (scoutNonEmpty) { @@ -998,7 +998,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[filterImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { var elem: A = null.asInstanceOf[A] var found = false @@ -1015,7 +1015,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[collectImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { val marker = Statics.pfMarker val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased @@ -1034,9 +1034,9 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[flatMapImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { - var it: Iterator[B]^{ll, f} = null + var it: Iterator[B @uncheckedCaptures]^{ll, f} = null var itHasNext = false var rest = restRef // var rest = restRef.elem while (!itHasNext && !rest.isEmpty) { @@ -1058,7 +1058,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric var iRef = n // val iRef = new IntRef(n) newLL { var rest = restRef // var rest = restRef.elem @@ -1075,7 +1075,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE 
`ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropWhileImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { var rest = restRef // var rest = restRef.elem while (!rest.isEmpty && p(rest.head)) { @@ -1088,8 +1088,8 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - var scoutRef = ll // val scoutRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var scoutRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // same situation var remainingRef = n // val remainingRef = new IntRef(n) newLL { var scout = scoutRef // var scout = scoutRef.elem diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala index d7cceb54cca3..91233669e5ca 100644 --- a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala @@ -609,7 +609,7 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { } final def splitAt(n: Int): (Ordering[T], Ordering[T]) = { - var rear = Ordering.empty[T] + var rear: Ordering[T @uncheckedCaptures] = Ordering.empty[T] var i = n (modifyOrRemove { (o, v) => i -= 1 diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala index 1bde30406fd9..d9d33add512d 100644 --- a/tests/pos-special/stdlib/collection/immutable/Vector.scala +++ b/tests/pos-special/stdlib/collection/immutable/Vector.scala @@ -229,7 +229,7 @@ sealed abstract class Vector[+A] private[immutable] (private[immutable] final va // k >= 0, k = suffix.knownSize val tinyAppendLimit = 4 + vectorSliceCount if (k < tinyAppendLimit) { - var v: Vector[B] = this + var v: Vector[B @uncheckedCaptures] = this suffix match { case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) case _ => suffix.iterator.foreach(x => v = v.appended(x)) From b09e9006ed92c421eb0925e653b8a1261e526a1f Mon Sep 17 00:00:00 2001 From: Fengyun Liu Date: Sun, 5 Nov 2023 10:33:52 +0100 Subject: [PATCH 129/216] Ignore Quote/Splice in init checker We should never be able to encounter them in usages of macros, because macros will expand to normal trees without quotes and splices. The particular test case is actually problematic: the macro did not expand in `base_0.scala`.
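For context, a minimal sketch of the shape involved (hypothetical names; the real reproducer is in tests/init/pos/i18407 below, and as the _0/_1 file suffixes there indicate, the macro must be compiled in an earlier run than its use site). The inline method is implemented by a splice; once the macro expands, the init checker sees only ordinary trees:

    import scala.quoted.*

    object Macros:
      inline def greeting: String = ${ impl }   // splice: expanded away at inlining time
      private def impl(using Quotes): Expr[String] = Expr("hello")

    object Main:
      val msg = Macros.greeting   // after expansion the init checker sees a plain string tree

If expansion does not happen, as in `base_0.scala`, the checker previously fell through to the catch-all case and emitted an internal-error warning; treating Quote/Splice trees as Hot avoids that.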
--- .../tools/dotc/transform/init/Semantic.scala | 4 +- tests/init/pos/i18407/base_0.scala | 4 ++ tests/init/pos/i18407/macros_0.scala | 37 +++++++++++++++++++ tests/init/pos/i18407/test_1.scala | 4 ++ 4 files changed, 47 insertions(+), 2 deletions(-) create mode 100644 tests/init/pos/i18407/base_0.scala create mode 100644 tests/init/pos/i18407/macros_0.scala create mode 100644 tests/init/pos/i18407/test_1.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index b75a688d6e6c..499c2d289783 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -1383,11 +1383,11 @@ object Semantic: case tpl: Template => init(tpl, thisV, klass) - case _: Import | _: Export => + case _: Import | _: Export | _: Quote | _: Splice | _: QuotePattern | _: SplicePattern => Hot case _ => - report.warning("[Internal error] unexpected tree" + Trace.show, expr) + report.warning("[Internal error] unexpected tree: " + expr.getClass + ", trace:\n" + Trace.show, expr) Hot /** Handle semantics of leaf nodes diff --git a/tests/init/pos/i18407/base_0.scala b/tests/init/pos/i18407/base_0.scala new file mode 100644 index 000000000000..892ff56e2ab1 --- /dev/null +++ b/tests/init/pos/i18407/base_0.scala @@ -0,0 +1,4 @@ +// base_0.scala +trait BaseTest extends AnyFreeSpecLike { + "empty-test" - {} // ok if we comment out this line +} diff --git a/tests/init/pos/i18407/macros_0.scala b/tests/init/pos/i18407/macros_0.scala new file mode 100644 index 000000000000..83a5cb7a81c2 --- /dev/null +++ b/tests/init/pos/i18407/macros_0.scala @@ -0,0 +1,37 @@ +// macros_0.scala +object source { + import scala.quoted._ + + class Position() + + object Position { + def withPosition[T]( + fun: Expr[Position => T] + )(using quotes: Quotes, typeOfT: Type[T]): Expr[T] = { + '{ + ${ fun }.apply(new source.Position()) + } + } + } +} + +trait AnyFreeSpecLike { + import scala.language.implicitConversions + + protected final class FreeSpecStringWrapper( + string: String, + pos: source.Position + ) { + def -(fun: => Unit): Unit = fun + } + + inline implicit def convertToFreeSpecStringWrapper( + s: String + ): FreeSpecStringWrapper = { + ${ + source.Position.withPosition[FreeSpecStringWrapper]('{ + (pos: source.Position) => new FreeSpecStringWrapper(s, pos) + }) + } + } +} diff --git a/tests/init/pos/i18407/test_1.scala b/tests/init/pos/i18407/test_1.scala new file mode 100644 index 000000000000..d3050da180b1 --- /dev/null +++ b/tests/init/pos/i18407/test_1.scala @@ -0,0 +1,4 @@ +class MyTest extends BaseTest { + "empty-test" - {} + private val myObject = new {} +} From 592d0ec4ea7683c71bcfccce6a0c056eb9123f14 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Fri, 3 Nov 2023 13:49:15 +0100 Subject: [PATCH 130/216] Update import wildcard from `_` to `*` in compiler --- .../src/dotty/tools/MainGenericRunner.scala | 2 +- .../tools/backend/jvm/BCodeAsmCommon.scala | 4 +- .../tools/backend/jvm/BCodeBodyBuilder.scala | 38 +- .../tools/backend/jvm/BCodeHelpers.scala | 36 +- .../tools/backend/jvm/BCodeIdiomatic.scala | 10 +- .../tools/backend/jvm/BCodeSkelBuilder.scala | 8 +- .../tools/backend/jvm/BCodeSyncAndTry.scala | 8 +- .../src/dotty/tools/backend/jvm/BTypes.scala | 2 +- .../tools/backend/jvm/BTypesFromSymbols.scala | 18 +- .../tools/backend/jvm/BackendUtils.scala | 8 +- .../tools/backend/jvm/ClassfileWriters.scala | 6 +- .../src/dotty/tools/backend/jvm/CodeGen.scala | 18 +- 
.../tools/backend/jvm/CollectSuperCalls.scala | 8 +- .../dotty/tools/backend/jvm/CoreBTypes.scala | 10 +- .../backend/jvm/DottyBackendInterface.scala | 16 +- .../dotty/tools/backend/jvm/GenBCode.scala | 8 +- .../backend/jvm/GeneratedClassHandler.scala | 2 +- .../backend/jvm/GenericSignatureVisitor.scala | 6 +- .../jvm/PostProcessorFrontendAccess.scala | 2 +- .../tools/backend/jvm/scalaPrimitives.scala | 10 +- .../dotty/tools/backend/sjs/GenSJSIR.scala | 6 +- .../dotty/tools/backend/sjs/JSCodeGen.scala | 50 +- .../tools/backend/sjs/JSDefinitions.scala | 12 +- .../dotty/tools/backend/sjs/JSEncoding.scala | 20 +- .../tools/backend/sjs/JSExportsGen.scala | 34 +- .../dotty/tools/backend/sjs/JSPositions.scala | 8 +- .../tools/backend/sjs/JSPrimitives.scala | 12 +- compiler/src/dotty/tools/dotc/Bench.scala | 2 +- .../dotty/tools/dotc/CompilationUnit.scala | 10 +- compiler/src/dotty/tools/dotc/Compiler.scala | 6 +- compiler/src/dotty/tools/dotc/Driver.scala | 6 +- compiler/src/dotty/tools/dotc/Resident.scala | 2 +- compiler/src/dotty/tools/dotc/Run.scala | 14 +- .../src/dotty/tools/dotc/ScalacCommand.scala | 2 +- .../tools/dotc/ast/CheckTrees.scala.disabled | 8 +- .../src/dotty/tools/dotc/ast/Desugar.scala | 16 +- .../dotty/tools/dotc/ast/DesugarEnums.scala | 12 +- .../dotty/tools/dotc/ast/MainProxies.scala | 16 +- .../dotty/tools/dotc/ast/NavigateAST.scala | 6 +- .../src/dotty/tools/dotc/ast/Positioned.scala | 10 +- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 16 +- .../tools/dotc/ast/TreeMapWithImplicits.scala | 10 +- .../dotty/tools/dotc/ast/TreeTypeMap.scala | 12 +- compiler/src/dotty/tools/dotc/ast/Trees.scala | 8 +- compiler/src/dotty/tools/dotc/ast/tpd.scala | 12 +- compiler/src/dotty/tools/dotc/ast/untpd.scala | 8 +- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- .../dotc/classpath/ClassPathFactory.scala | 4 +- .../dotc/classpath/DirectoryClassPath.scala | 12 +- .../classpath/VirtualDirectoryClassPath.scala | 2 +- .../ZipAndJarFileLookupFactory.scala | 4 +- .../dotc/classpath/ZipArchiveFileLookup.scala | 2 +- .../dotty/tools/dotc/config/CliCommand.scala | 4 +- .../tools/dotc/config/CompilerCommand.scala | 6 +- .../src/dotty/tools/dotc/config/Feature.scala | 6 +- .../tools/dotc/config/JavaPlatform.scala | 12 +- .../dotty/tools/dotc/config/OutputDirs.scala | 2 +- .../tools/dotc/config/PathResolver.scala | 8 +- .../dotty/tools/dotc/config/Platform.scala | 2 +- .../dotty/tools/dotc/config/SJSPlatform.scala | 6 +- .../tools/dotc/config/ScalaSettings.scala | 4 +- .../tools/dotc/config/ScalaVersion.scala | 2 +- .../dotty/tools/dotc/config/Settings.scala | 2 +- .../tools/dotc/config/WrappedProperties.scala | 2 +- .../dotty/tools/dotc/core/Annotations.scala | 2 +- .../src/dotty/tools/dotc/core/Atoms.scala | 2 +- .../tools/dotc/core/CheckRealizable.scala | 6 +- .../src/dotty/tools/dotc/core/Comments.scala | 6 +- .../src/dotty/tools/dotc/core/Constants.scala | 4 +- .../dotty/tools/dotc/core/Constraint.scala | 2 +- .../tools/dotc/core/ConstraintHandling.scala | 10 +- .../tools/dotc/core/ConstraintRunInfo.scala | 2 +- .../dotty/tools/dotc/core/ContextOps.scala | 4 +- .../src/dotty/tools/dotc/core/Contexts.scala | 30 +- .../dotty/tools/dotc/core/Decorators.scala | 4 +- .../dotty/tools/dotc/core/Definitions.scala | 6 +- .../tools/dotc/core/DenotTransformers.scala | 14 +- .../dotty/tools/dotc/core/Denotations.scala | 26 +- .../tools/dotc/core/GadtConstraint.scala | 2 +- .../src/dotty/tools/dotc/core/Hashable.scala | 4 +- .../tools/dotc/core/JavaNullInterop.scala | 6 +- 
.../tools/dotc/core/MacroClassLoader.scala | 2 +- .../tools/dotc/core/MatchTypeTrace.scala | 4 +- compiler/src/dotty/tools/dotc/core/Mode.scala | 2 +- .../src/dotty/tools/dotc/core/NameKinds.scala | 12 +- .../src/dotty/tools/dotc/core/NameOps.scala | 6 +- .../src/dotty/tools/dotc/core/NamerOps.scala | 2 +- .../src/dotty/tools/dotc/core/Names.scala | 2 +- .../tools/dotc/core/NullOpsDecorator.scala | 6 +- .../tools/dotc/core/OrderingConstraint.scala | 6 +- .../src/dotty/tools/dotc/core/ParamInfo.scala | 2 +- .../dotc/core/PatternTypeConstrainer.scala | 12 +- .../src/dotty/tools/dotc/core/Periods.scala | 2 +- .../src/dotty/tools/dotc/core/Phases.scala | 16 +- .../src/dotty/tools/dotc/core/Scopes.scala | 14 +- .../src/dotty/tools/dotc/core/Signature.scala | 4 +- .../src/dotty/tools/dotc/core/StdNames.scala | 6 +- .../dotty/tools/dotc/core/Substituters.scala | 2 +- .../tools/dotc/core/SymDenotations.scala | 14 +- .../dotty/tools/dotc/core/SymbolLoaders.scala | 10 +- .../src/dotty/tools/dotc/core/Symbols.scala | 32 +- .../tools/dotc/core/TypeApplications.scala | 14 +- .../dotty/tools/dotc/core/TypeComparer.scala | 14 +- .../dotty/tools/dotc/core/TypeErasure.scala | 16 +- .../dotty/tools/dotc/core/TypeErrors.scala | 18 +- .../src/dotty/tools/dotc/core/TypeOps.scala | 16 +- .../dotty/tools/dotc/core/TyperState.scala | 8 +- .../src/dotty/tools/dotc/core/Types.scala | 44 +- .../src/dotty/tools/dotc/core/Uniques.scala | 4 +- .../src/dotty/tools/dotc/core/Variances.scala | 4 +- .../core/classfile/ClassfileConstants.scala | 2 +- .../dotc/core/classfile/ClassfileParser.scala | 14 +- .../classfile/ClassfileTastyUUIDParser.scala | 16 +- .../dotc/core/tasty/CommentUnpickler.scala | 2 +- .../dotc/core/tasty/DottyUnpickler.scala | 8 +- .../tools/dotc/core/tasty/NameBuffer.scala | 8 +- .../dotc/core/tasty/PositionPickler.scala | 12 +- .../dotc/core/tasty/PositionUnpickler.scala | 4 +- .../dotc/core/tasty/TastyClassName.scala | 8 +- .../tools/dotc/core/tasty/TastyPickler.scala | 6 +- .../tools/dotc/core/tasty/TastyPrinter.scala | 8 +- .../dotc/core/tasty/TastyUnpickler.scala | 8 +- .../tools/dotc/core/tasty/TreePickler.scala | 20 +- .../tools/dotc/core/tasty/TreeUnpickler.scala | 52 +- .../core/unpickleScala2/PickleBuffer.scala | 4 +- .../core/unpickleScala2/Scala2Erasure.scala | 4 +- .../core/unpickleScala2/Scala2Unpickler.scala | 28 +- .../dotty/tools/dotc/coverage/Location.scala | 2 +- .../decompiler/DecompilationPrinter.scala | 2 +- .../dotc/decompiler/IDEDecompilerDriver.scala | 6 +- .../dotty/tools/dotc/decompiler/Main.scala | 2 +- .../dotc/decompiler/TASTYDecompiler.scala | 2 +- .../tools/dotc/fromtasty/ReadTasty.scala | 8 +- .../tools/dotc/fromtasty/TASTYCompiler.scala | 4 +- .../dotty/tools/dotc/fromtasty/TASTYRun.scala | 2 +- .../dotty/tools/dotc/inlines/Inliner.scala | 8 +- .../dotty/tools/dotc/inlines/Inlines.scala | 6 +- .../dotc/inlines/PrepareInlineable.scala | 22 +- .../tools/dotc/interactive/Completion.scala | 12 +- .../tools/dotc/interactive/Interactive.scala | 14 +- .../interactive/InteractiveCompiler.scala | 8 +- .../dotc/interactive/InteractiveDriver.scala | 22 +- .../tools/dotc/interactive/SourceTree.scala | 8 +- .../tools/dotc/parsing/CharArrayReader.scala | 2 +- .../tools/dotc/parsing/JavaParsers.scala | 28 +- .../tools/dotc/parsing/JavaScanners.scala | 8 +- .../dotty/tools/dotc/parsing/Parsers.scala | 34 +- .../dotty/tools/dotc/parsing/Scanners.scala | 8 +- .../tools/dotc/parsing/ScriptParsers.scala | 10 +- .../src/dotty/tools/dotc/parsing/Tokens.scala | 2 +- 
.../dotty/tools/dotc/parsing/package.scala | 4 +- .../dotc/parsing/xml/MarkupParserCommon.scala | 2 +- .../dotc/parsing/xml/MarkupParsers.scala | 12 +- .../dotc/parsing/xml/SymbolicXMLBuilder.scala | 12 +- .../tools/dotc/parsing/xml/Utility.scala | 4 +- .../src/dotty/tools/dotc/plugins/Plugin.scala | 8 +- .../dotty/tools/dotc/plugins/Plugins.scala | 8 +- .../tools/dotc/printing/Formatting.scala | 8 +- .../tools/dotc/printing/Highlighting.scala | 2 +- .../tools/dotc/printing/MessageLimiter.scala | 4 +- .../tools/dotc/printing/PlainPrinter.scala | 12 +- .../dotty/tools/dotc/printing/Printer.scala | 4 +- .../tools/dotc/printing/RefinedPrinter.scala | 36 +- .../tools/dotc/printing/ReplPrinter.scala | 12 +- .../dotty/tools/dotc/printing/Showable.scala | 4 +- .../dotc/printing/SyntaxHighlighting.scala | 8 +- .../dotty/tools/dotc/profile/Profiler.scala | 6 +- .../dotc/profile/ThreadPoolFactory.scala | 4 +- .../dotty/tools/dotc/quoted/Interpreter.scala | 26 +- .../tools/dotc/quoted/MacroExpansion.scala | 2 +- .../tools/dotc/quoted/PickledQuotes.scala | 20 +- .../tools/dotc/quoted/QuotePatterns.scala | 4 +- .../dotty/tools/dotc/quoted/QuoteUtils.scala | 6 +- .../dotty/tools/dotc/quoted/QuotesCache.scala | 4 +- .../dotc/quoted/reflect/FromSymbol.scala | 10 +- compiler/src/dotty/tools/dotc/report.scala | 10 +- .../dotc/reporting/ConsoleReporter.scala | 2 +- .../tools/dotc/reporting/Diagnostic.scala | 4 +- .../tools/dotc/reporting/DidYouMean.scala | 4 +- .../dotc/reporting/ExploringReporter.scala | 2 +- .../reporting/HideNonSensicalMessages.scala | 2 +- .../dotc/reporting/MessageRendering.scala | 10 +- .../dotty/tools/dotc/reporting/Reporter.scala | 10 +- .../tools/dotc/reporting/StoreReporter.scala | 4 +- .../tools/dotc/reporting/TestReporter.scala | 2 +- .../dotc/reporting/ThrowingReporter.scala | 2 +- .../reporting/UniqueMessagePositions.scala | 2 +- .../dotty/tools/dotc/reporting/WConf.scala | 6 +- .../dotty/tools/dotc/reporting/messages.scala | 18 +- .../dotty/tools/dotc/rewrites/Rewrites.scala | 4 +- .../src/dotty/tools/dotc/sbt/APIUtils.scala | 12 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 28 +- .../tools/dotc/sbt/ExtractDependencies.scala | 24 +- .../src/dotty/tools/dotc/sbt/ShowAPI.scala | 2 +- .../tools/dotc/semanticdb/ConstantOps.scala | 2 +- .../dotc/semanticdb/ExtractSemanticDB.scala | 18 +- .../dotty/tools/dotc/semanticdb/PPrint.scala | 2 +- .../dotty/tools/dotc/semanticdb/Scala3.scala | 8 +- .../semanticdb/SemanticSymbolBuilder.scala | 8 +- .../dotc/semanticdb/SyntheticsExtractor.scala | 6 +- .../dotty/tools/dotc/semanticdb/Tools.scala | 8 +- .../dotty/tools/dotc/semanticdb/TypeOps.scala | 8 +- .../dotc/semanticdb/generated/Access.scala | 178 +++---- .../semanticdb/generated/Annotation.scala | 26 +- .../dotc/semanticdb/generated/Constant.scala | 286 +++++------ .../semanticdb/generated/Diagnostic.scala | 50 +- .../semanticdb/generated/Documentation.scala | 50 +- .../dotc/semanticdb/generated/Language.scala | 18 +- .../dotc/semanticdb/generated/Location.scala | 26 +- .../dotc/semanticdb/generated/Range.scala | 32 +- .../dotc/semanticdb/generated/Schema.scala | 18 +- .../dotc/semanticdb/generated/Scope.scala | 24 +- .../dotc/semanticdb/generated/Signature.scala | 128 ++--- .../generated/SymbolInformation.scala | 136 +++--- .../generated/SymbolOccurrence.scala | 46 +- .../dotc/semanticdb/generated/Synthetic.scala | 26 +- .../semanticdb/generated/TextDocument.scala | 34 +- .../semanticdb/generated/TextDocuments.scala | 24 +- .../dotc/semanticdb/generated/Tree.scala | 222 ++++----- 
.../dotc/semanticdb/generated/Type.scala | 444 +++++++++--------- .../internal/SemanticdbInputStream.scala | 2 +- .../internal/SemanticdbOutputStream.scala | 2 +- .../tools/dotc/staging/CrossStageSafety.scala | 20 +- .../dotty/tools/dotc/staging/HealType.scala | 14 +- .../tools/dotc/staging/QuoteTypeTags.scala | 8 +- .../tools/dotc/staging/StagingLevel.scala | 10 +- .../dotc/staging/TreeMapWithStages.scala | 8 +- .../tools/dotc/transform/AccessProxies.scala | 24 +- .../tools/dotc/transform/ArrayApply.scala | 14 +- .../dotc/transform/ArrayConstructors.scala | 14 +- .../tools/dotc/transform/BeanProperties.scala | 20 +- .../tools/dotc/transform/BetaReduce.scala | 12 +- .../dotty/tools/dotc/transform/Bridges.scala | 8 +- .../tools/dotc/transform/CapturedVars.scala | 16 +- .../transform/CheckLoopingImplicits.scala | 2 +- .../dotc/transform/CheckNoSuperThis.scala | 2 +- .../tools/dotc/transform/CheckReentrant.scala | 14 +- .../tools/dotc/transform/CheckShadowing.scala | 6 +- .../tools/dotc/transform/CheckStatic.scala | 16 +- .../tools/dotc/transform/CheckUnused.scala | 12 +- .../dotc/transform/CollectEntryPoints.scala | 10 +- .../transform/CollectNullableFields.scala | 8 +- .../dotc/transform/CompleteJavaEnums.scala | 26 +- .../tools/dotc/transform/Constructors.scala | 26 +- .../transform/ContextFunctionResults.scala | 6 +- .../tools/dotc/transform/CookComments.scala | 4 +- .../dotc/transform/CountOuterAccesses.scala | 12 +- .../dotty/tools/dotc/transform/CtxLazy.scala | 2 +- .../tools/dotc/transform/Dependencies.scala | 2 +- .../tools/dotc/transform/DropBreaks.scala | 8 +- .../DropEmptyCompanions.scala.disabled | 16 +- .../dotc/transform/DropOuterAccessors.scala | 18 +- .../tools/dotc/transform/ElimByName.scala | 12 +- .../dotc/transform/ElimErasedValueType.scala | 14 +- .../tools/dotc/transform/ElimOpaque.scala | 18 +- .../dotc/transform/ElimOuterSelect.scala | 8 +- .../dotc/transform/ElimPackagePrefixes.scala | 8 +- .../dotc/transform/ElimPolyFunction.scala | 12 +- .../tools/dotc/transform/ElimRepeated.scala | 22 +- .../tools/dotc/transform/ElimStaticThis.scala | 8 +- .../dotty/tools/dotc/transform/Erasure.scala | 48 +- .../tools/dotc/transform/EtaReduce.scala | 2 +- .../tools/dotc/transform/ExpandPrivate.scala | 18 +- .../tools/dotc/transform/ExpandSAMs.scala | 12 +- .../tools/dotc/transform/ExplicitOuter.scala | 28 +- .../tools/dotc/transform/ExplicitSelf.scala | 6 +- .../dotc/transform/ExtensionMethods.scala | 18 +- .../tools/dotc/transform/FirstTransform.scala | 24 +- .../dotty/tools/dotc/transform/Flatten.scala | 8 +- .../dotc/transform/FullParameterization.scala | 16 +- .../transform/FunctionXXLForwarders.scala | 20 +- .../dotc/transform/GenericSignatures.scala | 16 +- .../dotty/tools/dotc/transform/Getters.scala | 20 +- .../tools/dotc/transform/HoistSuperArgs.scala | 20 +- .../tools/dotc/transform/InlinePatterns.scala | 12 +- .../tools/dotc/transform/InlineVals.scala | 12 +- .../dotty/tools/dotc/transform/Inlining.scala | 16 +- .../dotc/transform/Instrumentation.scala | 18 +- .../dotc/transform/InterceptedMethods.scala | 10 +- .../IsInstanceOfEvaluator.scala.disabled | 10 +- .../tools/dotc/transform/LambdaLift.scala | 26 +- .../dotty/tools/dotc/transform/LazyVals.scala | 8 +- .../tools/dotc/transform/LetOverApply.scala | 8 +- .../dotc/transform/Literalize.scala.disabled | 20 +- .../dotc/transform/MacroAnnotations.scala | 2 +- .../tools/dotc/transform/MacroTransform.scala | 10 +- .../tools/dotc/transform/MegaPhase.scala | 10 +- .../dotty/tools/dotc/transform/Memoize.scala | 26 +- 
.../dotty/tools/dotc/transform/Mixin.scala | 34 +- .../dotty/tools/dotc/transform/MixinOps.scala | 12 +- .../tools/dotc/transform/MoveStatics.scala | 18 +- .../dotc/transform/NonLocalReturns.scala | 12 +- .../dotc/transform/OverridingPairs.scala | 6 +- .../dotc/transform/ParamForwarding.scala | 8 +- .../tools/dotc/transform/PatternMatcher.scala | 26 +- .../tools/dotc/transform/PickleQuotes.scala | 32 +- .../dotty/tools/dotc/transform/Pickler.scala | 16 +- .../tools/dotc/transform/PostInlining.scala | 2 +- .../tools/dotc/transform/PostTyper.scala | 14 +- .../dotc/transform/ProtectedAccessors.scala | 12 +- .../dotc/transform/PruneErasedDefs.scala | 18 +- .../tools/dotc/transform/PureStats.scala | 10 +- .../tools/dotc/transform/ReifiedReflect.scala | 26 +- .../transform/RepeatableAnnotations.scala | 18 +- .../tools/dotc/transform/ResolveSuper.scala | 32 +- .../tools/dotc/transform/RestoreScopes.scala | 10 +- .../tools/dotc/transform/SelectStatic.scala | 14 +- .../tools/dotc/transform/SeqLiterals.scala | 8 +- .../tools/dotc/transform/SetRootTree.scala | 2 +- .../transform/SpecializeApplyMethods.scala | 8 +- .../dotc/transform/SpecializeFunctions.scala | 8 +- .../dotty/tools/dotc/transform/Splicer.scala | 14 +- .../dotty/tools/dotc/transform/Splicing.scala | 32 +- .../dotty/tools/dotc/transform/Staging.scala | 16 +- .../tools/dotc/transform/SuperAccessors.scala | 14 +- .../dotty/tools/dotc/transform/SymUtils.scala | 24 +- .../dotc/transform/SyntheticMembers.scala | 20 +- .../dotty/tools/dotc/transform/TailRec.scala | 2 +- .../dotc/transform/TransformWildcards.scala | 8 +- .../tools/dotc/transform/TreeChecker.scala | 36 +- .../tools/dotc/transform/TreeExtractors.scala | 8 +- .../dotc/transform/TryCatchPatterns.scala | 10 +- .../dotc/transform/TupleOptimizations.scala | 20 +- .../tools/dotc/transform/TypeTestsCasts.scala | 20 +- .../tools/dotc/transform/TypeUtils.scala | 8 +- .../dotc/transform/UncacheGivenAliases.scala | 12 +- .../dotc/transform/UninitializedDefs.scala | 10 +- .../dotc/transform/VCElideAllocations.scala | 10 +- .../dotc/transform/VCInlineMethods.scala | 10 +- .../tools/dotc/transform/ValueClasses.scala | 16 +- .../dotc/transform/YCheckPositions.scala | 10 +- .../tools/dotc/transform/init/Checker.scala | 20 +- .../tools/dotc/transform/init/Errors.scala | 6 +- .../tools/dotc/transform/init/Trace.scala | 2 +- .../transform/localopt/FormatChecker.scala | 6 +- .../localopt/StringInterpolatorOpt.scala | 10 +- .../tools/dotc/transform/patmat/Space.scala | 34 +- .../transform/sjs/AddLocalJSFakeNews.scala | 10 +- .../transform/sjs/ExplicitJSClasses.scala | 32 +- .../dotc/transform/sjs/JSExportUtils.scala | 4 +- .../tools/dotc/transform/sjs/JSSymUtils.scala | 24 +- .../transform/sjs/JUnitBootstrappers.scala | 24 +- .../dotc/transform/sjs/PrepJSExports.scala | 24 +- .../dotc/transform/sjs/PrepJSInterop.scala | 32 +- .../dotty/tools/dotc/typer/Applications.scala | 40 +- .../src/dotty/tools/dotc/typer/Checking.scala | 46 +- .../dotty/tools/dotc/typer/ConstFold.scala | 20 +- .../src/dotty/tools/dotc/typer/Deriving.scala | 16 +- .../dotty/tools/dotc/typer/Docstrings.scala | 4 +- .../src/dotty/tools/dotc/typer/Dynamic.scala | 4 +- .../tools/dotc/typer/ErrorReporting.scala | 16 +- .../dotty/tools/dotc/typer/EtaExpansion.scala | 22 +- .../dotty/tools/dotc/typer/Implicits.scala | 40 +- .../dotty/tools/dotc/typer/ImportInfo.scala | 4 +- .../tools/dotc/typer/ImportSuggestions.scala | 10 +- .../dotty/tools/dotc/typer/Inferencing.scala | 20 +- .../dotty/tools/dotc/typer/JavaChecks.scala | 4 +- 
.../src/dotty/tools/dotc/typer/Namer.scala | 34 +- .../dotty/tools/dotc/typer/Nullables.scala | 12 +- .../dotty/tools/dotc/typer/ProtoTypes.scala | 16 +- .../tools/dotc/typer/QuotesAndSplices.scala | 34 +- .../src/dotty/tools/dotc/typer/ReTyper.scala | 18 +- .../dotty/tools/dotc/typer/RefChecks.scala | 26 +- .../dotty/tools/dotc/typer/Synthesizer.scala | 22 +- .../dotty/tools/dotc/typer/TypeAssigner.scala | 12 +- .../src/dotty/tools/dotc/typer/Typer.scala | 76 +-- .../dotty/tools/dotc/typer/TyperPhase.scala | 10 +- .../tools/dotc/typer/VarianceChecker.scala | 14 +- .../tools/dotc/util/CommentParsing.scala | 2 +- .../src/dotty/tools/dotc/util/LRUCache.scala | 2 +- .../tools/dotc/util/NameTransformer.scala | 2 +- .../dotty/tools/dotc/util/ParsedComment.scala | 4 +- .../tools/dotc/util/ReusableInstance.scala | 2 +- .../dotty/tools/dotc/util/ShowPickled.scala | 4 +- .../dotty/tools/dotc/util/Signatures.scala | 10 +- .../tools/dotc/util/SixteenNibbles.scala | 2 +- .../dotty/tools/dotc/util/SourceFile.scala | 12 +- .../tools/dotc/util/SourcePosition.scala | 2 +- .../src/dotty/tools/dotc/util/Stats.scala | 2 +- .../src/dotty/tools/dotc/util/Store.scala | 2 +- .../dotty/tools/dotc/util/WeakHashSet.scala | 4 +- compiler/src/dotty/tools/dotc/util/kwords.sc | 6 +- compiler/src/dotty/tools/io/ClassPath.scala | 2 +- compiler/src/dotty/tools/io/File.scala | 2 +- compiler/src/dotty/tools/io/Jar.scala | 8 +- compiler/src/dotty/tools/io/JarArchive.scala | 2 +- compiler/src/dotty/tools/io/Path.scala | 6 +- compiler/src/dotty/tools/io/ZipArchive.scala | 4 +- .../tools/repl/CollectTopLevelImports.scala | 4 +- .../src/dotty/tools/repl/JLineTerminal.scala | 10 +- .../src/dotty/tools/repl/ParseResult.scala | 2 +- compiler/src/dotty/tools/repl/Rendering.scala | 2 +- .../src/dotty/tools/repl/ReplCommand.scala | 2 +- .../src/dotty/tools/repl/ReplCompiler.scala | 24 +- .../src/dotty/tools/repl/ReplDriver.scala | 18 +- .../src/dotty/tools/runner/ObjectRunner.scala | 2 +- .../quoted/runtime/impl/QuotesImpl.scala | 12 +- .../quoted/runtime/impl/ScopeException.scala | 2 +- .../quoted/runtime/impl/SpliceScope.scala | 2 +- .../runtime/impl/printers/Extractors.scala | 6 +- .../runtime/impl/printers/SourceCode.scala | 6 +- library/src/scala/Tuple.scala | 8 +- .../scala/annotation/MacroAnnotation.scala | 4 +- .../scala/annotation/constructorOnly.scala | 2 +- library/src/scala/annotation/newMain.scala | 4 +- library/src/scala/annotation/static.scala | 2 +- library/src/scala/compiletime/ops/any.scala | 10 +- .../src/scala/compiletime/ops/boolean.scala | 8 +- .../src/scala/compiletime/ops/double.scala | 32 +- library/src/scala/compiletime/ops/float.scala | 32 +- library/src/scala/compiletime/ops/int.scala | 50 +- library/src/scala/compiletime/ops/long.scala | 46 +- .../src/scala/compiletime/ops/string.scala | 10 +- library/src/scala/quoted/Expr.scala | 6 +- library/src/scala/quoted/ExprMap.scala | 2 +- library/src/scala/quoted/FromExpr.scala | 2 +- library/src/scala/quoted/Quotes.scala | 34 +- library/src/scala/quoted/ToExpr.scala | 20 +- library/src/scala/quoted/Varargs.scala | 4 +- library/src/scala/util/FromDigits.scala | 2 +- 420 files changed, 3209 insertions(+), 3209 deletions(-) diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala index 0a5a1b191a44..1540cc86d7a6 100644 --- a/compiler/src/dotty/tools/MainGenericRunner.scala +++ b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -195,7 +195,7 @@ object MainGenericRunner { case ExecuteMode.PossibleRun => val 
newClasspath = (settings.classPath :+ ".").flatMap(_.split(classpathSeparator).filter(_.nonEmpty)).map(File(_).toURI.toURL) - import dotty.tools.runner.RichClassLoader._ + import dotty.tools.runner.RichClassLoader.* val newClassLoader = ScalaClassLoader.fromURLsParallelCapable(newClasspath) val targetToRun = settings.possibleEntryPaths.to(LazyList).find { entryPath => newClassLoader.tryToLoadClass(entryPath).orElse { diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala index d95638be2695..4027cf9fb564 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala @@ -4,8 +4,8 @@ package jvm import scala.language.unsafeNulls -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.report /** diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index 3e9bbce2c0ae..f1029b702ee5 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -13,16 +13,16 @@ import BCodeHelpers.InvokeStyle import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.core.Constants._ +import dotty.tools.dotc.core.Constants.* import dotty.tools.dotc.core.Flags.{Label => LabelFlag, _} -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.StdNames.{nme, str} -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.transform.Erasure -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.util.Spans._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ +import dotty.tools.dotc.transform.SymUtils.* +import dotty.tools.dotc.util.Spans.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Phases.* import dotty.tools.dotc.core.Decorators.em import dotty.tools.dotc.report @@ -33,13 +33,13 @@ import dotty.tools.dotc.report * */ trait BCodeBodyBuilder extends BCodeSkelBuilder { - // import global._ - // import definitions._ - import tpd._ + // import global.* + // import definitions.* + import tpd.* import int.{_, given} import DottyBackendInterface.symExtensions - import bTypes._ - import coreBTypes._ + import bTypes.* + import coreBTypes.* protected val primitives: DottyPrimitives @@ -126,7 +126,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { assert(resKind.isNumericType || (resKind == BOOL), s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]") - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* args match { // unary operation @@ -179,7 +179,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = tree match{ case Apply(DesugaredSelect(arrayObj, _), args) => - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* val k = tpeTK(arrayObj) genLoad(arrayObj, k) val elementType = typeOfArrayOp.getOrElse[bTypes.BType](code, abort(s"Unknown operation on arrays: $tree code: $code")) @@ -262,7 +262,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val code = primitives.getPrimitive(tree, receiver.tpe) - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* if (isArithmeticOp(code)) 
genArithmeticOp(tree, code) else if (code == CONCAT) genStringConcat(tree) @@ -1267,7 +1267,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { /* Generate coercion denoted by "code" */ def genCoercion(code: Int): Unit = { - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* (code: @switch) match { case B2B | S2S | C2C | I2I | L2L | F2F | D2D => () case _ => @@ -1443,7 +1443,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val mdescr = bmType.descriptor val isInterface = isEmittedInterface(receiverClass) - import InvokeStyle._ + import InvokeStyle.* if (style == Super) { if (isInterface && !method.is(JavaDefined)) { val args = new Array[BType](bmType.argumentTypes.length + 1) @@ -1497,7 +1497,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) bc.emitIF_ACMP(op, success) } else { - import Primitives._ + import Primitives.* def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE (tk: @unchecked) match { case LONG => emit(asm.Opcodes.LCMP) @@ -1512,7 +1512,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */ private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { - import Primitives._ + import Primitives.* if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) else { if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index 3085c9411222..3779f59d33b0 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -14,20 +14,20 @@ import scala.compiletime.uninitialized import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.Trees -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Constants._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Constants.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Phases.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Names.Name import dotty.tools.dotc.core.NameKinds.ExpandedName import dotty.tools.dotc.core.Signature -import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.NameKinds -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.transform.GenericSignatures import dotty.tools.dotc.transform.ElimErasedValueType @@ -45,12 +45,12 @@ import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions */ trait BCodeHelpers extends BCodeIdiomatic { // for some reason singleton types aren't allowed in constructor calls. 
will need several casts in code to enforce - //import global._ - import bTypes._ - import tpd._ - import coreBTypes._ + //import global.* + import bTypes.* + import tpd.* + import coreBTypes.* import int.{_, given} - import DottyBackendInterface._ + import DottyBackendInterface.* // We need to access GenBCode phase to get access to post-processor components. // At this point it should always be initialized already. @@ -701,10 +701,10 @@ trait BCodeHelpers extends BCodeIdiomatic { * classes. */ private def typeToTypeKind(tp: Type)(ct: BCodeHelpers)(storage: ct.BCInnerClassGen): ct.bTypes.BType = { - import ct.bTypes._ + import ct.bTypes.* val defn = ctx.definitions - import coreBTypes._ - import Types._ + import coreBTypes.* + import Types.* /** * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int. * The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType. @@ -852,7 +852,7 @@ trait BCodeHelpers extends BCodeIdiomatic { object BCodeHelpers { class InvokeStyle(val style: Int) extends AnyVal { - import InvokeStyle._ + import InvokeStyle.* def isVirtual: Boolean = this == Virtual def isStatic : Boolean = this == Static def isSpecial: Boolean = this == Special diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala index efd1ab19764d..9938b7415da7 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala @@ -22,8 +22,8 @@ trait BCodeIdiomatic { val bTypes: BTypesFromSymbols[int.type] import int.{_, given} - import bTypes._ - import coreBTypes._ + import bTypes.* + import coreBTypes.* lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName @@ -617,7 +617,7 @@ trait BCodeIdiomatic { /* Constant-valued val-members of JCodeMethodN at the companion object, so as to avoid re-initializing them multiple times. 
*/ object JCodeMethodN { - import asm.Opcodes._ + import asm.Opcodes.* // ---------------- conversions ---------------- @@ -651,7 +651,7 @@ trait BCodeIdiomatic { * can-multi-thread */ final def coercionFrom(code: Int): BType = { - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* (code: @switch) match { case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT @@ -668,7 +668,7 @@ trait BCodeIdiomatic { * can-multi-thread */ final def coercionTo(code: Int): BType = { - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps.* (code: @switch) match { case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index 073cc44e76b7..61383d2000d1 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -31,10 +31,10 @@ import dotty.tools.dotc.transform.SymUtils.* trait BCodeSkelBuilder extends BCodeHelpers { import int.{_, given} import DottyBackendInterface.{symExtensions, _} - import tpd._ - import bTypes._ - import coreBTypes._ - import bCodeAsmCommon._ + import tpd.* + import bTypes.* + import coreBTypes.* + import bCodeAsmCommon.* lazy val NativeAttr: Symbol = requiredClass[scala.native] diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala index 74e1c5812b14..4e2ea6dd52b8 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala @@ -9,7 +9,7 @@ import scala.tools.asm import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.core.StdNames.nme -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.ast.tpd /* @@ -20,9 +20,9 @@ import dotty.tools.dotc.ast.tpd */ trait BCodeSyncAndTry extends BCodeBodyBuilder { import int.given - import tpd._ - import bTypes._ - import coreBTypes._ + import tpd.* + import bTypes.* + import coreBTypes.* /* * Functionality to lower `synchronized` and `try` expressions. 
*/ diff --git a/compiler/src/dotty/tools/backend/jvm/BTypes.scala b/compiler/src/dotty/tools/backend/jvm/BTypes.scala index ba5e3e360e88..8b4c2834ed19 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypes.scala @@ -39,7 +39,7 @@ abstract class BTypes { self => def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName) val coreBTypes: CoreBTypes { val bTypes: self.type} - import coreBTypes._ + import coreBTypes.* /** * A BType is either a primitve type, a ClassBType, an ArrayBType of one of these, or a MethodType diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index dc5cf48de6fe..0743465b7b3b 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -7,12 +7,12 @@ import scala.annotation.threadUnsafe import scala.collection.mutable import scala.collection.mutable.Clearable -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Phases.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.transform.SymUtils.* import dotty.tools.dotc.core.StdNames import dotty.tools.dotc.core.Phases @@ -29,12 +29,12 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce lazy val VolatileAttr = requiredClass[scala.volatile] val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int) - import bCodeAsmCommon._ + import bCodeAsmCommon.* val coreBTypes = new CoreBTypesFromSymbols[I]{ val bTypes: BTypesFromSymbols.this.type = BTypesFromSymbols.this } - import coreBTypes._ + import coreBTypes.* @threadUnsafe protected lazy val classBTypeFromInternalNameMap = collection.concurrent.TrieMap.empty[String, ClassBType] @@ -286,7 +286,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce val finalFlag = sym.is(Final) && !toDenot(sym).isClassConstructor && !sym.is(Mutable, butNot = Accessor) && !sym.enclosingClass.is(Trait) - import asm.Opcodes._ + import asm.Opcodes.* import GenBCodeOps.addFlagIf 0 .addFlagIf(privateFlag, ACC_PRIVATE) .addFlagIf(!privateFlag, ACC_PUBLIC) @@ -312,7 +312,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce } def javaFieldFlags(sym: Symbol) = { - import asm.Opcodes._ + import asm.Opcodes.* import GenBCodeOps.addFlagIf javaFlags(sym) .addFlagIf(sym.hasAnnotation(TransientAttr), ACC_TRANSIENT) diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala index 840c58cb2314..2f8a469169cc 100644 --- a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -5,7 +5,7 @@ import scala.tools.asm.Handle import scala.tools.asm.tree.InvokeDynamicInsnNode import asm.tree.ClassNode import scala.collection.mutable -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* import dotty.tools.dotc.report import scala.language.unsafeNulls @@ -92,9 +92,9 @@ class BackendUtils(val postProcessor: PostProcessor) { * methods. 
*/ def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { - import asm.Opcodes._ - import bTypes._ - import coreBTypes._ + import asm.Opcodes.* + import bTypes.* + import coreBTypes.* val cw = classNode diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala index 50b5d0e122e9..ec251b4aa3f0 100644 --- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala @@ -4,7 +4,7 @@ import java.io.{DataOutputStream, IOException, BufferedOutputStream, FileOutputS import java.nio.ByteBuffer import java.nio.channels.{ClosedByInterruptException, FileChannel} import java.nio.charset.StandardCharsets.UTF_8 -import java.nio.file._ +import java.nio.file.* import java.nio.file.attribute.FileAttribute import java.util import java.util.concurrent.ConcurrentHashMap @@ -15,7 +15,7 @@ import dotty.tools.dotc.core.Decorators.em import dotty.tools.io.{AbstractFile, PlainFile} import dotty.tools.io.PlainFile.toPlainFile import BTypes.InternalName -import scala.util.chaining._ +import scala.util.chaining.* import dotty.tools.io.JarArchive import scala.language.unsafeNulls @@ -146,7 +146,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { val storeOnly = compressionLevel == Deflater.NO_COMPRESSION val jarWriter: JarOutputStream = { - import scala.util.Properties._ + import scala.util.Properties.* val manifest = new Manifest val attrs = manifest.getMainAttributes.nn attrs.put(MANIFEST_VERSION, "1.0") diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index 4bf305f3387c..a477e55e2b68 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -8,17 +8,17 @@ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Phases.Phase import scala.collection.mutable -import scala.jdk.CollectionConverters._ -import dotty.tools.dotc.transform.SymUtils._ +import scala.jdk.CollectionConverters.* +import dotty.tools.dotc.transform.SymUtils.* import dotty.tools.dotc.interfaces import dotty.tools.dotc.report import java.util.Optional import dotty.tools.dotc.sbt.ExtractDependencies -import dotty.tools.dotc.core._ -import Contexts._ -import Phases._ -import Symbols._ +import dotty.tools.dotc.core.* +import Contexts.* +import Phases.* +import Symbols.* import StdNames.nme import java.io.DataOutputStream @@ -28,8 +28,8 @@ import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler } import dotty.tools.dotc.core.tasty.TastyUnpickler import scala.tools.asm -import scala.tools.asm.tree._ -import tpd._ +import scala.tools.asm.tree.* +import tpd.* import dotty.tools.io.AbstractFile import dotty.tools.dotc.util import dotty.tools.dotc.util.NoSourcePosition @@ -37,7 +37,7 @@ import dotty.tools.dotc.util.NoSourcePosition class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( val bTypes: BTypesFromSymbols[int.type]) { self => import DottyBackendInterface.symExtensions - import bTypes._ + import bTypes.* import int.given private lazy val mirrorCodeGen = Impl.JMirrorBuilder() diff --git a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala index 299c1c75d6cf..94a946989d23 100644 --- a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala +++ b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala @@ -1,9 
 package dotty.tools.backend.jvm

 import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Phases._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Phases.*
+import dotty.tools.dotc.core.Symbols.*
 import dotty.tools.dotc.core.Flags.Trait
 import dotty.tools.dotc.transform.MegaPhase.MiniPhase

@@ -18,7 +18,7 @@ import dotty.tools.dotc.transform.MegaPhase.MiniPhase
  *  the redundant mixin class could be required as a parent by the JVM.
  */
 class CollectSuperCalls extends MiniPhase {
-  import tpd._
+  import tpd.*

   override def phaseName: String = CollectSuperCalls.name
diff --git a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala
index 070de4b1b0e2..5a3980347bcb 100644
--- a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala
+++ b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala
@@ -3,7 +3,7 @@ package backend
 package jvm

-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Symbols.*
 import dotty.tools.dotc.transform.Erasure
 import scala.tools.asm.{Handle, Opcodes}
 import dotty.tools.dotc.core.StdNames
@@ -11,7 +11,7 @@ import BTypes.InternalName

 abstract class CoreBTypes {
   val bTypes: BTypes
-  import bTypes._
+  import bTypes.*

   def primitiveTypeMap: Map[Symbol, PrimitiveBType]
@@ -55,9 +55,9 @@ abstract class CoreBTypes {

 abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTypes {
   val bTypes: BTypesFromSymbols[I]
-  import bTypes._
+  import bTypes.*
   import int.given
-  import DottyBackendInterface._
+  import DottyBackendInterface.*
   import frontendAccess.frontendSynch
   import dotty.tools.dotc.core.Contexts.Context
@@ -223,7 +223,7 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy
   )

   lazy val typeOfArrayOp: Map[Int, BType] = {
-    import dotty.tools.backend.ScalaPrimitivesOps._
+    import dotty.tools.backend.ScalaPrimitivesOps.*
     Map(
       (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
       (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
index c005a6d38403..30568ef92b2d 100644
--- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
+++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
@@ -3,23 +3,23 @@ package dotty.tools.backend.jvm
 import scala.language.unsafeNulls

 import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.transform.SymUtils.*
 import java.io.{File => _}

 import scala.reflect.ClassTag
 import dotty.tools.io.AbstractFile
-import dotty.tools.dotc.core._
-import Contexts._
-import Types._
-import Symbols._
-import Phases._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Types.*
+import Symbols.*
+import Phases.*
 import Decorators.em

 import dotty.tools.dotc.util.ReadOnlyMap
 import dotty.tools.dotc.report

-import tpd._
+import tpd.*

 import StdNames.nme
 import NameKinds.{LazyBitMapName, LazyLocalName}
diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
index 6dcaaa23c61d..d5c111259f01 100644
--- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
+++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
@@ -3,11 +3,11 @@ package dotty.tools.backend.jvm
 import dotty.tools.dotc.CompilationUnit
 import dotty.tools.dotc.core.Phases.Phase
 import dotty.tools.dotc.report
-import dotty.tools.dotc.core._
+import dotty.tools.dotc.core.*
 import dotty.tools.dotc.interfaces.CompilerCallback
-import Contexts._
-import Symbols._
-import dotty.tools.io._
+import Contexts.*
+import Symbols.*
+import dotty.tools.io.*

 import scala.collection.mutable
 import scala.compiletime.uninitialized
diff --git a/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala
index f17f9d620d90..bf2ae9a131aa 100644
--- a/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala
+++ b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala
@@ -2,7 +2,7 @@ package dotty.tools.backend.jvm

 import java.nio.channels.ClosedByInterruptException
 import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy
-import java.util.concurrent._
+import java.util.concurrent.*
 import scala.collection.mutable.ListBuffer
 import scala.concurrent.duration.Duration
diff --git a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala
index 71bf307266c5..00d7dc598509 100644
--- a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala
+++ b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala
@@ -3,12 +3,12 @@ package dotty.tools.backend.jvm
 import scala.language.unsafeNulls

 import scala.tools.asm.{ClassReader, Type, Handle }
-import scala.tools.asm.tree._
+import scala.tools.asm.tree.*

 import scala.collection.mutable
 import scala.util.control.{NoStackTrace, NonFatal}
-import scala.annotation._
-import scala.jdk.CollectionConverters._
+import scala.annotation.*
+import scala.jdk.CollectionConverters.*

 // Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf
 // https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L928
diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala
index e6911c766655..4e3438f3d78a 100644
--- a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala
+++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala
@@ -14,7 +14,7 @@ import dotty.tools.dotc.core.Phases
  *  frontend. All methods are synchronized.
  */
 sealed abstract class PostProcessorFrontendAccess {
-  import PostProcessorFrontendAccess._
+  import PostProcessorFrontendAccess.*

   def compilerSettings: CompilerSettings
diff --git a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
index bc453aec17af..262b5df43362 100644
--- a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
+++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
@@ -2,10 +2,10 @@ package dotty.tools
 package backend.jvm

 import dotc.ast.Trees.Select
-import dotc.ast.tpd._
-import dotc.core._
-import Contexts._
-import Names.TermName, StdNames._
+import dotc.ast.tpd.*
+import dotc.core.*
+import Contexts.*
+import Names.TermName, StdNames.*
 import Types.{JavaArrayType, UnspecifiedErrorType, Type}
 import Symbols.{Symbol, NoSymbol}
 import Decorators.em
@@ -32,7 +32,7 @@ import scala.annotation.threadUnsafe
  *  Inspired from the `scalac` compiler.
  */
 class DottyPrimitives(ictx: Context) {
-  import dotty.tools.backend.ScalaPrimitivesOps._
+  import dotty.tools.backend.ScalaPrimitivesOps.*

   @threadUnsafe private lazy val primitives: ReadOnlyMap[Symbol, Int] = init
diff --git a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala
index 1579b4577933..2c5a6639dc8b 100644
--- a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala
+++ b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala
@@ -1,8 +1,8 @@
 package dotty.tools.backend.sjs

-import dotty.tools.dotc.core._
-import Contexts._
-import Phases._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Phases.*

 /** Generates Scala.js IR files for the compilation unit. */
 class GenSJSIR extends Phase {
diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala
index fa8f99fc0e03..d0694617f61e 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala
@@ -8,20 +8,20 @@ import scala.collection.mutable

 import dotty.tools.FatalError
 import dotty.tools.dotc.CompilationUnit
 import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core._
-import Contexts._
-import Decorators._
-import Flags._
-import Names._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Decorators.*
+import Flags.*
+import Names.*
 import NameKinds.DefaultGetterName
-import Types._
-import Symbols._
-import Phases._
-import StdNames._
+import Types.*
+import Symbols.*
+import Phases.*
+import StdNames.*
 import TypeErasure.ErasedValueType

 import dotty.tools.dotc.transform.{Erasure, ValueClasses}
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
 import dotty.tools.dotc.util.SourcePosition
 import dotty.tools.dotc.report

@@ -32,9 +32,9 @@ import org.scalajs.ir.OriginalName
 import org.scalajs.ir.OriginalName.NoOriginalName
 import org.scalajs.ir.Trees.OptimizerHints

-import dotty.tools.dotc.transform.sjs.JSSymUtils._
+import dotty.tools.dotc.transform.sjs.JSSymUtils.*

-import JSEncoding._
+import JSEncoding.*
 import ScopedVar.withScopedVars

 /** Main codegen for Scala.js IR.
@@ -54,15 +54,15 @@ import ScopedVar.withScopedVars
  *  - `genStatOrExpr()` and everything else generate the bodies of methods.
  */
 class JSCodeGen()(using genCtx: Context) {
-  import JSCodeGen._
-  import tpd._
+  import JSCodeGen.*
+  import tpd.*

   val sjsPlatform = dotty.tools.dotc.config.SJSPlatform.sjsPlatform
   val jsdefn = JSDefinitions.jsdefn
   private val primitives = new JSPrimitives(genCtx)

   val positionConversions = new JSPositions()(using genCtx)
-  import positionConversions._
+  import positionConversions.*

   private val jsExportsGen = new JSExportsGen(this)
@@ -1827,7 +1827,7 @@ class JSCodeGen()(using genCtx: Context) {
       }

     case Literal(value) =>
-      import Constants._
+      import Constants.*
       value.tag match {
         case UnitTag =>
           js.Skip()
@@ -2525,7 +2525,7 @@ class JSCodeGen()(using genCtx: Context) {

   /** Gen JS code for a primitive method call. */
   private def genPrimitiveOp(tree: Apply, isStat: Boolean): js.Tree = {
-    import dotty.tools.backend.ScalaPrimitivesOps._
+    import dotty.tools.backend.ScalaPrimitivesOps.*

     implicit val pos = tree.span

@@ -2565,7 +2565,7 @@ class JSCodeGen()(using genCtx: Context) {

   /** Gen JS code for a simple unary operation.
   */
  private def genSimpleUnaryOp(tree: Apply, arg: Tree, code: Int): js.Tree = {
-    import dotty.tools.backend.ScalaPrimitivesOps._
+    import dotty.tools.backend.ScalaPrimitivesOps.*

     implicit val pos = tree.span

@@ -2606,7 +2606,7 @@ class JSCodeGen()(using genCtx: Context) {

   /** Gen JS code for a simple binary operation. */
   private def genSimpleBinaryOp(tree: Apply, lhs: Tree, rhs: Tree, code: Int): js.Tree = {
-    import dotty.tools.backend.ScalaPrimitivesOps._
+    import dotty.tools.backend.ScalaPrimitivesOps.*

     implicit val pos: SourcePosition = tree.sourcePos

@@ -2646,7 +2646,7 @@ class JSCodeGen()(using genCtx: Context) {
       } else if (code == ZAND) {
         js.If(lsrc, rsrc, js.BooleanLiteral(false))(jstpe.BooleanType)
       } else {
-        import js.BinaryOp._
+        import js.BinaryOp.*

         (opType: @unchecked) match {
           case jstpe.IntType =>
@@ -2768,7 +2768,7 @@ class JSCodeGen()(using genCtx: Context) {
    */
   private def genConversion(from: jstpe.Type, to: jstpe.Type, value: js.Tree)(
       implicit pos: Position): js.Tree = {
-    import js.UnaryOp._
+    import js.UnaryOp.*

     if (from == to || from == jstpe.NothingType) {
       value
@@ -2823,7 +2823,7 @@ class JSCodeGen()(using genCtx: Context) {
   private def genUniversalEqualityOp(ltpe: Type, rtpe: Type, lhs: js.Tree, rhs: js.Tree, code: Int)(
       implicit pos: SourcePosition): js.Tree = {
-    import dotty.tools.backend.ScalaPrimitivesOps._
+    import dotty.tools.backend.ScalaPrimitivesOps.*

     val bypassEqEq = {
       // Do not call equals if we have a literal null at either side.
@@ -2931,7 +2931,7 @@ class JSCodeGen()(using genCtx: Context) {

   /** Gen JS code for an array operation (get, set or length) */
   private def genArrayOp(tree: Tree, code: Int): js.Tree = {
-    import dotty.tools.backend.ScalaPrimitivesOps._
+    import dotty.tools.backend.ScalaPrimitivesOps.*

     implicit val pos = tree.span

@@ -3766,7 +3766,7 @@ class JSCodeGen()(using genCtx: Context) {
   private def genJSPrimitive(tree: Apply, args: List[Tree], code: Int,
       isStat: Boolean): js.Tree = {
-    import JSPrimitives._
+    import JSPrimitives.*

     implicit val pos = tree.span

@@ -4696,7 +4696,7 @@ class JSCodeGen()(using genCtx: Context) {
   }

   private def computeJSNativeLoadSpecOfInPhase(sym: Symbol)(using Context): js.JSNativeLoadSpec = {
-    import js.JSNativeLoadSpec._
+    import js.JSNativeLoadSpec.*

     val symOwner = sym.owner
diff --git a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala
index 49755a8ee83e..ab7f9a89f9c5 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala
@@ -4,12 +4,12 @@ import scala.language.unsafeNulls

 import scala.annotation.threadUnsafe

-import dotty.tools.dotc.core._
-import Names._
-import Types._
-import Contexts._
-import Symbols._
-import StdNames._
+import dotty.tools.dotc.core.*
+import Names.*
+import Types.*
+import Contexts.*
+import Symbols.*
+import StdNames.*

 import dotty.tools.dotc.config.SJSPlatform
 import scala.compiletime.uninitialized
diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala
index 73a150c60290..f2b90d5b1161 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala
@@ -4,16 +4,16 @@ import scala.language.unsafeNulls

 import scala.collection.mutable

-import dotty.tools.dotc.core._
-import Contexts._
-import Flags._
-import Types._
-import Symbols._
-import NameOps._
-import Names._
-import StdNames._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Flags.*
+import Types.*
+import Symbols.*
+import NameOps.*
+import Names.*
+import StdNames.*

-import dotty.tools.dotc.transform.sjs.JSSymUtils._
+import dotty.tools.dotc.transform.sjs.JSSymUtils.*

 import org.scalajs.ir
 import org.scalajs.ir.{Trees => js, Types => jstpe}
@@ -61,7 +61,7 @@ object JSEncoding {
   // Fresh local name generator ----------------------------------------------

   class LocalNameGenerator {
-    import LocalNameGenerator._
+    import LocalNameGenerator.*

     private val usedLocalNames = mutable.Set.empty[LocalName]
     private val localSymbolNames = mutable.Map.empty[Symbol, LocalName]
diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala
index 78412999bb34..8c72f03e7cc4 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala
@@ -5,18 +5,18 @@ import scala.language.unsafeNulls
 import scala.annotation.tailrec
 import scala.collection.mutable

-import dotty.tools.dotc.core._
+import dotty.tools.dotc.core.*

-import Contexts._
-import Decorators._
-import Denotations._
-import Flags._
-import Names._
+import Contexts.*
+import Decorators.*
+import Denotations.*
+import Flags.*
+import Names.*
 import NameKinds.DefaultGetterName
-import NameOps._
-import Phases._
-import Symbols._
-import Types._
+import NameOps.*
+import Phases.*
+import Symbols.*
+import Types.*
 import TypeErasure.ErasedValueType

 import dotty.tools.dotc.util.{SourcePosition, SrcPos}
@@ -28,14 +28,14 @@ import org.scalajs.ir.OriginalName.NoOriginalName
 import org.scalajs.ir.Position.NoPosition
 import org.scalajs.ir.Trees.OptimizerHints

-import dotty.tools.dotc.transform.sjs.JSExportUtils._
-import dotty.tools.dotc.transform.sjs.JSSymUtils._
+import dotty.tools.dotc.transform.sjs.JSExportUtils.*
+import dotty.tools.dotc.transform.sjs.JSSymUtils.*

-import JSEncoding._
+import JSEncoding.*

 final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) {
-  import jsCodeGen._
-  import positionConversions._
+  import jsCodeGen.*
+  import positionConversions.*

   /** Info for a non-member export. */
   sealed trait ExportInfo {
@@ -154,7 +154,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) {
         (info, tups) <- exports.groupBy(_._1)
         kind <- checkSameKind(tups)
       } yield {
-        import ExportKind._
+        import ExportKind.*

         implicit val pos = info.pos

@@ -201,7 +201,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) {

     implicit val pos = info.pos

-    import ExportKind._
+    import ExportKind.*

     kind match {
       case Method =>
diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala
index 2fd007165952..3b25187b0acd 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala
@@ -4,8 +4,8 @@ import scala.language.unsafeNulls

 import java.net.{URI, URISyntaxException}

-import dotty.tools.dotc.core._
-import Contexts._
+import dotty.tools.dotc.core.*
+import Contexts.*
 import Decorators.em

 import dotty.tools.dotc.report

@@ -17,7 +17,7 @@ import org.scalajs.ir

 /** Conversion utilities from dotty Positions to IR Positions.
  */
 class JSPositions()(using Context) {
-  import JSPositions._
+  import JSPositions.*

   private val sourceURIMaps: List[URIMap] = {
     ctx.settings.scalajsMapSourceURI.value.flatMap { option =>
@@ -64,7 +64,7 @@ class JSPositions()(using Context) {
       sourceAndSpan2irPos(sourcePos.source, sourcePos.span)

   private object span2irPosCache {
-    import dotty.tools.dotc.util._
+    import dotty.tools.dotc.util.*

     private var lastDotcSource: SourceFile = null
     private var lastIRSource: ir.Position.SourceFile = null
diff --git a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala
index 029273aed54b..a3a37795826a 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala
@@ -1,13 +1,13 @@
 package dotty.tools.backend.sjs

-import dotty.tools.dotc.core._
+import dotty.tools.dotc.core.*
 import Names.TermName
-import Types._
-import Contexts._
-import Symbols._
+import Types.*
+import Contexts.*
+import Symbols.*
 import Decorators.em

-import dotty.tools.dotc.ast.tpd._
+import dotty.tools.dotc.ast.tpd.*
 import dotty.tools.backend.jvm.DottyPrimitives
 import dotty.tools.dotc.report
 import dotty.tools.dotc.util.ReadOnlyMap
@@ -64,7 +64,7 @@ object JSPrimitives {
 }

 class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) {
-  import JSPrimitives._
+  import JSPrimitives.*

   private lazy val jsPrimitives: ReadOnlyMap[Symbol, Int] = initJSPrimitives(using ictx)
diff --git a/compiler/src/dotty/tools/dotc/Bench.scala b/compiler/src/dotty/tools/dotc/Bench.scala
index 40ad66a0a9e6..cbc490919cfe 100644
--- a/compiler/src/dotty/tools/dotc/Bench.scala
+++ b/compiler/src/dotty/tools/dotc/Bench.scala
@@ -1,7 +1,7 @@
 package dotty.tools
 package dotc

-import core.Contexts._
+import core.Contexts.*
 import reporting.Reporter
 import io.AbstractFile
diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
index a906d52ccd4e..686414a4fd9b 100644
--- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala
+++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
@@ -1,10 +1,10 @@
 package dotty.tools
 package dotc

-import core._
-import Contexts._
+import core.*
+import Contexts.*
 import SymDenotations.ClassDenotation
-import Symbols._
+import Symbols.*
 import Comments.Comment
 import util.{FreshNameCreator, SourceFile, NoSource}
 import util.Spans.Span
@@ -12,8 +12,8 @@ import ast.{tpd, untpd}
 import tpd.{Tree, TreeTraverser}
 import ast.Trees.{Import, Ident}
 import typer.Nullables
-import transform.SymUtils._
-import core.Decorators._
+import transform.SymUtils.*
+import core.Decorators.*
 import config.{SourceVersion, Feature}
 import StdNames.nme
 import scala.annotation.internal.sharable
diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala
index 22150afad1e8..aaa14a052936 100644
--- a/compiler/src/dotty/tools/dotc/Compiler.scala
+++ b/compiler/src/dotty/tools/dotc/Compiler.scala
@@ -1,12 +1,12 @@
 package dotty.tools
 package dotc

-import core._
-import Contexts._
+import core.*
+import Contexts.*
 import typer.{TyperPhase, RefChecks}
 import parsing.Parser
 import Phases.Phase
-import transform._
+import transform.*
 import dotty.tools.backend
 import backend.jvm.{CollectSuperCalls, GenBCode}
 import localopt.StringInterpolatorOpt
diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala
index e548cae55ddd..4207baa57470 100644
--- a/compiler/src/dotty/tools/dotc/Driver.scala
+++ b/compiler/src/dotty/tools/dotc/Driver.scala
@@ -3,12 +3,12 @@ package dotty.tools.dotc
 import dotty.tools.FatalError
 import config.CompilerCommand
 import core.Comments.{ContextDoc, ContextDocstrings}
-import core.Contexts._
+import core.Contexts.*
 import core.{MacroClassLoader, TypeError}
 import dotty.tools.dotc.ast.Positioned
 import dotty.tools.io.AbstractFile
-import reporting._
-import core.Decorators._
+import reporting.*
+import core.Decorators.*
 import config.Feature

 import scala.util.control.NonFatal
diff --git a/compiler/src/dotty/tools/dotc/Resident.scala b/compiler/src/dotty/tools/dotc/Resident.scala
index 0b9bca0dc75b..481d321773c6 100644
--- a/compiler/src/dotty/tools/dotc/Resident.scala
+++ b/compiler/src/dotty/tools/dotc/Resident.scala
@@ -1,7 +1,7 @@
 package dotty.tools
 package dotc

-import core.Contexts._
+import core.Contexts.*
 import reporting.Reporter
 import java.io.EOFException
 import scala.annotation.tailrec
diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala
index 7b09e7ef833a..bec1c89d7216 100644
--- a/compiler/src/dotty/tools/dotc/Run.scala
+++ b/compiler/src/dotty/tools/dotc/Run.scala
@@ -1,22 +1,22 @@
 package dotty.tools
 package dotc

-import core._
-import Contexts._
-import Periods._
-import Symbols._
-import Scopes._
+import core.*
+import Contexts.*
+import Periods.*
+import Symbols.*
+import Scopes.*
 import Names.Name
 import Denotations.Denotation
 import typer.Typer
 import typer.ImportInfo.withRootImports
-import Decorators._
+import Decorators.*
 import io.AbstractFile
 import Phases.{unfusedPhases, Phase}
 import sbt.interfaces.ProgressCallback

-import util._
+import util.*
 import reporting.{Suppression, Action, Profile, ActiveProfile, NoProfile}
 import reporting.Diagnostic
 import reporting.Diagnostic.Warning
diff --git a/compiler/src/dotty/tools/dotc/ScalacCommand.scala b/compiler/src/dotty/tools/dotc/ScalacCommand.scala
index 2e0d9a08f25d..5f7f80a262d8 100644
--- a/compiler/src/dotty/tools/dotc/ScalacCommand.scala
+++ b/compiler/src/dotty/tools/dotc/ScalacCommand.scala
@@ -1,6 +1,6 @@
 package dotty.tools.dotc

-import config.Properties._
+import config.Properties.*
 import config.CompilerCommand

 object ScalacCommand extends CompilerCommand:
diff --git a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled
index 6bf7530faf24..08d409772331 100644
--- a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled
+++ b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled
@@ -2,14 +2,14 @@ package dotty.tools
 package dotc
 package ast

-import core._
-import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._
-import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._
+import core.*
+import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, Flags.*
+import SymDenotations.*, Symbols.*, StdNames.*, Annotations.*, Trees.*

 // TODO: revise, integrate in a checking phase.
 object CheckTrees {
-  import tpd._
+  import tpd.*

   def check(p: Boolean, msg: => String = "")(using Context): Unit = assert(p, msg)
diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
index 471d7f6af0dc..0607b3729654 100644
--- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
@@ -2,18 +2,18 @@ package dotty.tools
 package dotc
 package ast

-import core._
-import util.Spans._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._
-import Symbols._, StdNames._, Trees._, ContextOps._
-import Decorators._, transform.SymUtils._
+import core.*
+import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, NameOps.*, Flags.*
+import Symbols.*, StdNames.*, Trees.*, ContextOps.*
+import Decorators.*, transform.SymUtils.*
 import Annotations.Annotation
 import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, DefaultGetterName, WildcardParamName}
 import typer.{Namer, Checking}
 import util.{Property, SourceFile, SourcePosition, Chars}
 import config.Feature.{sourceVersion, migrateTo3, enabled}
-import config.SourceVersion._
+import config.SourceVersion.*
 import collection.mutable.ListBuffer
-import reporting._
+import reporting.*
 import annotation.constructorOnly
 import printing.Formatting.hl
 import config.Printers
@@ -22,8 +22,8 @@ import scala.annotation.internal.sharable
 import scala.annotation.threadUnsafe

 object desugar {
-  import untpd._
-  import DesugarEnums._
+  import untpd.*
+  import DesugarEnums.*

   /** An attachment for companion modules of classes that have a `derives` clause.
    *  The position value indicates the start position of the template of the
diff --git a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala
index a1c3c0ed0775..4b00f72bf21e 100644
--- a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala
+++ b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala
@@ -2,19 +2,19 @@ package dotty.tools
 package dotc
 package ast

-import core._
-import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._
-import Symbols._, StdNames._, Trees._
-import Decorators._
+import core.*
+import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, Flags.*
+import Symbols.*, StdNames.*, Trees.*
+import Decorators.*
 import util.{Property, SourceFile}
-import typer.ErrorReporting._
+import typer.ErrorReporting.*
 import transform.SyntheticMembers.ExtendsSingletonMirror

 import scala.annotation.internal.sharable

 /** Helper methods to desugar enums */
 object DesugarEnums {
-  import untpd._
+  import untpd.*

   enum CaseKind:
     case Simple, Object, Class
diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala
index c0cf2c0d1b81..8ee75cbf364b 100644
--- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala
+++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala
@@ -1,10 +1,10 @@
 package dotty.tools.dotc
 package ast

-import core._
-import Symbols._, Types._, Contexts._, Decorators._, util.Spans._, Flags._, Constants._
+import core.*
+import Symbols.*, Types.*, Contexts.*, Decorators.*, util.Spans.*, Flags.*, Constants.*
 import StdNames.{nme, tpnme}
-import ast.Trees._
+import ast.Trees.*
 import Names.Name
 import Comments.Comment
 import NameKinds.DefaultGetterName
@@ -24,7 +24,7 @@ object MainProxies {
  *
  *  would be translated to something like
  *
- *     import CommandLineParser._
+ *     import CommandLineParser.*
 *     class f {
 *       @static def main(args: Array[String]): Unit =
 *         try
@@ -36,7 +36,7 @@ object MainProxies {
  *  }
  */
 private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = {
-    import tpd._
+    import tpd.*
    def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap {
      case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) =>
        stat.symbol :: Nil
@@ -48,7 +48,7 @@ object MainProxies {
     mainMethods(stats).flatMap(mainProxy)
   }

-  import untpd._
+  import untpd.*
   private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = {
     val mainAnnotSpan = mainFun.getAnnotation(defn.MainAnnot).get.tree.span
     def pos = mainFun.sourcePos
@@ -172,7 +172,7 @@ object MainProxies {
    *   }
    */
   private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = {
-    import tpd._
+    import tpd.*

     /**
      * Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this
@@ -405,7 +405,7 @@ object MainProxies {

   /** A class responsible for extracting the docstrings of a method. */
   private class Documentation(docComment: Option[Comment]):
-    import util.CommentParsing._
+    import util.CommentParsing.*

     /** The main part of the documentation. */
     lazy val mainDoc: String = _mainDoc
diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala
index ace396d1e583..2960af8fcdec 100644
--- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala
+++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala
@@ -1,9 +1,9 @@
 package dotty.tools.dotc
 package ast

-import core.Contexts._
-import core.Decorators._
-import util.Spans._
+import core.Contexts.*
+import core.Decorators.*
+import util.Spans.*
 import Trees.{MemberDef, DefTree, WithLazyFields}
 import dotty.tools.dotc.core.Types.AnnotatedType
 import dotty.tools.dotc.core.Types.ImportType
diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala
index 980042f8292e..d8017783f47f 100644
--- a/compiler/src/dotty/tools/dotc/ast/Positioned.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala
@@ -2,11 +2,11 @@ package dotty.tools
 package dotc
 package ast

-import util.Spans._
+import util.Spans.*
 import util.{SourceFile, SourcePosition, SrcPos}
-import core.Contexts._
-import core.Decorators._
-import core.NameOps._
+import core.Contexts.*
+import core.Decorators.*
+import core.NameOps.*
 import core.Flags.{JavaDefined, ExtensionMethod}
 import core.StdNames.nme
 import ast.Trees.mods
@@ -165,7 +165,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src
    *   - If item is a non-empty tree, it has a position
    */
  def checkPos(nonOverlapping: Boolean)(using Context): Unit = try {
-    import untpd._
+    import untpd.*
     val last = LastPosRef()
     def check(p: Any): Unit = p match {
       case p: Positioned =>
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
index 7616ef220d7f..9b841e8458ec 100644
--- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
+++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
@@ -2,13 +2,13 @@ package dotty.tools
 package dotc
 package ast

-import core._
-import Flags._, Trees._, Types._, Contexts._
-import Names._, StdNames._, NameOps._, Symbols._
+import core.*
+import Flags.*, Trees.*, Types.*, Contexts.*
+import Names.*, StdNames.*, NameOps.*, Symbols.*
 import typer.ConstFold
 import reporting.trace
-import dotty.tools.dotc.transform.SymUtils._
-import Decorators._
+import dotty.tools.dotc.transform.SymUtils.*
+import Decorators.*
 import Constants.Constant

 import scala.collection.mutable
@@ -390,7 +390,7 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] =>
 }

 trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] =>
-  import untpd._
+  import untpd.*

   /** The underlying tree when stripping any TypedSplice or Parens nodes */
   override def unsplice(tree: Tree): Tree = tree match {
@@ -495,8 +495,8 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped]
 }

 trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
-  import TreeInfo._
-  import tpd._
+  import TreeInfo.*
+  import tpd.*

   /** The purity level of this statement.
    *  @return Pure if statement has no side effects
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala
index ae674c25dc3d..5603a422a77b 100644
--- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala
+++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala
@@ -1,11 +1,11 @@
 package dotty.tools.dotc
 package ast

-import Trees._
-import core.Contexts._
+import Trees.*
+import core.Contexts.*
 import core.ContextOps.enter
-import core.Flags._
-import core.Symbols._
+import core.Flags.*
+import core.Symbols.*
 import core.TypeError

 /** A TreeMap that maintains the necessary infrastructure to support
@@ -14,7 +14,7 @@ import core.TypeError
  *  This includes implicits defined in scope as well as imported implicits.
  */
 class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts {
-  import tpd._
+  import tpd.*

   def transformSelf(vd: ValDef)(using Context): ValDef =
     cpy.ValDef(vd)(tpt = transform(vd.tpt))
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala
index d2e18729836b..15c61bc2b8d4 100644
--- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala
+++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala
@@ -2,11 +2,11 @@ package dotty.tools
 package dotc
 package ast

-import core._
-import Types._, Contexts._, Flags._
-import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant
-import Decorators._
-import dotty.tools.dotc.transform.SymUtils._
+import core.*
+import Types.*, Contexts.*, Flags.*
+import Symbols.*, Annotations.*, Trees.*, Symbols.*, Constants.Constant
+import Decorators.*
+import dotty.tools.dotc.transform.SymUtils.*

 /** A map that applies three functions and a substitution together to a tree and
  *  makes sure they are coordinated so that the result is well-typed. The functions are
@@ -39,7 +39,7 @@ class TreeTypeMap(
     val substFrom: List[Symbol] = Nil,
     val substTo: List[Symbol] = Nil,
     cpy: tpd.TreeCopier = tpd.cpy)(using Context) extends tpd.TreeMap(cpy) {
-  import tpd._
+  import tpd.*

   def copy(
       typeMap: Type => Type,
diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala
index 1ad8bf520830..4ec41b95a90b 100644
--- a/compiler/src/dotty/tools/dotc/ast/Trees.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala
@@ -2,10 +2,10 @@ package dotty.tools
 package dotc
 package ast

-import core._
-import Types._, Names._, NameOps._, Flags._, util.Spans._, Contexts._, Constants._
+import core.*
+import Types.*, Names.*, NameOps.*, Flags.*, util.Spans.*, Contexts.*, Constants.*
 import typer.{ ConstFold, ProtoTypes }
-import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._
+import SymDenotations.*, Symbols.*, Denotations.*, StdNames.*, Comments.*
 import collection.mutable.ListBuffer
 import printing.Printer
 import printing.Texts.Text
@@ -16,7 +16,7 @@ import annotation.internal.sharable
 import annotation.unchecked.uncheckedVariance
 import annotation.constructorOnly
 import compiletime.uninitialized
-import Decorators._
+import Decorators.*
 import staging.StagingLevel.*

 object Trees {
diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala
index 7625a61d48aa..acffb1e89972 100644
--- a/compiler/src/dotty/tools/dotc/ast/tpd.scala
+++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala
@@ -4,13 +4,13 @@ package ast

 import dotty.tools.dotc.transform.{ExplicitOuter, Erasure}
 import typer.ProtoTypes
-import transform.SymUtils._
-import transform.TypeUtils._
-import core._
+import transform.SymUtils.*
+import transform.TypeUtils.*
+import core.*
 import Scopes.newScope
-import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._, NameOps._
-import Symbols._, StdNames._, Annotations._, Trees._, Symbols._
-import Decorators._, DenotTransformers._
+import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, Flags.*, NameOps.*
+import Symbols.*, StdNames.*, Annotations.*, Trees.*, Symbols.*
+import Decorators.*, DenotTransformers.*
 import collection.{immutable, mutable}
 import util.{Property, SourceFile}
 import NameKinds.{TempResultName, OuterSelectName}
diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala
index 41ba452fa80a..8cdc6f710ac9 100644
--- a/compiler/src/dotty/tools/dotc/ast/untpd.scala
+++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala
@@ -2,15 +2,15 @@ package dotty.tools
 package dotc
 package ast

-import core._
-import Types._, Contexts._, Constants._, Names._, Flags._
+import core.*
+import Types.*, Contexts.*, Constants.*, Names.*, Flags.*
 import dotty.tools.dotc.typer.ProtoTypes
-import Symbols._, StdNames._, Trees._
+import Symbols.*, StdNames.*, Trees.*
 import util.{Property, SourceFile, NoSource}
 import util.Spans.Span
 import annotation.constructorOnly
 import annotation.internal.sharable
-import Decorators._
+import Decorators.*

 object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala
index 8ba53693870c..270fd9322a88 100644
--- a/compiler/src/dotty/tools/dotc/cc/Setup.scala
+++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala
@@ -2,7 +2,7 @@ package dotty.tools
 package dotc
 package cc

-import core._
+import core.*
 import Phases.*, DenotTransformers.*, SymDenotations.*
 import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.*
 import Types.*, StdNames.*
diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala
index ac8b69381938..0b66f339bf53 100644
--- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala
@@ -4,9 +4,9 @@
 package dotty.tools.dotc.classpath

 import dotty.tools.io.{AbstractFile, VirtualDirectory}
-import FileUtils._
+import FileUtils.*
 import dotty.tools.io.ClassPath
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*

 /**
  * Provides factory methods for classpath. When creating classpath instances for a given path,
diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala
index da1276f10dd7..26ed2734890e 100644
--- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala
@@ -11,10 +11,10 @@ import java.nio.file.{FileSystems, Files}

 import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames}
 import dotty.tools.io.{AbstractFile, PlainFile, ClassPath, ClassRepresentation, EfficientClassPath, JDK9Reflectors}
-import FileUtils._
+import FileUtils.*
 import PlainFile.toPlainFile

-import scala.jdk.CollectionConverters._
+import scala.jdk.CollectionConverters.*
 import scala.collection.immutable.ArraySeq
 import scala.util.control.NonFatal

@@ -126,9 +126,9 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo
 }

 object JrtClassPath {
-  import java.nio.file._, java.net.URI
+  import java.nio.file.*, java.net.URI
   def apply(release: Option[String]): Option[ClassPath] = {
-    import scala.util.Properties._
+    import scala.util.Properties.*
     if (!isJavaAtLeast("9")) None
     else {
       // Longer term we'd like an official API for this in the JDK
@@ -165,7 +165,7 @@ object JrtClassPath {
  * The implementation assumes that no classes exist in the empty package.
 */
final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths {
-  import java.nio.file.Path, java.nio.file._
+  import java.nio.file.Path, java.nio.file.*

  type F = Path
  private val dir: Path = fs.getPath("/packages")
@@ -214,7 +214,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No
 * Implementation `ClassPath` based on the \$JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247
 */
final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths {
-  import java.nio.file.Path, java.nio.file._
+  import java.nio.file.Path, java.nio.file.*

  private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader)
  private val root: Path = fileSystem.getRootDirectories.iterator.next
diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala
index 4b777444c3bf..f520cd97767e 100644
--- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala
@@ -4,7 +4,7 @@ import scala.language.unsafeNulls

 import dotty.tools.io.{ClassPath, ClassRepresentation}
 import dotty.tools.io.{AbstractFile, VirtualDirectory}
-import FileUtils._
+import FileUtils.*
 import java.net.{URI, URL}

 case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala
index b38e1841728d..0ca996db4812 100644
--- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala
@@ -13,8 +13,8 @@ import java.nio.file.attribute.{BasicFileAttributes, FileTime}
 import scala.annotation.tailrec

 import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources}
-import dotty.tools.dotc.core.Contexts._
-import FileUtils._
+import dotty.tools.dotc.core.Contexts.*
+import FileUtils.*

 /**
  * A trait providing an optional cache for classpath entries obtained from zip and jar files.
diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala
index 8033291f5dd3..ca8636e3884f 100644
--- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala
@@ -9,7 +9,7 @@ import java.io.File
 import java.net.URL

 import dotty.tools.io.{ AbstractFile, FileZipArchive }
-import FileUtils._
+import FileUtils.*
 import dotty.tools.io.{EfficientClassPath, ClassRepresentation}

 /**
diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala
index 914df040fbf7..b76af885765c 100644
--- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala
+++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala
@@ -3,8 +3,8 @@ package config

 import scala.language.unsafeNulls

-import Settings._
-import core.Contexts._
+import Settings.*
+import core.Contexts.*
 import printing.Highlighting

 import scala.util.chaining.given
diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala
index 41e123472a75..2ffe900fbdbf 100644
--- a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala
+++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala
@@ -1,8 +1,8 @@
 package dotty.tools.dotc
 package config

-import Settings._
-import core.Contexts._
+import Settings.*
+import core.Contexts.*

 abstract class CompilerCommand extends CliCommand:
   type ConcreteSettings = ScalaSettings
@@ -21,6 +21,6 @@ abstract class CompilerCommand extends CliCommand:
     else ""

   final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean =
-    import settings._
+    import settings.*
     val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases)
     flags.exists(_.value) || allSettings.exists(isHelping)
diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala
index 9492f02abbf1..74966d89295e 100644
--- a/compiler/src/dotty/tools/dotc/config/Feature.scala
+++ b/compiler/src/dotty/tools/dotc/config/Feature.scala
@@ -2,12 +2,12 @@ package dotty.tools
 package dotc
 package config

-import core._
-import Contexts._, Symbols._, Names._
+import core.*
+import Contexts.*, Symbols.*, Names.*
 import StdNames.nme
 import Decorators.*
 import util.{SrcPos, NoSourcePosition}
-import SourceVersion._
+import SourceVersion.*
 import reporting.Message
 import NameKinds.QualifiedName
diff --git a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
index f611360dd4ca..ed8ef6c8372e 100644
--- a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
+++ b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
@@ -2,12 +2,12 @@ package dotty.tools
 package dotc
 package config

-import io._
+import io.*
 import classpath.AggregateClassPath
-import core._
-import Symbols._, Types._, Contexts._, StdNames._
-import Flags._
-import transform.ExplicitOuter, transform.SymUtils._
+import core.*
+import Symbols.*, Types.*, Contexts.*, StdNames.*
+import Flags.*
+import transform.ExplicitOuter, transform.SymUtils.*

 class JavaPlatform extends Platform {

@@ -52,7 +52,7 @@ class JavaPlatform extends Platform {
    */
  def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = {
     val d = defn
-    import d._
+    import d.*
     (sym == ObjectClass) ||
     (sym == JavaSerializableClass) ||
     (sym == ComparableClass) ||
diff --git a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala
index 0411c5604768..ba121d06e35a 100644
--- a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala
+++ b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala
@@ -4,7 +4,7 @@ package config

 import scala.language.unsafeNulls

-import io._
+import io.*

 /** A class for holding mappings from source directories to
  *  their output location. This functionality can be accessed
diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
index 8b4eedb0e9d2..29e6e35855c8 100644
--- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala
+++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
@@ -9,15 +9,15 @@ import io.{ClassPath, Directory, Path}
 import classpath.{AggregateClassPath, ClassPathFactory, JrtClassPath}
 import ClassPath.split
 import PartialFunction.condOpt
-import core.Contexts._
-import Settings._
+import core.Contexts.*
+import Settings.*
 import dotty.tools.io.File

 object PathResolver {

   // Imports property/environment functions which suppress
   // security exceptions.
-  import AccessControl._
+  import AccessControl.*

   def firstNonEmpty(xs: String*): String = xs find (_ != "") getOrElse ""

@@ -208,7 +208,7 @@ class PathResolver(using c: Context) {
     if (!settings.classpath.isDefault) settings.classpath.value
     else sys.env.getOrElse("CLASSPATH", ".")

-  import classPathFactory._
+  import classPathFactory.*

   // Assemble the elements!
   def basis: List[Iterable[ClassPath]] =
diff --git a/compiler/src/dotty/tools/dotc/config/Platform.scala b/compiler/src/dotty/tools/dotc/config/Platform.scala
index 73a05fbd41c1..2a0b207e68c1 100644
--- a/compiler/src/dotty/tools/dotc/config/Platform.scala
+++ b/compiler/src/dotty/tools/dotc/config/Platform.scala
@@ -3,7 +3,7 @@ package dotc
 package config

 import io.{ClassPath, AbstractFile}
-import core.Contexts._, core.Symbols._
+import core.Contexts.*, core.Symbols.*
 import core.SymbolLoader
 import core.StdNames.nme
 import core.Flags.Module
diff --git a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala
index 0275e0d6a227..f6e29754ef94 100644
--- a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala
+++ b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala
@@ -1,8 +1,8 @@
 package dotty.tools.dotc.config

-import dotty.tools.dotc.core._
-import Contexts._
-import Symbols._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Symbols.*

 import dotty.tools.backend.sjs.JSDefinitions
diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
index b10c9859e4d0..bd33a075c584 100644
--- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -6,12 +6,12 @@ import scala.language.unsafeNulls
 import dotty.tools.dotc.config.PathResolver.Defaults
 import dotty.tools.dotc.config.Settings.{Setting, SettingGroup}
 import dotty.tools.dotc.config.SourceVersion
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
 import dotty.tools.dotc.rewrites.Rewrites
 import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory}
 import Setting.ChoiceWithHelp

-import scala.util.chaining._
+import scala.util.chaining.*

 import java.util.zip.Deflater
diff --git a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala
index 7fdf57478f1a..9f603e6792be 100644
--- a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala
+++ b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala
@@ -89,7 +89,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
     def isInt(s: String) = Try(toInt(s)).isSuccess

-    import ScalaBuild._
+    import ScalaBuild.*

     def toBuild(s: String) = s match {
       case null | "FINAL" => Final
diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala
index 5199c6727b41..79eb2b882f8f 100644
--- a/compiler/src/dotty/tools/dotc/config/Settings.scala
+++ b/compiler/src/dotty/tools/dotc/config/Settings.scala
@@ -3,7 +3,7 @@ package config

 import scala.language.unsafeNulls

-import core.Contexts._
+import core.Contexts.*

 import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory}
diff --git a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala
index 5b79432a97e7..20304b74c1da 100644
--- a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala
+++ b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala
@@ -22,7 +22,7 @@ trait WrappedProperties extends PropertiesTrait {
   override def envOrNone(name: String): Option[String] = wrap(super.envOrNone(name)).flatten

   def systemProperties: Iterator[(String, String)] = {
-    import scala.jdk.CollectionConverters._
+    import scala.jdk.CollectionConverters.*
     wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty
   }
 }
diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala
index 202f3eb26e41..43c753458f6e 100644
--- a/compiler/src/dotty/tools/dotc/core/Annotations.scala
+++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala
@@ -2,7 +2,7 @@ package dotty.tools
 package dotc
 package core

-import Symbols._, Types._, Contexts._, Constants._, Phases.*
+import Symbols.*, Types.*, Contexts.*, Constants.*, Phases.*
 import ast.tpd, tpd.*
 import util.Spans.Span
 import printing.{Showable, Printer}
diff --git a/compiler/src/dotty/tools/dotc/core/Atoms.scala b/compiler/src/dotty/tools/dotc/core/Atoms.scala
index bcaaf6794107..a68a07947965 100644
--- a/compiler/src/dotty/tools/dotc/core/Atoms.scala
+++ b/compiler/src/dotty/tools/dotc/core/Atoms.scala
@@ -2,7 +2,7 @@ package dotty.tools
 package dotc
 package core

-import Types._
+import Types.*

 /** Indicates the singleton types that a type must or may consist of.
  *  @param lo The lower bound: singleton types in this set are guaranteed
diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
index a61701eee2d7..060189016828 100644
--- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
+++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
@@ -2,9 +2,9 @@ package dotty.tools
 package dotc
 package core

-import Contexts._, Types._, Symbols._, Names._, Flags._
+import Contexts.*, Types.*, Symbols.*, Names.*, Flags.*
 import Denotations.SingleDenotation
-import Decorators._
+import Decorators.*
 import collection.mutable
 import config.SourceVersion.future
 import config.Feature.sourceVersion
@@ -62,7 +62,7 @@ object CheckRealizable {
  *  Type.isStable).
  */
class CheckRealizable(using Context) {
-  import CheckRealizable._
+  import CheckRealizable.*

   /** A set of all fields that have already been checked. Used
    *  to avoid infinite recursions when analyzing recursive types.
diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala
index 1b20b75ad8ac..80e9a105d658 100644
--- a/compiler/src/dotty/tools/dotc/core/Comments.scala
+++ b/compiler/src/dotty/tools/dotc/core/Comments.scala
@@ -5,10 +5,10 @@ package core
 import scala.language.unsafeNulls

 import ast.{ untpd, tpd }
-import Symbols._, Contexts._
+import Symbols.*, Contexts.*
 import util.{SourceFile, ReadOnlyMap}
-import util.Spans._
-import util.CommentParsing._
+import util.Spans.*
+import util.CommentParsing.*
 import util.Property.Key
 import parsing.Parsers.Parser
 import reporting.ProperDefinitionNotFound
diff --git a/compiler/src/dotty/tools/dotc/core/Constants.scala b/compiler/src/dotty/tools/dotc/core/Constants.scala
index f45e9e5217de..63acfbe55701 100644
--- a/compiler/src/dotty/tools/dotc/core/Constants.scala
+++ b/compiler/src/dotty/tools/dotc/core/Constants.scala
@@ -2,7 +2,7 @@ package dotty.tools
 package dotc
 package core

-import Types._, Symbols._, Contexts._
+import Types.*, Symbols.*, Contexts.*
 import printing.Printer
 import printing.Texts.Text

@@ -210,7 +210,7 @@ object Constants {
     }

     override def hashCode: Int = {
-      import scala.util.hashing.MurmurHash3._
+      import scala.util.hashing.MurmurHash3.*
       val seed = 17
       var h = seed
       h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide.
diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala
index c634f847e510..59f3aa9838c2 100644
--- a/compiler/src/dotty/tools/dotc/core/Constraint.scala
+++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala
@@ -2,7 +2,7 @@ package dotty.tools
 package dotc
 package core

-import Types._, Contexts._
+import Types.*, Contexts.*
 import printing.Showable
 import util.{SimpleIdentitySet, SimpleIdentityMap}

diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
index bbe46c344890..d43739019f2f 100644
--- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
+++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -2,11 +2,11 @@ package dotty.tools
 package dotc
 package core

-import Types._
-import Contexts._
-import Symbols._
-import Decorators._
-import Flags._
+import Types.*
+import Contexts.*
+import Symbols.*
+import Decorators.*
+import Flags.*
 import config.Config
 import config.Printers.typr
 import typer.ProtoTypes.{newTypeVar, representedParamRef}
diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
index 412b4228677d..8ec38d52e725 100644
--- a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
+++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
@@ -1,7 +1,7 @@
 package dotty.tools.dotc
 package core

-import Contexts._
+import Contexts.*
 import config.Printers.{default, typr}

 import scala.compiletime.uninitialized
diff --git a/compiler/src/dotty/tools/dotc/core/ContextOps.scala b/compiler/src/dotty/tools/dotc/core/ContextOps.scala
index aa85f714a8e5..8e0b022b891e 100644
--- a/compiler/src/dotty/tools/dotc/core/ContextOps.scala
+++ b/compiler/src/dotty/tools/dotc/core/ContextOps.scala
@@ -1,8 +1,8 @@
 package dotty.tools.dotc
 package core

-import Contexts._, Symbols._, Types._, Flags._
-import Denotations._, SymDenotations._
+import Contexts.*, Symbols.*, Types.*, Flags.*
+import Denotations.*, SymDenotations.*
 import Names.Name, StdNames.nme
 import ast.untpd
diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala
index 78d23ed6eac6..c575f7bc72f7 100644
--- a/compiler/src/dotty/tools/dotc/core/Contexts.scala
+++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala
@@ -3,28 +3,28 @@ package dotc
 package core

 import interfaces.CompilerCallback
-import Decorators._
-import Periods._
-import Names._
-import Phases._
-import Types._
-import Symbols._
-import Scopes._
-import Uniques._
-import ast.Trees._
+import Decorators.*
+import Periods.*
+import Names.*
+import Phases.*
+import Types.*
+import Symbols.*
+import Scopes.*
+import Uniques.*
+import ast.Trees.*
 import ast.untpd
 import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance}
 import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables}
 import inlines.Inliner
-import Nullables._
+import Nullables.*
 import Implicits.ContextualImplicits
-import config.Settings._
+import config.Settings.*
 import config.Config
-import reporting._
+import reporting.*
 import io.{AbstractFile, NoAbstractFile, PlainFile, Path}
 import scala.io.Codec
 import collection.mutable
-import printing._
+import printing.*
 import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings}
 import classfile.ReusableDataReader
 import StdNames.nme
@@ -37,7 +37,7 @@ import dotty.tools.dotc.profile.Profiler
 import dotty.tools.dotc.sbt.interfaces.{IncrementalCallback, ProgressCallback}
 import util.Property.Key
 import util.Store
-import plugins._
+import plugins.*

 import java.util.concurrent.atomic.AtomicInteger
 import java.nio.file.InvalidPathException
@@ -915,7 +915,7 @@ object Contexts {

     def next()(using Context): FreshContext =
       val base = ctx.base
-      import base._
+      import base.*
       val nestedCtx =
         if inUse < pool.size then
           pool(inUse).reuseIn(ctx)
diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala
index fc2b6a852216..29d4b3fa4052 100644
--- a/compiler/src/dotty/tools/dotc/core/Decorators.scala
+++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala
@@ -6,8 +6,8 @@ import scala.annotation.tailrec
 import scala.collection.mutable.ListBuffer
 import scala.util.control.NonFatal

-import Contexts._, Names._, Phases._, Symbols._
-import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._
+import Contexts.*, Names.*, Phases.*, Symbols.*
+import printing.{ Printer, Showable }, printing.Formatting.*, printing.Texts.*
 import transform.MegaPhase
 import reporting.{Message, NoExplanation}
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index 40370973ebf0..f86eab71bc07 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -3,8 +3,8 @@ package dotc
 package core

 import scala.annotation.{threadUnsafe => tu}
-import Types._, Contexts._, Symbols._, SymDenotations._, StdNames._, Names._, Phases._
-import Flags._, Scopes._, Decorators._, NameOps._, Periods._, NullOpsDecorator._
+import Types.*, Contexts.*, Symbols.*, SymDenotations.*, StdNames.*, Names.*, Phases.*
+import Flags.*, Scopes.*, Decorators.*, NameOps.*, Periods.*, NullOpsDecorator.*
 import unpickleScala2.Scala2Unpickler.ensureConstructor
 import scala.collection.mutable
 import collection.mutable
@@ -43,7 +43,7 @@ object Definitions {
 *
 */
class Definitions {
-  import Definitions._
Definitions.* private var initCtx: Context = uninitialized private given currentContext[Dummy_so_its_a_def]: Context = initCtx diff --git a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala index 6690cae3a142..59982fb99b5f 100644 --- a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala +++ b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala @@ -1,13 +1,13 @@ package dotty.tools.dotc package core -import Periods._ -import SymDenotations._ -import Contexts._ -import Types._ -import Symbols._ -import Denotations._ -import Phases._ +import Periods.* +import SymDenotations.* +import Contexts.* +import Types.* +import Symbols.* +import Denotations.* +import Phases.* object DenotTransformers { diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 1969c87d7a86..efbdfeb49246 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -3,24 +3,24 @@ package dotc package core import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid } -import Contexts._ -import Names._ -import NameKinds._ -import StdNames._ +import Contexts.* +import Names.* +import NameKinds.* +import StdNames.* import Symbols.NoSymbol -import Symbols._ -import Types._ -import Periods._ -import Flags._ -import DenotTransformers._ -import Decorators._ -import Signature.MatchDegree._ -import printing.Texts._ +import Symbols.* +import Types.* +import Periods.* +import Flags.* +import DenotTransformers.* +import Decorators.* +import Signature.MatchDegree.* +import printing.Texts.* import printing.Printer import io.AbstractFile import config.Config import config.Printers.overload -import util.common._ +import util.common.* import typer.ProtoTypes.NoViewsAllowed import collection.mutable.ListBuffer diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index ab0611b89b22..1cbfabc08958 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -6,7 +6,7 @@ import Contexts.*, Decorators.*, Symbols.*, Types.* import NameKinds.UniqueName import config.Printers.{gadts, gadtsConstr} import util.{SimpleIdentitySet, SimpleIdentityMap} -import printing._ +import printing.* import scala.annotation.tailrec import scala.annotation.internal.sharable diff --git a/compiler/src/dotty/tools/dotc/core/Hashable.scala b/compiler/src/dotty/tools/dotc/core/Hashable.scala index 79da5f1dcd6f..5ab2d4a2af03 100644 --- a/compiler/src/dotty/tools/dotc/core/Hashable.scala +++ b/compiler/src/dotty/tools/dotc/core/Hashable.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc package core -import Types._ +import Types.* import scala.util.hashing.{ MurmurHash3 => hashing } import annotation.tailrec @@ -40,7 +40,7 @@ object Hashable { } trait Hashable { - import Hashable._ + import Hashable.* protected def hashSeed: Int = getClass.hashCode diff --git a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala index 60fc4a4274e0..6244923cfb52 100644 --- a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala +++ b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc package core -import Contexts._ +import Contexts.* import Flags.JavaDefined 
import StdNames.nme -import Symbols._ -import Types._ +import Symbols.* +import Types.* /** This module defines methods to interpret types of Java symbols, which are implicitly nullable in Java, * as Scala types, which are explicitly nullable. diff --git a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala index d8f41ef99b11..b6b316ac14d9 100644 --- a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala +++ b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.core -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.util.Property import dotty.tools.dotc.reporting.trace import dotty.tools.io.ClassPath diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index 2109a5839380..5fc1b3137e90 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Types._, Contexts._, Symbols._, Decorators._ +import Types.*, Contexts.*, Symbols.*, Decorators.* import util.Property import Names.Name @@ -15,7 +15,7 @@ object MatchTypeTrace: case Stuck(scrut: Type, stuckCase: Type, otherCases: List[Type]) case NoInstance(scrut: Type, stuckCase: Type, fails: List[(Name, TypeBounds)]) case EmptyScrutinee(scrut: Type) - import TraceEntry._ + import TraceEntry.* private class MatchTrace: var entries: List[TraceEntry] = Nil diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index da578e9bf0b0..133d110cabda 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -10,7 +10,7 @@ package dotty.tools.dotc.core * Also, a setting is externally settable, while a mode isn't. 
*/ case class Mode(val bits: Int) extends AnyVal { - import Mode._ + import Mode.* def | (that: Mode): Mode = Mode(bits | that.bits) def & (that: Mode): Mode = Mode(bits & that.bits) def &~ (that: Mode): Mode = Mode(bits & ~that.bits) diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index 3e8d398d05d6..d4f009cbbbd5 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -2,12 +2,12 @@ package dotty.tools package dotc package core -import Names._ -import NameOps._ -import StdNames._ -import NameTags._ -import Contexts._ -import Decorators._ +import Names.* +import NameOps.* +import StdNames.* +import NameTags.* +import Contexts.* +import Decorators.* import scala.annotation.internal.sharable diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index bdb01a079148..415aa049c587 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -6,11 +6,11 @@ import java.security.MessageDigest import java.nio.CharBuffer import scala.io.Codec import Int.MaxValue -import Names._, StdNames._, Contexts._, Symbols._, Flags._, NameKinds._, Types._ +import Names.*, StdNames.*, Contexts.*, Symbols.*, Flags.*, NameKinds.*, Types.* import util.Chars.{isOperatorPart, digit2int} import Decorators.* -import Definitions._ -import nme._ +import Definitions.* +import nme.* object NameOps { diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index dc09edd79781..e4364d168267 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import Contexts._, Symbols._, Types._, Flags._, Scopes._, Decorators._, Names._, NameOps._ +import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.* import SymDenotations.{LazyType, SymDenotation}, StdNames.nme import TypeApplications.EtaExpansion diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index 1e08379b57f0..3f9667b08067 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -13,7 +13,7 @@ import util.{LinearMap, HashSet} import scala.annotation.internal.sharable object Names { - import NameKinds._ + import NameKinds.* /** Things that can be turned into names with `toTermName` and `toTypeName`. * Decorators implements these as extension methods for strings. diff --git a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala index e18271772ff1..4f22f9d31e36 100644 --- a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala +++ b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc package core -import Contexts._ -import Types._ +import Contexts.* +import Types.* /** Defines operations on nullable types and tree. 
*/ object NullOpsDecorator: @@ -49,7 +49,7 @@ object NullOpsDecorator: } end extension - import ast.tpd._ + import ast.tpd.* extension (self: Tree) // cast the type of the tree to a non-nullable type diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index 0f05778266fd..29e665956091 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package core -import Types._, Contexts._, Symbols._, Decorators._, TypeApplications._ +import Types.*, Contexts.*, Symbols.*, Decorators.*, TypeApplications.* import util.{SimpleIdentitySet, SimpleIdentityMap} import collection.mutable import printing.Printer -import printing.Texts._ +import printing.Texts.* import config.Config import config.Printers.constr import reflect.ClassTag @@ -126,7 +126,7 @@ object OrderingConstraint { val empty = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentitySet.empty) } -import OrderingConstraint._ +import OrderingConstraint.* /** Constraint over undetermined type parameters that keeps separate maps to * reflect parameter orderings. diff --git a/compiler/src/dotty/tools/dotc/core/ParamInfo.scala b/compiler/src/dotty/tools/dotc/core/ParamInfo.scala index e88d6540e64b..e499f718365a 100644 --- a/compiler/src/dotty/tools/dotc/core/ParamInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ParamInfo.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.core import Names.Name -import Contexts._ +import Contexts.* import Types.Type import Variances.{Variance, varianceToInt} diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 5e8a960608e6..4e3596ea8814 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -2,14 +2,14 @@ package dotty.tools package dotc package core -import Decorators._ -import Symbols._ -import Types._ -import Flags._ +import Decorators.* +import Symbols.* +import Types.* +import Flags.* import Contexts.ctx import dotty.tools.dotc.reporting.trace import config.Feature.migrateTo3 -import config.Printers._ +import config.Printers.* trait PatternTypeConstrainer { self: TypeComparer => @@ -76,7 +76,7 @@ trait PatternTypeConstrainer { self: TypeComparer => def constrainPatternType(pat: Type, scrut: Type, forceInvariantRefinement: Boolean = false): Boolean = trace(i"constrainPatternType($scrut, $pat)", gadts) { def classesMayBeCompatible: Boolean = { - import Flags._ + import Flags.* val patCls = pat.classSymbol val scrCls = scrut.classSymbol !patCls.exists || !scrCls.exists || { diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala index ee877fb538d4..019c5932b3c9 100644 --- a/compiler/src/dotty/tools/dotc/core/Periods.scala +++ b/compiler/src/dotty/tools/dotc/core/Periods.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.core -import Contexts._ +import Contexts.* import Phases.unfusedPhases object Periods { diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index dd354b66cb4a..e04d829d1e60 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -2,17 +2,17 @@ package dotty.tools package 
dotc package core -import Periods._ -import Contexts._ +import Periods.* +import Contexts.* import dotty.tools.backend.jvm.GenBCode -import DenotTransformers._ -import Denotations._ -import Decorators._ +import DenotTransformers.* +import Denotations.* +import Decorators.* import config.Printers.config import scala.collection.mutable.ListBuffer -import dotty.tools.dotc.transform.MegaPhase._ -import dotty.tools.dotc.transform._ -import Periods._ +import dotty.tools.dotc.transform.MegaPhase.* +import dotty.tools.dotc.transform.* +import Periods.* import parsing.Parser import printing.XprintMode import typer.{TyperPhase, RefChecks} diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index 7762ce8da339..7df5a7fa3c09 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -7,14 +7,14 @@ package dotty.tools package dotc package core -import Symbols._ +import Symbols.* import Types.{TermRef, NoPrefix} -import Flags._ -import Names._ -import Contexts._ -import Phases._ -import Denotations._ -import printing.Texts._ +import Flags.* +import Names.* +import Contexts.* +import Phases.* +import Denotations.* +import printing.Texts.* import printing.Printer import SymDenotations.NoDenotation diff --git a/compiler/src/dotty/tools/dotc/core/Signature.scala b/compiler/src/dotty/tools/dotc/core/Signature.scala index bd744ec01846..f62d594d639d 100644 --- a/compiler/src/dotty/tools/dotc/core/Signature.scala +++ b/compiler/src/dotty/tools/dotc/core/Signature.scala @@ -3,9 +3,9 @@ package core import scala.annotation.tailrec -import Names._, Types._, Contexts._, StdNames._, Decorators._ +import Names.*, Types.*, Contexts.*, StdNames.*, Decorators.* import TypeErasure.sigName -import Signature._ +import Signature.* /** The signature of a denotation. * diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 4fc7ea4185d8..1436b41b521c 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -4,9 +4,9 @@ package core import scala.collection.mutable import scala.annotation.switch import scala.annotation.internal.sharable -import Names._ -import Symbols._ -import Contexts._ +import Names.* +import Symbols.* +import Contexts.* object StdNames { diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index bd30177adcb4..96da91293d91 100644 --- a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc package core -import Types._, Symbols._, Contexts._ +import Types.*, Symbols.*, Contexts.* import cc.CaptureSet.IdempotentCaptRefMap /** Substitution operations on types. 
See the corresponding `subst` and diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index c2e0e6f717dd..e18e1463f3ae 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -2,16 +2,16 @@ package dotty.tools package dotc package core -import Periods._, Contexts._, Symbols._, Denotations._, Names._, NameOps._, Annotations._ -import Types._, Flags._, Decorators._, DenotTransformers._, StdNames._, Scopes._ -import NameOps._, NameKinds._ +import Periods.*, Contexts.*, Symbols.*, Denotations.*, Names.*, NameOps.*, Annotations.* +import Types.*, Flags.*, Decorators.*, DenotTransformers.*, StdNames.*, Scopes.* +import NameOps.*, NameKinds.* import Phases.{Phase, typerPhase, unfusedPhases} import Constants.Constant import TypeApplications.TypeParamInfo import Scopes.Scope import dotty.tools.io.AbstractFile -import Decorators._ -import ast._ +import Decorators.* +import ast.* import ast.Trees.{LambdaTypeTree, TypeBoundsTree} import Trees.Literal import Variances.Variance @@ -21,9 +21,9 @@ import util.Stats import java.util.WeakHashMap import scala.util.control.NonFatal import config.Config -import reporting._ +import reporting.* import collection.mutable -import transform.TypeUtils._ +import transform.TypeUtils.* import cc.{CapturingType, derivedCapturingType} import scala.annotation.internal.sharable diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 12eea3a26df4..5f6078a14625 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -11,11 +11,11 @@ import dotty.tools.dotc.classpath.FileUtils.isTasty import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions -import Contexts._, Symbols._, Flags._, SymDenotations._, Types._, Scopes._, Names._ -import NameOps._ -import StdNames._ +import Contexts.*, Symbols.*, Flags.*, SymDenotations.*, Types.*, Scopes.*, Names.* +import NameOps.* +import StdNames.* import classfile.{ClassfileParser, ClassfileTastyUUIDParser} -import Decorators._ +import Decorators.* import util.Stats import reporting.trace @@ -29,7 +29,7 @@ import dotty.tools.dotc.core.tasty.TastyUnpickler object SymbolLoaders { - import ast.untpd._ + import ast.untpd.* /** A marker trait for a completer that replaces the original * Symbol loader for an unpickled root. 
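Every hunk in this commit is the same mechanical rewrite: wildcard imports spelled with the Scala 2 `_` syntax become the Scala 3 `*` syntax, with no change in meaning. A minimal before/after sketch of the rewrite, using an illustrative package rather than one taken from the hunks:

    // Scala 2 spelling; deprecated when compiling with -source:future
    import scala.collection.mutable._
    // Scala 3 spelling, as applied throughout this commit
    import scala.collection.mutable.*

    @main def wildcardDemo(): Unit =
      val buf = ListBuffer(1, 2, 3) // resolved through the wildcard import
      println(buf.sum)

Both spellings bring all members of the prefix into scope; only the surface syntax differs, which is why the hunks touch import lines exclusively and keep the surrounding code as unchanged context.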
diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 9c7c6a9a18bf..a41d194693e6 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -2,23 +2,23 @@ package dotty.tools package dotc package core -import Periods._ -import Names._ -import Scopes._ -import Flags._ -import Decorators._ -import Contexts._ -import Phases._ -import SymDenotations._ -import Denotations._ -import printing.Texts._ +import Periods.* +import Names.* +import Scopes.* +import Flags.* +import Decorators.* +import Contexts.* +import Phases.* +import SymDenotations.* +import Denotations.* +import printing.Texts.* import printing.Printer -import Types._ -import util.Spans._ -import DenotTransformers._ -import StdNames._ -import NameOps._ -import transform.SymUtils._ +import Types.* +import util.Spans.* +import DenotTransformers.* +import StdNames.* +import NameOps.* +import transform.SymUtils.* import NameKinds.LazyImplicitName import ast.tpd import tpd.{Tree, TreeProvider, TreeOps} diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 1cd1a3ad4d39..e725df199476 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -2,13 +2,13 @@ package dotty.tools package dotc package core -import Types._ -import Contexts._ -import Symbols._ +import Types.* +import Contexts.* +import Symbols.* import SymDenotations.LazyType -import Decorators._ -import util.Stats._ -import Names._ +import Decorators.* +import util.Stats.* +import Names.* import StdNames.nme import Flags.{Module, Provisional} import dotty.tools.dotc.config.Config @@ -154,7 +154,7 @@ object TypeApplications { } } -import TypeApplications._ +import TypeApplications.* /** A decorator that provides methods for modeling type application */ class TypeApplications(val self: Type) extends AnyVal { diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index d5b97dca6164..f853d28e6361 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -2,8 +2,8 @@ package dotty.tools package dotc package core -import Types._, Contexts._, Symbols._, Flags._, Names._, NameOps._, Denotations._ -import Decorators._ +import Types.*, Contexts.*, Symbols.*, Flags.*, Names.*, NameOps.*, Denotations.* +import Decorators.* import Phases.{gettersPhase, elimByNamePhase} import StdNames.nme import TypeOps.refineUsingParent @@ -13,11 +13,11 @@ import config.Config import config.Feature.migrateTo3 import config.Printers.{subtyping, gadts, matchTypes, noPrinter} import TypeErasure.{erasedLub, erasedGlb} -import TypeApplications._ +import TypeApplications.* import Variances.{Variance, variancesConform} import Constants.Constant -import transform.TypeUtils._ -import transform.SymUtils._ +import transform.TypeUtils.* +import transform.SymUtils.* import scala.util.control.NonFatal import typer.ProtoTypes.constrained import typer.Applications.productSelectorTypes @@ -29,7 +29,7 @@ import NameKinds.WildcardParamName /** Provides methods to compare types. 
*/ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling, PatternTypeConstrainer { - import TypeComparer._ + import TypeComparer.* Stats.record("TypeComparer") private var myContext: Context = initctx @@ -3342,7 +3342,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { * subtraces; never print backtraces starting with `<==`. */ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeComparer(initctx) { - import TypeComparer._ + import TypeComparer.* init(initctx) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index c914dcdf7e5c..9dfa33b739c7 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -2,17 +2,17 @@ package dotty.tools package dotc package core -import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Phases._ +import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, StdNames.*, Phases.* import Flags.JavaDefined import Uniques.unique import TypeOps.makePackageObjPrefixExplicit import backend.sjs.JSDefinitions -import transform.ExplicitOuter._ -import transform.ValueClasses._ -import transform.TypeUtils._ -import transform.ContextFunctionResults._ +import transform.ExplicitOuter.* +import transform.ValueClasses.* +import transform.TypeUtils.* +import transform.ContextFunctionResults.* import unpickleScala2.Scala2Erasure -import Decorators._ +import Decorators.* import Definitions.MaxImplementedFunctionArity import scala.annotation.tailrec @@ -404,7 +404,7 @@ object TypeErasure { tp1 // After erasure, T | Nothing is just T and C | Null is just C, if C is a reference type. else tp1 match { case JavaArrayType(elem1) => - import dotty.tools.dotc.transform.TypeUtils._ + import dotty.tools.dotc.transform.TypeUtils.* tp2 match { case JavaArrayType(elem2) => if (elem1.isPrimitiveValueType || elem2.isPrimitiveValueType) @@ -571,7 +571,7 @@ object TypeErasure { erasure(functionType(applyInfo)) } -import TypeErasure._ +import TypeErasure.* /** * @param sourceLanguage Adapt our erasure rules to mimic what the given language diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index dcc4874bc34f..2fa769e25852 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package core -import Types._ -import Symbols._ -import Flags._ -import Names._ -import Contexts._ -import SymDenotations._ -import Denotations._ -import Decorators._ -import reporting._ +import Types.* +import Symbols.* +import Flags.* +import Names.* +import Contexts.* +import SymDenotations.* +import Denotations.* +import Decorators.* +import reporting.* import ast.untpd import config.Printers.{cyclicErrors, noPrinter} diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 5f63f4871c5f..7f9f7099d805 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -2,21 +2,21 @@ package dotty.tools package dotc package core -import Contexts._, Types._, Symbols._, Names._, NameKinds.*, Flags._ -import SymDenotations._ -import util.Spans._ +import Contexts.*, Types.*, Symbols.*, Names.*, NameKinds.*, Flags.* +import SymDenotations.* +import util.Spans.* import util.Stats -import Decorators._ -import 
StdNames._ +import Decorators.* +import StdNames.* import collection.mutable -import ast.tpd._ +import ast.tpd.* import reporting.trace import config.Printers.typr import config.Feature import transform.SymUtils.* -import typer.ProtoTypes._ +import typer.ProtoTypes.* import typer.ForceDegree -import typer.Inferencing._ +import typer.Inferencing.* import typer.IfBottom import reporting.TestingReporter import cc.{CapturingType, derivedCapturingType, CaptureSet, isBoxed, isBoxedCapturing} diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index 54ddd4c0eb6b..ef7329c3698d 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala +++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -2,16 +2,16 @@ package dotty.tools package dotc package core -import Types._ -import Contexts._ +import Types.* +import Contexts.* import util.SimpleIdentitySet -import reporting._ +import reporting.* import config.Config import config.Printers.constr import collection.mutable import java.lang.ref.WeakReference import util.{Stats, SimpleIdentityMap} -import Decorators._ +import Decorators.* import scala.annotation.internal.sharable import scala.compiletime.uninitialized diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index b7090862dbea..61e16f1be668 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2,32 +2,32 @@ package dotty.tools package dotc package core -import Symbols._ -import Flags._ -import Names._ -import StdNames._, NameOps._ -import NullOpsDecorator._ +import Symbols.* +import Flags.* +import Names.* +import StdNames.*, NameOps.* +import NullOpsDecorator.* import NameKinds.SkolemName -import Scopes._ -import Constants._ -import Contexts._ -import Phases._ -import Annotations._ -import SymDenotations._ -import Decorators._ -import Denotations._ -import Periods._ -import CheckRealizable._ +import Scopes.* +import Constants.* +import Contexts.* +import Phases.* +import Annotations.* +import SymDenotations.* +import Decorators.* +import Denotations.* +import Periods.* +import CheckRealizable.* import Variances.{Variance, setStructuralVariances, Invariant} import typer.Nullables -import util.Stats._ +import util.Stats.* import util.{SimpleIdentityMap, SimpleIdentitySet} -import ast.tpd._ +import ast.tpd.* import ast.TreeTypeMap -import printing.Texts._ +import printing.Texts.* import printing.Printer -import Hashable._ -import Uniques._ +import Hashable.* +import Uniques.* import collection.mutable import config.Config import annotation.{tailrec, constructorOnly} @@ -42,7 +42,7 @@ import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.transform.SymUtils.* import dotty.tools.dotc.transform.TypeUtils.isErasedClass object Types { @@ -3882,7 +3882,7 @@ object Types { } trait TermLambda extends LambdaType { thisLambdaType => - import DepStatus._ + import DepStatus.* type ThisName = TermName type PInfo = Type type This >: this.type <: TermLambda diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala index 4078a2b1051a..da6b0aba88bd 100644 --- a/compiler/src/dotty/tools/dotc/core/Uniques.scala +++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala @@ -1,9 +1,9 @@ package 
dotty.tools.dotc package core -import Types._, Contexts._, util.Stats._, Hashable._, Names._ +import Types.*, Contexts.*, util.Stats.*, Hashable.*, Names.* import config.Config -import Decorators._ +import Decorators.* import util.{WeakHashSet, Stats} import WeakHashSet.Entry import scala.annotation.tailrec diff --git a/compiler/src/dotty/tools/dotc/core/Variances.scala b/compiler/src/dotty/tools/dotc/core/Variances.scala index 2401b43c8e17..e18a31e46769 100644 --- a/compiler/src/dotty/tools/dotc/core/Variances.scala +++ b/compiler/src/dotty/tools/dotc/core/Variances.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package core -import Types._, Contexts._, Flags._, Symbols._, Annotations._ +import Types.*, Contexts.*, Flags.*, Symbols.*, Annotations.* import TypeApplications.TypeParamInfo -import Decorators._ +import Decorators.* object Variances { diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala index 4aa60d973264..6ad71c5fd1ce 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala @@ -331,7 +331,7 @@ object ClassfileConstants { inline val impdep1 = 0xfe inline val impdep2 = 0xff - import Flags._ + import Flags.* abstract class FlagTranslation { protected def baseFlags(jflags: Int): FlagSet = EmptyFlags diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index a56ac695b57a..93ebcfeee62a 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -7,11 +7,11 @@ import scala.language.unsafeNulls import dotty.tools.tasty.{ TastyReader, TastyHeaderUnpickler } -import Contexts._, Symbols._, Types._, Names._, StdNames._, NameOps._, Scopes._, Decorators._ -import SymDenotations._, unpickleScala2.Scala2Unpickler._, Constants._, Annotations._, util.Spans._ -import Phases._ +import Contexts.*, Symbols.*, Types.*, Names.*, StdNames.*, NameOps.*, Scopes.*, Decorators.* +import SymDenotations.*, unpickleScala2.Scala2Unpickler.*, Constants.*, Annotations.*, util.Spans.* +import Phases.* import ast.{ tpd, untpd } -import ast.tpd._, util._ +import ast.tpd.*, util.* import java.io.IOException import java.lang.Integer.toHexString @@ -51,7 +51,7 @@ object ClassfileParser { def majorVersion: Int = (version >> 32).toInt def minorVersion: Int = (version & 0xFFFFFFFFL).toInt - import ClassfileConstants._ + import ClassfileConstants.* /** Marker trait for unpicklers that can be embedded in classfiles. 
*/ trait Embedded @@ -270,8 +270,8 @@ class ClassfileParser( classRoot: ClassDenotation, moduleRoot: ClassDenotation)(ictx: Context) { - import ClassfileConstants._ - import ClassfileParser._ + import ClassfileConstants.* + import ClassfileParser.* protected val staticModule: Symbol = moduleRoot.sourceModule(using ictx) diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala index 0393744dde5c..e2220e40c6b4 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala @@ -4,13 +4,13 @@ package core.classfile import scala.language.unsafeNulls import scala.compiletime.uninitialized -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.util._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.util.* import dotty.tools.io.AbstractFile import dotty.tools.tasty.TastyReader @@ -22,7 +22,7 @@ import java.util.UUID class ClassfileTastyUUIDParser(classfile: AbstractFile)(ictx: Context) { - import ClassfileConstants._ + import ClassfileConstants.* private var pool: ConstantPool = uninitialized // the classfile's constant pool private var classfileVersion: Header.Version = Header.Version.Unknown diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala index eb0d140df51e..1bbea6447bf3 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala @@ -13,7 +13,7 @@ import TastyBuffer.Addr import java.nio.charset.StandardCharsets class CommentUnpickler(reader: TastyReader) { - import reader._ + import reader.* private[tasty] lazy val comments: HashMap[Addr, Comment] = { val comments = new HashMap[Addr, Comment] diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala index b35c5c9f1acc..bb818edc1f82 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala @@ -5,9 +5,9 @@ package tasty import scala.language.unsafeNulls -import Contexts._, SymDenotations._, Decorators._ +import Contexts.*, SymDenotations.*, Decorators.* import dotty.tools.dotc.ast.tpd -import TastyUnpickler._ +import TastyUnpickler.* import classfile.ClassfileParser import Names.SimpleName import TreeUnpickler.UnpickleMode @@ -42,8 +42,8 @@ object DottyUnpickler { * @param mode the tasty file contains package (TopLevel), an expression (Term) or a type (TypeTree) */ class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider { - import tpd._ - import DottyUnpickler._ + import tpd.* + import DottyUnpickler.* val unpickler: TastyUnpickler = new TastyUnpickler(bytes) private val posUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler) diff --git 
a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala index 1ddcf9afe1dc..5e2aee33859c 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala @@ -4,17 +4,17 @@ package core package tasty import dotty.tools.tasty.TastyBuffer -import TastyBuffer._ +import TastyBuffer.* import collection.mutable import Names.{Name, chrs, SimpleName, DerivedName, TypeName} -import NameKinds._ -import NameOps._ +import NameKinds.* +import NameOps.* import scala.io.Codec import NameTags.{SIGNED, TARGETSIGNED} class NameBuffer extends TastyBuffer(10000) { - import NameBuffer._ + import NameBuffer.* private val nameRefs = new mutable.LinkedHashMap[Name, NameRef] diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala index 924b87bec003..86076517021a 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala @@ -5,19 +5,19 @@ package tasty import dotty.tools.tasty.TastyFormat.{SOURCE, PositionsSection} import dotty.tools.tasty.TastyBuffer -import TastyBuffer._ +import TastyBuffer.* -import ast._ +import ast.* import Trees.WithLazyFields import util.{SourceFile, NoSource} -import core._ -import Annotations._, Decorators._ +import core.* +import Annotations.*, Decorators.* import collection.mutable -import util.Spans._ +import util.Spans.* import reporting.Message object PositionPickler: - import ast.tpd._ + import ast.tpd.* // Note: This could be just TreeToAddr => Addr if functions are specialized to value classes. // We use a SAM type to avoid boxing of Addr diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala index bc58138f2db6..b7f88155e3c8 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala @@ -9,12 +9,12 @@ import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader} import TastyFormat.SOURCE import TastyBuffer.{Addr, NameRef} -import util.Spans._ +import util.Spans.* import Names.TermName /** Unpickler for tree positions */ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { - import reader._ + import reader.* private var myLineSizes: Array[Int] = uninitialized private var mySpans: util.HashMap[Addr, Span] = uninitialized diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala index c938868a3c48..0a7068b65445 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala @@ -5,10 +5,10 @@ package tasty import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer.NameRef -import Contexts._, Decorators._ +import Contexts.*, Decorators.* import Names.TermName import StdNames.nme -import TastyUnpickler._ +import TastyUnpickler.* import dotty.tools.tasty.TastyFormat.ASTsSection /** Reads the package and class name of the class contained in this TASTy */ @@ -21,9 +21,9 @@ class TastyClassName(bytes: Array[Byte]) { def readName(): Option[(TermName, TermName)] = unpickle(new TreeSectionUnpickler) class TreeSectionUnpickler extends SectionUnpickler[(TermName, TermName)](ASTsSection) { - import 
dotty.tools.tasty.TastyFormat._ + import dotty.tools.tasty.TastyFormat.* def unpickle(reader: TastyReader, tastyName: NameTable): (TermName, TermName) = { - import reader._ + import reader.* def readNames(packageName: TermName): (TermName, TermName) = { val tag = readByte() if (tag >= firstLengthTreeTag) { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala index 4f1e84ac9184..556265c66ce9 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala @@ -6,12 +6,12 @@ package tasty import scala.language.unsafeNulls import dotty.tools.tasty.{TastyBuffer, TastyFormat, TastyHash} -import TastyFormat._ -import TastyBuffer._ +import TastyFormat.* +import TastyBuffer.* import collection.mutable import core.Symbols.ClassSymbol -import Decorators._ +import Decorators.* object TastyPickler { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index 5876b69edfde..9fe3fb282aa2 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -5,9 +5,9 @@ package tasty import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer.NameRef -import Contexts._, Decorators._ +import Contexts.*, Decorators.* import Names.Name -import TastyUnpickler._ +import TastyUnpickler.* import util.Spans.offsetToInt import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection} import java.nio.file.{Files, Paths} @@ -98,12 +98,12 @@ class TastyPrinter(bytes: Array[Byte]) { } class TreeSectionUnpickler extends SectionUnpickler[String](ASTsSection) { - import dotty.tools.tasty.TastyFormat._ + import dotty.tools.tasty.TastyFormat.* private val sb: StringBuilder = new StringBuilder def unpickle(reader: TastyReader, tastyName: NameTable): String = { - import reader._ + import reader.* var indent = 0 def newLine() = { val length = treeStr("%5d".format(index(currentAddr) - index(startAddr))) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala index 679df42daca8..3a6f1e02a705 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala @@ -6,12 +6,12 @@ import scala.language.unsafeNulls import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader, TastyHeaderUnpickler, UnpicklerConfig} import TastyHeaderUnpickler.TastyVersion -import TastyFormat.NameTags._, TastyFormat.nameTagToString +import TastyFormat.NameTags.*, TastyFormat.nameTagToString import TastyBuffer.NameRef import scala.collection.mutable import Names.{TermName, termName, EmptyTermName} -import NameKinds._ +import NameKinds.* object TastyUnpickler { @@ -60,10 +60,10 @@ object TastyUnpickler { } -import TastyUnpickler._ +import TastyUnpickler.* class TastyUnpickler(reader: TastyReader) { - import reader._ + import reader.* def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index a04f05cb820c..2e4fe9967d6a 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -5,16 +5,16 @@ package tasty import scala.language.unsafeNulls -import 
dotty.tools.tasty.TastyFormat._ -import dotty.tools.tasty.TastyBuffer._ +import dotty.tools.tasty.TastyFormat.* +import dotty.tools.tasty.TastyBuffer.* -import ast.Trees._ +import ast.Trees.* import ast.{untpd, tpd} -import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, Flags._ +import Contexts.*, Symbols.*, Types.*, Names.*, Constants.*, Decorators.*, Annotations.*, Flags.* import Comments.{Comment, CommentsContext} -import NameKinds._ +import NameKinds.* import StdNames.nme -import transform.SymUtils._ +import transform.SymUtils.* import config.Config import collection.mutable import reporting.{Profile, NoProfile} @@ -27,9 +27,9 @@ object TreePickler: class TreePickler(pickler: TastyPickler) { val buf: TreeBuffer = new TreeBuffer pickler.newSection(ASTsSection, buf) - import buf._ + import buf.* import pickler.nameBuffer.nameIndex - import tpd._ + import tpd.* import TreePickler.* private val symRefs = Symbols.MutableSymbolMap[Addr](256) @@ -734,7 +734,7 @@ class TreePickler(pickler: TastyPickler) { } def pickleModifiers(sym: Symbol, mdef: MemberDef)(using Context): Unit = { - import Flags._ + import Flags.* var flags = sym.flags val privateWithin = sym.privateWithin if (privateWithin.exists) { @@ -749,7 +749,7 @@ class TreePickler(pickler: TastyPickler) { } def pickleFlags(flags: FlagSet, isTerm: Boolean)(using Context): Unit = { - import Flags._ + import Flags.* def writeModTag(tag: Int) = { assert(isModifierTag(tag)) writeByte(tag) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 840bb3bd94fb..6a02605b6ed7 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -6,43 +6,43 @@ package tasty import scala.language.unsafeNulls import Comments.CommentsContext -import Contexts._ -import Symbols._ -import Types._ -import Scopes._ -import SymDenotations._ -import Denotations._ -import Names._ -import NameOps._ -import StdNames._ -import Flags._ -import Constants._ -import Annotations._ -import NameKinds._ -import NamerOps._ -import ContextOps._ +import Contexts.* +import Symbols.* +import Types.* +import Scopes.* +import SymDenotations.* +import Denotations.* +import Names.* +import NameOps.* +import StdNames.* +import Flags.* +import Constants.* +import Annotations.* +import NameKinds.* +import NamerOps.* +import ContextOps.* import Variances.Invariant import TastyUnpickler.NameTable import typer.ConstFold import typer.Checking.checkNonCyclic -import typer.Nullables._ -import util.Spans._ +import typer.Nullables.* +import util.Spans.* import util.{SourceFile, Property} import ast.{Trees, tpd, untpd} -import Trees._ -import Decorators._ -import transform.SymUtils._ +import Trees.* +import Decorators.* +import transform.SymUtils.* import dotty.tools.dotc.quoted.QuotePatterns import dotty.tools.tasty.{TastyBuffer, TastyReader} -import TastyBuffer._ +import TastyBuffer.* import scala.annotation.{switch, tailrec} import scala.collection.mutable.ListBuffer import scala.collection.mutable import config.Printers.pickling -import dotty.tools.tasty.TastyFormat._ +import dotty.tools.tasty.TastyFormat.* import scala.annotation.constructorOnly import scala.annotation.internal.sharable @@ -57,8 +57,8 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameTable, posUnpicklerOpt: Option[PositionUnpickler], commentUnpicklerOpt: Option[CommentUnpickler]) { - import TreeUnpickler._ - import 
tpd._ + import TreeUnpickler.* + import tpd.* /** A map from addresses of definition entries to the symbols they define */ private val symAtAddr = new mutable.HashMap[Addr, Symbol] @@ -124,7 +124,7 @@ class TreeUnpickler(reader: TastyReader, } class Completer(reader: TastyReader)(using @constructorOnly _ctx: Context) extends LazyType { - import reader._ + import reader.* val owner = ctx.owner val mode = ctx.mode val source = ctx.source @@ -151,7 +151,7 @@ class TreeUnpickler(reader: TastyReader, } class TreeReader(val reader: TastyReader) { - import reader._ + import reader.* def forkAt(start: Addr): TreeReader = new TreeReader(subReader(start, endAddr)) def fork: TreeReader = forkAt(currentAddr) diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala index 2aeb1bdeefcc..e4c253fddc53 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala @@ -3,7 +3,7 @@ package dotc package core package unpickleScala2 -import Flags._ +import Flags.* /** Variable length byte arrays, with methods for basic pickling and unpickling. * @@ -195,7 +195,7 @@ object PickleBuffer { private type FlagMap = Array[Array[Long]] private val (scalaTermFlagMap, scalaTypeFlagMap) = { - import Scala2Flags._ + import Scala2Flags.* val corr = Map( PROTECTED_PKL -> Protected, diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala index cc2d7dd7ee56..78d1666ad580 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala @@ -3,8 +3,8 @@ package dotc package core package unpickleScala2 -import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Phases._ -import Decorators._ +import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, StdNames.*, Phases.* +import Decorators.* import scala.collection.mutable.ListBuffer /** Erasure logic specific to Scala 2 symbols. 
*/ diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 3e211e75b73b..3b51496e4ff1 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -9,30 +9,30 @@ import java.io.IOException import java.lang.Float.intBitsToFloat import java.lang.Double.longBitsToDouble -import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._ -import StdNames._, Denotations._, NameOps._, Flags._, Constants._, Annotations._, Phases._ +import Contexts.*, Symbols.*, Types.*, Scopes.*, SymDenotations.*, Names.*, NameOps.* +import StdNames.*, Denotations.*, NameOps.*, Flags.*, Constants.*, Annotations.*, Phases.* import NameKinds.{Scala2MethodNameKinds, SuperAccessorName, ExpandedName} -import util.Spans._ -import dotty.tools.dotc.ast.{tpd, untpd}, ast.tpd._ +import util.Spans.* +import dotty.tools.dotc.ast.{tpd, untpd}, ast.tpd.* import ast.untpd.Modifiers import backend.sjs.JSDefinitions -import printing.Texts._ +import printing.Texts.* import printing.Printer import io.AbstractFile -import util.common._ +import util.common.* import util.NoSourcePosition import typer.Checking.checkNonCyclic -import typer.Nullables._ -import transform.SymUtils._ -import PickleBuffer._ -import PickleFormat._ -import Decorators._ -import TypeApplications._ +import typer.Nullables.* +import transform.SymUtils.* +import PickleBuffer.* +import PickleFormat.* +import Decorators.* +import TypeApplications.* import classfile.ClassfileParser import scala.collection.mutable import scala.collection.mutable.ListBuffer import scala.annotation.switch -import reporting._ +import reporting.* object Scala2Unpickler { @@ -146,7 +146,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas // print("unpickling "); showPickled() // !!! 
DEBUG - import Scala2Unpickler._ + import Scala2Unpickler.* val moduleRoot: SymDenotation = inContext(ictx) { moduleClassRoot.sourceModule.denot } assert(moduleRoot.isTerm) diff --git a/compiler/src/dotty/tools/dotc/coverage/Location.scala b/compiler/src/dotty/tools/dotc/coverage/Location.scala index aa7a586d4b57..88893709b8bd 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Location.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Location.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc package coverage -import ast.tpd._ +import ast.tpd.* import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags.* import java.nio.file.Path diff --git a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala index 85a56b9f1d15..16e7cc9c7adc 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala @@ -8,7 +8,7 @@ import java.nio.charset.StandardCharsets import scala.io.Codec -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Phases.Phase import dotty.tools.io.File diff --git a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala index c148ff5f9bca..c1bd6b6778fd 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala @@ -4,10 +4,10 @@ package decompiler import scala.language.unsafeNulls -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.* import dotty.tools.dotc.core.tasty.TastyHTMLPrinter -import dotty.tools.dotc.reporting._ +import dotty.tools.dotc.reporting.* import dotty.tools.io.AbstractFile import scala.quoted.runtime.impl.QuotesImpl diff --git a/compiler/src/dotty/tools/dotc/decompiler/Main.scala b/compiler/src/dotty/tools/dotc/decompiler/Main.scala index 3cc94f782793..ecd067d3debf 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/Main.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/Main.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.decompiler import java.nio.file.Files import dotty.tools.dotc -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.io.AbstractFile /** Main class of the `dotc -decompiler` decompiler. 
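The rewrite also covers wildcard imports from stable values, not just from packages and objects: the `TastyUnpickler` and `TreeUnpickler` hunks earlier in this patch turn `import reader._` into `import reader.*`, importing the members of a `TastyReader` instance. Renaming selectors, by contrast, are left alone; the unchanged context line `import scala.annotation.{threadUnsafe => tu}` in `Definitions.scala` keeps its `=>`, even though Scala 3 also accepts an `as` spelling. A sketch of both forms follows; the class and method names are illustrative, not taken from the patch:

    import dotty.tools.tasty.TastyReader
    import scala.annotation.{threadUnsafe => tu}  // pre-existing rename, untouched by this commit
    import scala.annotation.{threadUnsafe as tu2} // equivalent Scala 3 `as` spelling

    class FirstTag(reader: TastyReader):
      import reader.*                  // wildcard over an instance: members of `reader` come into scope
      def firstTag(): Int = readByte() // resolves to reader.readByte()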
diff --git a/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala b/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala index 62f3e75d2001..7f396654749e 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.decompiler -import dotty.tools.dotc.fromtasty._ +import dotty.tools.dotc.fromtasty.* import dotty.tools.dotc.core.Phases.Phase /** Compiler from tasty to user readable high text representation diff --git a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala index f032f8d4d065..4969882b7766 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala @@ -2,13 +2,13 @@ package dotty.tools package dotc package fromtasty -import core._ -import Decorators._ -import Contexts._ +import core.* +import Decorators.* +import Contexts.* import Symbols.{Symbol, ClassSymbol} import SymDenotations.ClassDenotation import Denotations.staticRef -import NameOps._ +import NameOps.* import ast.Trees.Tree import Phases.Phase diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala index 923892b62f13..c0adf454b063 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala @@ -2,8 +2,8 @@ package dotty.tools package dotc package fromtasty -import core._ -import Contexts._ +import core.* +import Contexts.* import Phases.Phase class TASTYCompiler extends Compiler { diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala index fb0abe3332ed..2f4ecad8859d 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala @@ -5,7 +5,7 @@ package fromtasty import scala.language.unsafeNulls import io.{JarArchive, AbstractFile, Path} -import core.Contexts._ +import core.Contexts.* import core.Decorators.em import java.io.File diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 8d594448dc2a..07472ee9b4dd 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -28,7 +28,7 @@ import scala.annotation.constructorOnly /** General support for inlining */ object Inliner: - import tpd._ + import tpd.* private[inlines] type DefBuffer = mutable.ListBuffer[ValOrDefDef] @@ -143,8 +143,8 @@ end Inliner * @param rhsToInline the body of the inlineable method that replaces the call. 
*/ class Inliner(val call: tpd.Tree)(using Context): - import tpd._ - import Inliner._ + import tpd.* + import Inliner.* private val methPart = funPart(call) protected val callTypeArgs = typeArgss(call).flatten @@ -734,7 +734,7 @@ class Inliner(val call: tpd.Tree)(using Context): */ class InlineTyper(initialErrorCount: Int, @constructorOnly nestingLevel: Int = ctx.nestingLevel + 1) extends ReTyper(nestingLevel): - import reducer._ + import reducer.* override def ensureAccessible(tpe: Type, superAccess: Boolean, pos: SrcPos)(using Context): Type = { tpe match { diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 25e9b1480370..0accbad5b473 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -5,7 +5,7 @@ package inlines import ast.*, core.* import Flags.*, Symbols.*, Types.*, Decorators.*, Constants.*, Contexts.* import StdNames.{tpnme, nme} -import transform.SymUtils._ +import transform.SymUtils.* import typer.* import NameKinds.BodyRetainerName import SymDenotations.SymDenotation @@ -22,7 +22,7 @@ import util.Spans.Span /** Support for querying inlineable methods and for inlining calls to such methods */ object Inlines: - import tpd._ + import tpd.* /** An exception signalling that an inline info cannot be computed due to a * cyclic reference. i14772.scala shows a case where this happens. @@ -395,7 +395,7 @@ object Inlines: * @param rhsToInline the body of the inlineable method that replaces the call. */ private class InlineCall(call: tpd.Tree)(using Context) extends Inliner(call): - import tpd._ + import tpd.* import Inlines.* /** The Inlined node representing the inlined call */ diff --git a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala index 060c8d21f390..10b55d69bf37 100644 --- a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala @@ -3,20 +3,20 @@ package dotc package inlines import dotty.tools.dotc.ast.{Trees, tpd, untpd} -import Trees._ -import core._ -import Flags._ -import Symbols._ -import Flags._ -import Types._ -import Decorators._ +import Trees.* +import core.* +import Flags.* +import Symbols.* +import Flags.* +import Types.* +import Decorators.* import StdNames.nme -import Contexts._ +import Contexts.* import Names.{Name, TermName} import NameKinds.{InlineAccessorName, UniqueInlineName} import inlines.Inlines -import NameOps._ -import Annotations._ +import NameOps.* +import Annotations.* import transform.{AccessProxies, Splicer} import staging.CrossStageSafety import transform.SymUtils.* @@ -25,7 +25,7 @@ import util.Property import staging.StagingLevel object PrepareInlineable { - import tpd._ + import tpd.* private val InlineAccessorsKey = new Property.Key[InlineAccessors] diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 8fb844f1f333..6e91254c2d72 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -3,15 +3,15 @@ package dotty.tools.dotc.interactive import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.ast.NavigateAST import dotty.tools.dotc.config.Printers.interactiv -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Contexts.* 
+import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Denotations.SingleDenotation -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Names.{Name, TermName} import dotty.tools.dotc.core.NameKinds.SimpleNameKind -import dotty.tools.dotc.core.NameOps._ +import dotty.tools.dotc.core.NameOps.* import dotty.tools.dotc.core.Phases -import dotty.tools.dotc.core.Scopes._ +import dotty.tools.dotc.core.Scopes.* import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol, defn, newSymbol} import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.SymDenotations.SymDenotation @@ -42,7 +42,7 @@ case class Completion(label: String, description: String, symbols: List[Symbol]) object Completion: - import dotty.tools.dotc.ast.tpd._ + import dotty.tools.dotc.ast.tpd.* /** Get possible completions from tree at `pos` * diff --git a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala index fd6d426f39bb..6c8e3b61cd01 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala @@ -4,21 +4,21 @@ package interactive import scala.language.unsafeNulls -import scala.collection._ +import scala.collection.* import ast.{NavigateAST, Trees, tpd, untpd} -import core._ -import Decorators._, ContextOps._ -import Contexts._, Flags._, Names._, NameOps._, Symbols._, Trees._, Types._ -import transform.SymUtils._ -import util.Spans._, util.SourceFile, util.SourcePosition +import core.* +import Decorators.*, ContextOps.* +import Contexts.*, Flags.*, Names.*, NameOps.*, Symbols.*, Trees.*, Types.* +import transform.SymUtils.* +import util.Spans.*, util.SourceFile, util.SourcePosition /** High-level API to get information out of typed trees, designed to be used by IDEs. * * @see `InteractiveDriver` to get typed trees from code. 
*/ object Interactive { - import ast.tpd._ + import ast.tpd.* object Include { case class Set private[Include] (val bits: Int) extends AnyVal { diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala index 38a93125a342..af1484896c72 100644 --- a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala +++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package interactive -import core._ -import Phases._ -import parsing._ -import typer._ +import core.* +import Phases.* +import parsing.* +import typer.* class InteractiveCompiler extends Compiler { // TODO: Figure out what phases should be run in IDEs diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala index 2a2860cd1ba3..b00cd1036018 100644 --- a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala +++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala @@ -5,28 +5,28 @@ package interactive import scala.language.unsafeNulls import java.net.URI -import java.io._ -import java.nio.file._ +import java.io.* +import java.nio.file.* import java.nio.file.attribute.BasicFileAttributes import java.nio.charset.StandardCharsets -import java.util.zip._ +import java.util.zip.* -import scala.collection._ +import scala.collection.* import scala.io.Codec import dotty.tools.io.AbstractFile import ast.{Trees, tpd} -import core._, core.Decorators._ -import Contexts._, Names._, NameOps._, Symbols._, SymDenotations._, Trees._, Types._ +import core.*, core.Decorators.* +import Contexts.*, Names.*, NameOps.*, Symbols.*, SymDenotations.*, Trees.*, Types.* import Denotations.staticRef -import classpath._ -import reporting._ -import util._ +import classpath.* +import reporting.* +import util.* /** A Driver subclass designed to be used from IDEs */ class InteractiveDriver(val settings: List[String]) extends Driver { - import tpd._ + import tpd.* override def sourcesRequired: Boolean = false @@ -148,7 +148,7 @@ class InteractiveDriver(val settings: List[String]) extends Driver { def run(uri: URI, sourceCode: String): List[Diagnostic] = run(uri, SourceFile.virtual(uri, sourceCode)) def run(uri: URI, source: SourceFile): List[Diagnostic] = { - import typer.ImportInfo._ + import typer.ImportInfo.* val previousCtx = myCtx try { diff --git a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala index 60f01396e91e..5480d4a43043 100644 --- a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala +++ b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala @@ -4,9 +4,9 @@ package interactive import ast.tpd -import core._ -import Contexts._, NameOps._, Symbols._, StdNames._ -import util._, util.Spans._ +import core.* +import Contexts.*, NameOps.*, Symbols.*, StdNames.* +import util.*, util.Spans.* /** * A `tree` coming from `source` @@ -55,7 +55,7 @@ object SourceTree { !sym.source.exists) // FIXME: We cannot deal with external projects yet Nil else { - import ast.Trees._ + import ast.Trees.* def sourceTreeOfClass(tree: tpd.Tree): Option[SourceTree] = tree match { case PackageDef(_, stats) => stats.flatMap(sourceTreeOfClass).headOption diff --git a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala index a3769f4f813d..aa0e9a47f775 100644 
--- a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala +++ b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package parsing -import util.Chars._ +import util.Chars.* import scala.compiletime.uninitialized diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 6ec896dcb200..8e075acdf5e3 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -6,31 +6,31 @@ import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Flags.FlagSet -import JavaTokens._ -import JavaScanners._ +import JavaTokens.* +import JavaScanners.* import Scanners.Offset -import Parsers._ -import core._ -import Contexts._ -import Names._ -import Types._ -import ast.Trees._ -import Decorators._ -import StdNames._ -import reporting._ +import Parsers.* +import core.* +import Contexts.* +import Names.* +import Types.* +import ast.Trees.* +import Decorators.* +import StdNames.* +import reporting.* import dotty.tools.dotc.util.SourceFile -import util.Spans._ +import util.Spans.* import scala.collection.mutable.{ListBuffer, LinkedHashMap} object JavaParsers { - import ast.untpd._ + import ast.untpd.* class JavaParser(source: SourceFile)(using Context) extends ParserCommon(source) { val definitions: Definitions = ctx.definitions - import definitions._ + import definitions.* val in: JavaScanner = new JavaScanner(source) diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala index 6a1d5d8b216c..f50dcdda438c 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala @@ -2,13 +2,13 @@ package dotty.tools package dotc package parsing -import core.Contexts._ +import core.Contexts.* import core.Names.SimpleName -import Scanners._ +import Scanners.* import util.SourceFile -import JavaTokens._ +import JavaTokens.* import scala.annotation.{switch, tailrec} -import util.Chars._ +import util.Chars.* import PartialFunction.cond import core.Decorators.em diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index c845ea8f74c7..a1fb5632d77f 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -8,35 +8,35 @@ import scala.annotation.internal.sharable import scala.collection.mutable.ListBuffer import scala.collection.immutable.BitSet import util.{ SourceFile, SourcePosition, NoSourcePosition } -import Tokens._ -import Scanners._ +import Tokens.* +import Scanners.* import xml.MarkupParsers.MarkupParser -import core._ -import Flags._ -import Contexts._ -import Names._ +import core.* +import Flags.* +import Contexts.* +import Names.* import NameKinds.{WildcardParamName, QualifiedName} -import NameOps._ +import NameOps.* import ast.{Positioned, Trees} -import ast.Trees._ -import StdNames._ -import util.Spans._ -import Constants._ +import ast.Trees.* +import StdNames.* +import util.Spans.* +import Constants.* import Symbols.NoSymbol -import ScriptParsers._ -import Decorators._ +import ScriptParsers.* +import Decorators.* import util.Chars import scala.annotation.tailrec import rewrites.Rewrites.{patch, overlapsPatch} -import reporting._ +import reporting.* import config.Feature import 
config.Feature.{sourceVersion, migrateTo3, globalOnlyImports} -import config.SourceVersion._ +import config.SourceVersion.* import config.SourceVersion object Parsers { - import ast.untpd._ + import ast.untpd.* case class OpInfo(operand: Tree, operator: Ident, offset: Offset) @@ -1233,7 +1233,7 @@ object Parsers { case EXPOLIT => return Number(digits, NumberKind.Floating) case _ => } - import scala.util.FromDigits._ + import scala.util.FromDigits.* val value = try token match { case INTLIT => intFromDigits(digits, in.base) diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 0339fc0531f4..44b0c43e545b 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -4,14 +4,14 @@ package parsing import scala.language.unsafeNulls -import core.Names._, core.Contexts._, core.Decorators._, util.Spans._ -import core.StdNames._, core.Comments._ +import core.Names.*, core.Contexts.*, core.Decorators.*, util.Spans.* +import core.StdNames.*, core.Comments.* import util.SourceFile -import util.Chars._ +import util.Chars.* import util.{SourcePosition, CharBuffer} import util.Spans.Span import config.Config -import Tokens._ +import Tokens.* import scala.annotation.{switch, tailrec} import scala.collection.mutable import scala.collection.immutable.SortedMap diff --git a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala index d11db73b0455..d71e4cf11102 100644 --- a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala @@ -3,9 +3,9 @@ package dotc package parsing import util.SourceFile -import core._ -import Contexts._ -import Parsers._ +import core.* +import Contexts.* +import Parsers.* /**

Performs the following context-free rewritings:

@@ -45,7 +45,7 @@ import Parsers._ */ object ScriptParsers { - import ast.untpd._ + import ast.untpd.* class ScriptParser(source: SourceFile)(using Context) extends Parser(source) { @@ -118,7 +118,7 @@ object ScriptParsers { * } * } */ - import definitions._ + import definitions.* def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) } def emptyInit = DefDef( diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index eea68594b08d..fbf4e8d701dd 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -5,7 +5,7 @@ package parsing import scala.language.unsafeNulls import collection.immutable.BitSet -import core.Decorators._ +import core.Decorators.* import core.StdNames.nme abstract class TokensCommon { diff --git a/compiler/src/dotty/tools/dotc/parsing/package.scala b/compiler/src/dotty/tools/dotc/parsing/package.scala index ee3ecda60aee..0e51b487d7c4 100644 --- a/compiler/src/dotty/tools/dotc/parsing/package.scala +++ b/compiler/src/dotty/tools/dotc/parsing/package.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc -import util.Chars._ +import util.Chars.* import core.Names.Name import core.StdNames.nme -import core.NameOps._ +import core.NameOps.* package object parsing { diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala index 0f7d426fbd28..803470fe85a5 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala @@ -9,7 +9,7 @@ package dotty.tools.dotc package parsing package xml -import Utility._ +import Utility.* import util.Chars.SU import scala.collection.BufferedIterator diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index 0f0a2a3ee61f..5567b4f569d5 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -12,13 +12,13 @@ import core.Contexts.Context import mutable.{ Buffer, ArrayBuffer, ListBuffer } import scala.util.control.ControlThrowable import util.Chars.SU -import Parsers._ -import util.Spans._ -import core._ -import Constants._ +import Parsers.* +import util.Spans.* +import core.* +import Constants.* import Decorators.{em, toMessage} import util.SourceFile -import Utility._ +import Utility.* // XXX/Note: many/most of the functions in here are almost direct cut and pastes @@ -39,7 +39,7 @@ import Utility._ */ object MarkupParsers { - import ast.untpd._ + import ast.untpd.* case object MissingEndTagControl extends ControlThrowable { override def getMessage: String = "start tag was here: " diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala index 6af2250a5dd4..d1f2875064d4 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala @@ -7,12 +7,12 @@ import scala.language.unsafeNulls import scala.compiletime.uninitialized import scala.collection.mutable -import core._ -import Decorators._ +import core.* +import Decorators.* import Flags.Mutable -import Names._, StdNames._, ast.Trees._, ast.{tpd, untpd} -import Symbols._, Contexts._ -import util.Spans._ +import Names.*, StdNames.*, ast.Trees.*, 
ast.{tpd, untpd} +import Symbols.*, Contexts.* +import util.Spans.* import Parsers.Parser /** This class builds instance of `Tree` that represent XML. @@ -29,7 +29,7 @@ import Parsers.Parser class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { import Constants.Constant - import untpd._ + import untpd.* import parser.atSpan diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala b/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala index 87412cf6d69c..6577030ec671 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala @@ -130,7 +130,7 @@ object Utility { * See [4] and Appendix B of XML 1.0 specification. */ def isNameChar(ch: Char): Boolean = { - import java.lang.Character._ + import java.lang.Character.* // The constants represent groups Mc, Me, Mn, Lm, and Nd. isNameStart(ch) || (getType(ch).toByte match { @@ -151,7 +151,7 @@ object Utility { * See [3] and Appendix B of XML 1.0 specification */ def isNameStart(ch: Char): Boolean = { - import java.lang.Character._ + import java.lang.Character.* getType(ch).toByte match { case LOWERCASE_LETTER | diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index 30ef94239ac1..ce77a5b9d97a 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -3,10 +3,10 @@ package plugins import scala.language.unsafeNulls -import core._ -import Contexts._ -import Phases._ -import dotty.tools.io._ +import core.* +import Contexts.* +import Phases.* +import dotty.tools.io.* import transform.MegaPhase.MiniPhase import java.io.InputStream diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index 12acc3d7b4c3..4d6e44a3bea7 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -3,12 +3,12 @@ package plugins import scala.language.unsafeNulls -import core._ -import Contexts._ +import core.* +import Contexts.* import Decorators.em import config.{ PathResolver, Feature } -import dotty.tools.io._ -import Phases._ +import dotty.tools.io.* +import Phases.* import config.Printers.plugins.{ println => debug } import scala.compiletime.uninitialized diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index c0be888955e9..02f470324e8a 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -6,12 +6,12 @@ import scala.language.unsafeNulls import scala.collection.mutable -import core._ -import Texts._, Types._, Flags._, Symbols._, Contexts._ -import Decorators._ +import core.* +import Texts.*, Types.*, Flags.*, Symbols.*, Contexts.* +import Decorators.* import reporting.Message import util.DiffUtil -import Highlighting._ +import Highlighting.* object Formatting { diff --git a/compiler/src/dotty/tools/dotc/printing/Highlighting.scala b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala index ceb5afdea750..c9b3e2a5aa83 100644 --- a/compiler/src/dotty/tools/dotc/printing/Highlighting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala @@ -3,7 +3,7 @@ package dotc package printing import scala.collection.mutable -import core.Contexts._ +import core.Contexts.* object Highlighting { diff --git 
a/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala b/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala index c9ac4a5af4ce..24f02f37956e 100644 --- a/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala +++ b/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala @@ -2,8 +2,8 @@ package dotty.tools package dotc package printing -import core._ -import Contexts._ +import core.* +import Contexts.* import util.Property import Texts.Text diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 60a25413d8e5..d5c60cf44579 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -1,14 +1,14 @@ package dotty.tools.dotc package printing -import core._ -import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._, Denotations._ -import StdNames._ -import Contexts._ +import core.* +import Texts.*, Types.*, Flags.*, Names.*, Symbols.*, NameOps.*, Constants.*, Denotations.* +import StdNames.* +import Contexts.* import Scopes.Scope, Denotations.Denotation, Annotations.Annotation import StdNames.nme -import ast.Trees._ -import typer.Implicits._ +import ast.Trees.* +import typer.Implicits.* import typer.ImportInfo import Variances.varianceSign import util.SourcePosition diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index eafa399313da..8687925ed5fb 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -2,8 +2,8 @@ package dotty.tools package dotc package printing -import core._ -import Texts._, ast.Trees._ +import core.* +import Texts.*, ast.Trees.* import Types.{Type, SingletonType, LambdaParam, NamedType}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation, Contexts.Context diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 70bb6f89663c..9426842363a7 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -2,29 +2,29 @@ package dotty.tools package dotc package printing -import core._ +import core.* import Constants.* -import Texts._ -import Types._ -import Flags._ -import Names._ -import Symbols._ -import NameOps._ +import Texts.* +import Types.* +import Flags.* +import Names.* +import Symbols.* +import NameOps.* import TypeErasure.ErasedValueType -import Contexts._ +import Contexts.* import Annotations.Annotation -import Denotations._ -import SymDenotations._ +import Denotations.* +import SymDenotations.* import StdNames.{nme, tpnme} import ast.{Trees, tpd, untpd} import typer.{Implicits, Namer, Applications} -import typer.ProtoTypes._ -import Trees._ -import TypeApplications._ +import typer.ProtoTypes.* +import Trees.* +import TypeApplications.* import NameKinds.{WildcardParamName, DefaultGetterName} import util.Chars.isOperatorPart -import transform.TypeUtils._ -import transform.SymUtils._ +import transform.TypeUtils.* +import transform.SymUtils.* import config.{Config, Feature} import dotty.tools.dotc.util.SourcePosition @@ -345,7 +345,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } protected def toTextCore[T <: Untyped](tree: Tree[T]): Text = { - import untpd._ + import untpd.* def isLocalThis(tree: Tree) 
= tree.typeOpt match { case tp: ThisType => tp.cls == ctx.owner.enclosingClass @@ -781,7 +781,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { super.toTextCapturing(tp, refsText, boxText) override def toText[T <: Untyped](tree: Tree[T]): Text = controlled { - import untpd._ + import untpd.* var txt = toTextCore(tree) @@ -924,7 +924,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } protected def defDefToText[T <: Untyped](tree: DefDef[T]): Text = { - import untpd._ + import untpd.* dclTextOr(tree) { val defKeyword = modText(tree.mods, tree.symbol, keywordStr("def"), isType = false) val isExtension = tree.hasType && tree.symbol.is(ExtensionMethod) diff --git a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala index ea3afef27fae..f02cbf159224 100644 --- a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala @@ -2,13 +2,13 @@ package dotty.tools.dotc.printing import dotty.tools.dotc.core.Constants import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.NameOps._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.NameOps.* import dotty.tools.dotc.core.Names.Name -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.printing.Texts._ +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.printing.Texts.* class ReplPrinter(_ctx: Context) extends RefinedPrinter(_ctx) { diff --git a/compiler/src/dotty/tools/dotc/printing/Showable.scala b/compiler/src/dotty/tools/dotc/printing/Showable.scala index 4a0e68861a1a..4480aa9c76a4 100644 --- a/compiler/src/dotty/tools/dotc/printing/Showable.scala +++ b/compiler/src/dotty/tools/dotc/printing/Showable.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package printing -import core._ +import core.* -import Contexts._, Texts._, Decorators._ +import Contexts.*, Texts.*, Decorators.* import config.Config.summarizeDepth trait Showable extends Any { diff --git a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala index 7030776dd06c..6f65320d2c8e 100644 --- a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala +++ b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala @@ -4,11 +4,11 @@ package printing import scala.language.unsafeNulls import dotty.tools.dotc.ast.untpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.parsing.Parsers.Parser import dotty.tools.dotc.parsing.Scanners.Scanner -import dotty.tools.dotc.parsing.Tokens._ +import dotty.tools.dotc.parsing.Tokens.* import dotty.tools.dotc.reporting.Reporter import dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.SourceFile @@ -87,7 +87,7 @@ object SyntaxHighlighting { highlightPosition(comment.span, CommentColor) object TreeHighlighter extends untpd.UntypedTreeTraverser { - import untpd._ + import untpd.* def ignored(tree: NameTree) = { val name = tree.name.toTermName diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index 64cc08160701..a13c9d41b529 100644 --- 
a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -11,7 +11,7 @@ import javax.management.openmbean.CompositeData import javax.management.{Notification, NotificationEmitter, NotificationListener} import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.io.AbstractFile import annotation.internal.sharable @@ -83,7 +83,7 @@ private [profile] object NoOpProfiler extends Profiler { override def finished(): Unit = () } private [profile] object RealProfiler { - import scala.jdk.CollectionConverters._ + import scala.jdk.CollectionConverters.* val runtimeMx: RuntimeMXBean = ManagementFactory.getRuntimeMXBean val memoryMx: MemoryMXBean = ManagementFactory.getMemoryMXBean val gcMx: List[GarbageCollectorMXBean] = ManagementFactory.getGarbageCollectorMXBeans.asScala.toList @@ -106,7 +106,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) @nowarn("cat=deprecation") private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { - import RealProfiler._ + import RealProfiler.* val current = Thread.currentThread() ProfileSnap( diff --git a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala index 1e9969e0bc65..e3ea69d9be06 100644 --- a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala +++ b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala @@ -3,11 +3,11 @@ package dotty.tools.dotc.profile import scala.language.unsafeNulls import java.util.concurrent.ThreadPoolExecutor.AbortPolicy -import java.util.concurrent._ +import java.util.concurrent.* import java.util.concurrent.atomic.AtomicInteger import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* sealed trait ThreadPoolFactory { diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala index ef1ab91e3e2a..c124e12077fe 100644 --- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -11,19 +11,19 @@ import java.lang.reflect.{InvocationTargetException, Method => JLRMethod} import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.TreeMapWithImplicits -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Constants._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Constants.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Denotations.staticRef -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.NameKinds.FlatName -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.TypeErasure -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.quoted.* import dotty.tools.dotc.typer.ImportInfo.withRootImports import dotty.tools.dotc.util.SrcPos import dotty.tools.dotc.reporting.Message @@ -32,8 +32,8 @@ import dotty.tools.dotc.core.CyclicReference /** Tree 
interpreter for metaprogramming constructs */ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): - import Interpreter._ - import tpd._ + import Interpreter.* + import tpd.* val classLoader = if ctx.owner.topLevelClass.name.startsWith(str.REPL_SESSION_LINE) then @@ -328,7 +328,7 @@ object Interpreter: class StopInterpretation(val msg: Message, val pos: SrcPos) extends Exception object Call: - import tpd._ + import tpd.* /** Matches an expression that is either a field access or an application * It returns a TermRef containing field accessed or a method reference and the arguments passed to it. */ diff --git a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala index 141b349826b4..d8ba1b72ce3b 100644 --- a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala +++ b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.quoted import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.typer.Typer import dotty.tools.dotc.util.{Property, SourcePosition} diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 1b354abf929f..a9b66fc056e2 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -1,14 +1,14 @@ package dotty.tools.dotc.quoted -import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.{TreeTypeMap, tpd} -import dotty.tools.dotc.config.Printers._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.config.Printers.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Mode -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.tasty.{ PositionPickler, TastyPickler, TastyPrinter, TreePickler } import dotty.tools.dotc.core.tasty.DottyUnpickler import dotty.tools.dotc.core.tasty.TreeUnpickler.UnpickleMode @@ -16,14 +16,14 @@ import dotty.tools.dotc.report import dotty.tools.dotc.reporting.Message import scala.quoted.Quotes -import scala.quoted.runtime.impl._ +import scala.quoted.runtime.impl.* import scala.collection.mutable -import QuoteUtils._ +import QuoteUtils.* object PickledQuotes { - import tpd._ + import tpd.* /** Pickle the tree of the quote into strings */ def pickleQuote(tree: Tree)(using Context): List[String] = diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala index 48884f6b2d6e..eb5395194d11 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala @@ -18,12 +18,12 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.TypeOps.* import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.reporting.IllegalVariableInPatternAlternative -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.transform.SymUtils.* import scala.collection.mutable object QuotePatterns: - import tpd._ + import tpd.* /** Check for restricted patterns */ def checkPattern(quotePattern: 
QuotePattern)(using Context): Unit = new tpd.TreeTraverser { diff --git a/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala b/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala index 604c8da3420a..a015c726c59f 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc.quoted import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Symbols.* object QuoteUtils: - import tpd._ + import tpd.* /** Get the owner of a tree if it has one */ def treeOwner(tree: Tree)(using Context): Option[Symbol] = { diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala index c063e437cb19..4147e49b87ce 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc.quoted -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.util.Property import dotty.tools.dotc.ast.tpd object QuotesCache { - import tpd._ + import tpd.* /** A key to be used in a context property that caches the unpickled trees */ private val QuotesCacheKey = new Property.Key[collection.mutable.Map[String | List[String], Tree]] diff --git a/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala b/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala index 87d0cbb7be08..cfc09a8ed836 100644 --- a/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala +++ b/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala @@ -3,11 +3,11 @@ package reflect import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* object FromSymbol { diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 75261fb6890e..142561dcbbee 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc -import reporting._ -import Diagnostic._ +import reporting.* +import Diagnostic.* import util.{SourcePosition, NoSourcePosition, SrcPos} -import core._ -import Contexts._, Flags.*, Symbols._, Decorators._ +import core.* +import Contexts.*, Flags.*, Symbols.*, Decorators.* import config.SourceVersion -import ast._ +import ast.* import config.Feature.sourceVersion import java.lang.System.currentTimeMillis diff --git a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala index a95af962c053..ce7477f4da70 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package reporting -import core.Contexts._ +import core.Contexts.* import java.io.{ BufferedReader, PrintWriter } import Diagnostic.Error diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala 
b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala index 624aa93924e8..7a8edb233aee 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala @@ -5,12 +5,12 @@ package reporting import scala.language.unsafeNulls import dotty.tools.dotc.config.Settings.Setting -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING} import dotty.tools.dotc.util.SourcePosition import java.util.{Collections, Optional, List => JList} -import scala.util.chaining._ +import scala.util.chaining.* import core.Decorators.toMessage object Diagnostic: diff --git a/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala b/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala index f78fd3bd190b..a9b2f68d07d6 100644 --- a/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala +++ b/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala @@ -2,8 +2,8 @@ package dotty.tools package dotc package reporting -import core._ -import Contexts._ +import core.* +import Contexts.* import Decorators.*, Symbols.*, Names.*, Types.*, Flags.* import typer.ProtoTypes.{FunProto, SelectionProto} import transform.SymUtils.isNoValue diff --git a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala index a2062bd1b2c7..f469c03764c0 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala @@ -6,7 +6,7 @@ import scala.language.unsafeNulls import collection.mutable import core.Contexts.Context -import Diagnostic._ +import Diagnostic.* /** A re-usable Reporter used in Contexts#test */ class ExploringReporter extends StoreReporter(null, fromTyperState = false): diff --git a/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala index 9b6a3c75ba5d..5910d9b4d656 100644 --- a/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package reporting -import core.Contexts._ +import core.Contexts.* /** * This trait implements `isHidden` so that we avoid reporting non-sensical messages. 
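
Every hunk in this commit performs the same mechanical rewrite: the legacy Scala 2 underscore wildcard in imports is replaced by the `*` wildcard introduced in Scala 3. A minimal sketch of the equivalence follows (not part of the patch; `Dia` is a hypothetical stand-in class, not a compiler type):

    // WildcardImports.scala -- illustration only, assuming Scala 3.x.
    // `_` and `*` denote the same wildcard import today; `_` is slated
    // for removal and already warns under `-source future`, which is
    // what motivates this migration.
    import scala.collection.mutable._   // legacy Scala 2 syntax
    import scala.collection.mutable.*   // Scala 3 syntax adopted here

    object WildcardImports:
      // The wildcard also applies to stable values (compare the
      // `import dia.*` change in the MessageRendering hunk below):
      // it brings the members of that particular instance into scope.
      final case class Dia(msg: String, level: Int)
      def render(dia: Dia): String =
        import dia.*                    // `msg` and `level` now unqualified
        s"[$level] $msg"

Because `*` was already valid in every Scala 3 version, the rewrite is purely syntactic and behavior-preserving, which is why it can be applied wholesale across parsing, printing, reporting, and the other subsystems touched below.
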
diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala index f53359fb8b19..6881235e3dc1 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala @@ -6,11 +6,11 @@ import scala.language.unsafeNulls import java.lang.System.{lineSeparator => EOL} -import core.Contexts._ -import core.Decorators._ +import core.Contexts.* +import core.Decorators.* import printing.Highlighting.{Blue, Red, Yellow} import printing.SyntaxHighlighting -import Diagnostic._ +import Diagnostic.* import util.{ SourcePosition, NoSourcePosition } import util.Chars.{ LF, CR, FF, SU } import scala.annotation.switch @@ -210,7 +210,7 @@ trait MessageRendering { } private def appendFilterHelp(dia: Diagnostic, sb: mutable.StringBuilder): Unit = - import dia._ + import dia.* val hasId = msg.errorId.errorNumber >= 0 val category = dia match { case _: UncheckedWarning => "unchecked" @@ -228,7 +228,7 @@ trait MessageRendering { /** The whole message rendered from `msg` */ def messageAndPos(dia: Diagnostic)(using Context): String = { - import dia._ + import dia.* val pos1 = pos.nonInlined val inlineStack = inlinePosStack(pos).filter(_ != pos1) val maxLineNumber = diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index 3be1a159c55c..f567e094e831 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -4,11 +4,11 @@ package reporting import scala.language.unsafeNulls -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Mode import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol} -import dotty.tools.dotc.reporting.Diagnostic._ -import dotty.tools.dotc.reporting.Message._ +import dotty.tools.dotc.reporting.Diagnostic.* +import dotty.tools.dotc.reporting.Message.* import dotty.tools.dotc.util.NoSourcePosition import java.io.{BufferedReader, PrintWriter} @@ -63,7 +63,7 @@ object Reporter { * error messages. 
*/ abstract class Reporter extends interfaces.ReporterResult { - import Reporter._ + import Reporter.* /** Report a diagnostic */ def doReport(dia: Diagnostic)(using Context): Unit @@ -179,7 +179,7 @@ abstract class Reporter extends interfaces.ReporterResult { case _ => dia def go() = - import Action._ + import Action.* dia match case w: Warning => WConf.parsed.action(dia) match case Error => issueUnconfigured(w.toError) diff --git a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala index 9783a3208a60..aef5f2c5863b 100644 --- a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package reporting -import core.Contexts._ +import core.Contexts.* import collection.mutable import config.Printers.typr -import Diagnostic._ +import Diagnostic.* /** This class implements a Reporter that stores all messages * diff --git a/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala index 8d7204a93fa2..c0b5ffe8e650 100644 --- a/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala @@ -5,7 +5,7 @@ package reporting import scala.language.unsafeNulls import collection.mutable -import Diagnostic._ +import Diagnostic.* /** A re-usable Reporter used in Contexts#test */ class TestingReporter extends StoreReporter(null, fromTyperState = false): diff --git a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala index 153212522541..75c698a28ee4 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package reporting -import core.Contexts._ +import core.Contexts.* import Diagnostic.Error /** diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala index 71b2636ab8ed..d8426aa8781e 100644 --- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala +++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala @@ -4,7 +4,7 @@ package reporting import scala.collection.mutable import util.SourceFile -import core.Contexts._ +import core.Contexts.* /** This trait implements `isHidden` so that multiple messages per position * are suppressed, unless they are of increasing severity. 
*/ diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala index 5303ccd7f219..29b5bccb7714 100644 --- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala +++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala @@ -4,7 +4,7 @@ package reporting import scala.language.unsafeNulls -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.util.SourcePosition import java.util.regex.PatternSyntaxException @@ -36,8 +36,8 @@ final case class WConf(confs: List[(List[MessageFilter], Action)]): }.getOrElse(Action.Warning) object WConf: - import Action._ - import MessageFilter._ + import Action.* + import MessageFilter.* private type Conf = (List[MessageFilter], Action) diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 919b23cee8a2..5c7eb1d0f775 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -2,17 +2,17 @@ package dotty.tools package dotc package reporting -import core._ -import Contexts._ -import Decorators._, Symbols._, Names._, NameOps._, Types._, Flags._, Phases._ +import core.* +import Contexts.* +import Decorators.*, Symbols.*, Names.*, NameOps.*, Types.*, Flags.*, Phases.* import Denotations.SingleDenotation import SymDenotations.SymDenotation import NameKinds.{WildcardParamName, ContextFunctionParamName} import parsing.Scanners.Token import parsing.Tokens -import printing.Highlighting._ +import printing.Highlighting.* import printing.Formatting -import ErrorMessageID._ +import ErrorMessageID.* import ast.Trees import config.{Feature, ScalaVersion} import typer.ErrorReporting.{err, matchReductionAddendum, substitutableTypeSymbolsInScope} @@ -22,10 +22,10 @@ import typer.Inferencing import scala.util.control.NonFatal import StdNames.nme import printing.Formatting.hl -import ast.Trees._ +import ast.Trees.* import ast.untpd import ast.tpd -import transform.SymUtils._ +import transform.SymUtils.* import scala.util.matching.Regex import java.util.regex.Matcher.quoteReplacement import cc.CaptureSet.IdentityCaptRefMap @@ -1699,7 +1699,7 @@ class JavaEnumParentArgs(parent: Type)(using Context) class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(using Context) extends NamingMsg(CannotHaveSameNameAsID) { - import CannotHaveSameNameAs._ + import CannotHaveSameNameAs.* def reasonMessage(using Context): String = reason match { case CannotBeOverridden => "class definitions cannot be overridden" case DefinedInSelf(self) => @@ -2268,7 +2268,7 @@ extends NamingMsg(DoubleDefinitionID) { def erasedType = if ctx.erasedTypes then i" ${decl.info}" else "" def details(using Context): String = if (decl.isRealMethod && previousDecl.isRealMethod) { - import Signature.MatchDegree._ + import Signature.MatchDegree.* // compare the signatures when both symbols represent methods decl.signature.matchDegree(previousDecl.signature) match { diff --git a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala index 5bea0fb66ed0..2586ad8604c3 100644 --- a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala +++ b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala @@ -3,7 +3,7 @@ package rewrites import util.{SourceFile, Spans} import Spans.Span -import core.Contexts._ +import core.Contexts.* import collection.mutable import scala.annotation.tailrec import 
dotty.tools.dotc.reporting.Reporter @@ -114,6 +114,6 @@ object Rewrites { * as an optional setting. */ class Rewrites { - import Rewrites._ + import Rewrites.* private val patched = new PatchedFiles } diff --git a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala index 833cf7f2e0ff..07fa2027fbe6 100644 --- a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala +++ b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala @@ -3,11 +3,11 @@ package sbt import scala.language.unsafeNulls -import core._ -import Contexts._ -import Flags._ -import Symbols._ -import NameOps._ +import core.* +import Contexts.* +import Flags.* +import Symbols.* +import NameOps.* import xsbti.api import xsbti.api.SafeLazy.strict @@ -24,7 +24,7 @@ object APIUtils { val EmptyType = api.EmptyType.of() } - import Constants._ + import Constants.* /** Registers a dummy class for sbt's incremental compilation. * diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index a1b43e463316..5561a241c975 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -5,20 +5,20 @@ import scala.language.unsafeNulls import ExtractDependencies.internalError import ast.{Positioned, Trees, tpd} -import core._ -import core.Decorators._ -import Annotations._ -import Contexts._ -import Flags._ -import Phases._ -import Trees._ -import Types._ -import Symbols._ -import Names._ -import NameOps._ +import core.* +import core.Decorators.* +import Annotations.* +import Contexts.* +import Flags.* +import Phases.* +import Trees.* +import Types.* +import Symbols.* +import Names.* +import NameOps.* import inlines.Inlines import transform.ValueClasses -import transform.SymUtils._ +import transform.SymUtils.* import dotty.tools.io.File import java.io.PrintWriter @@ -135,7 +135,7 @@ object ExtractAPI: * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html#Hashing+an+API+representation */ private class ExtractAPICollector(using Context) extends ThunkHolder { - import tpd._ + import tpd.* import xsbti.api /** This cache is necessary for correctness, see the comment about inherited @@ -614,7 +614,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { apiType(lo), apiType(hi)) def apiVariance(v: Int): api.Variance = { - import api.Variance._ + import api.Variance.* if (v < 0) Contravariant else if (v > 0) Covariant else Invariant diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 60cfeeced209..fbf6e08f8b60 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -9,22 +9,22 @@ import java.util.{Arrays, EnumSet} import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.classpath.FileUtils.{isTasty, hasClassExtension, hasTastyExtension} -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.NameOps._ -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.NameOps.* +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.Phases.* +import dotty.tools.dotc.core.Symbols.* import 
dotty.tools.dotc.core.Denotations.StaleSymbol -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.transform.SymUtils.* import dotty.tools.dotc.util.{SrcPos, NoSourcePosition} import dotty.tools.io import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive, NoAbstractFile} import xsbti.UseScope import xsbti.api.DependencyContext -import xsbti.api.DependencyContext._ +import xsbti.api.DependencyContext.* import scala.jdk.CollectionConverters.* @@ -51,7 +51,7 @@ import scala.compiletime.uninitialized * @see ExtractAPI */ class ExtractDependencies extends Phase { - import ExtractDependencies._ + import ExtractDependencies.* override def phaseName: String = ExtractDependencies.name @@ -119,7 +119,7 @@ object ExtractDependencies { * inheritance" in the "Name hashing algorithm" section. */ private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd.TreeTraverser { thisTreeTraverser => - import tpd._ + import tpd.* private def addMemberRefDependency(sym: Symbol)(using Context): Unit = if (!ignoreDependency(sym)) { diff --git a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala index cacb10cf98bc..61baebbe9517 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala @@ -11,7 +11,7 @@ package sbt import scala.language.unsafeNulls -import xsbti.api._ +import xsbti.api.* import scala.util.Try diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala index 975d5480fe9b..784b23cfc78c 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala @@ -5,7 +5,7 @@ package semanticdb import dotty.tools.dotc.{semanticdb => s} import core.Contexts.Context -import core.Constants._ +import core.Constants.* object ConstantOps: extension (const: Constant) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 07f3fcea2e88..75805d4aed17 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -4,24 +4,24 @@ package semanticdb import scala.language.unsafeNulls -import core._ -import Phases._ -import ast.tpd._ +import core.* +import Phases.* +import ast.tpd.* import ast.Trees.{mods, WithEndMarker} -import Contexts._ -import Symbols._ -import Flags._ +import Contexts.* +import Symbols.* +import Flags.* import Names.Name import StdNames.nme -import NameOps._ +import NameOps.* import Denotations.StaleSymbol import util.Spans.Span import util.SourceFile -import transform.SymUtils._ +import transform.SymUtils.* import scala.collection.mutable import scala.annotation.{ threadUnsafe => tu, tailrec } -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* import scala.PartialFunction.condOpt import typer.ImportInfo.withRootImports diff --git a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala index b53ee787f501..fdf159836878 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala @@ -4,7 +4,7 @@ import dotty.tools.dotc.{semanticdb => s} import scala.collection.mutable import dotty.tools.dotc.semanticdb.Scala3.given 
-import SymbolInformation.Kind._ +import SymbolInformation.Kind.* import dotty.tools.dotc.util.SourceFile class SymbolInformationPrinter (symtab: PrinterSymtab): val notes = InfoNotes() diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala index f1302330bd8b..f49b00089712 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala @@ -2,11 +2,11 @@ package dotty.tools.dotc.semanticdb import dotty.tools.dotc.core import core.Symbols.{ Symbol , defn, NoSymbol } -import core.Contexts._ +import core.Contexts.* import core.Names import core.Names.Name import core.Types.{Type, TypeBounds} -import core.Flags._ +import core.Flags.* import core.NameKinds import core.StdNames.nme import SymbolInformation.{Kind => k} @@ -20,8 +20,8 @@ import scala.annotation.internal.sharable import scala.annotation.switch object Scala3: - import Symbols._ - import core.NameOps._ + import Symbols.* + import core.NameOps.* @sharable private val unicodeEscape = raw"\$$u(\p{XDigit}{4})".r @sharable private val locals = raw"local(\d+)".r diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala index 0b92ebddb02c..6376fb86d6c5 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package semanticdb -import core._ -import Contexts._ -import Symbols._ -import Flags._ +import core.* +import Contexts.* +import Symbols.* +import Flags.* import Names.Name import scala.annotation.tailrec diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala index b2f26e3e992f..af38315a857e 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.ast.tpd._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.NameKinds import dotty.tools.dotc.{semanticdb => s} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala b/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala index 6c6e69f12578..ea95e34a57b9 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc.semanticdb -import java.nio.file._ +import java.nio.file.* import java.nio.charset.StandardCharsets -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.semanticdb.Scala3.given @@ -92,7 +92,7 @@ object Tools: end metac private def schemaString(schema: Schema) = - import Schema._ + import Schema.* schema match case SEMANTICDB3 => "SemanticDB v3" case SEMANTICDB4 => "SemanticDB v4" @@ -101,7 +101,7 @@ object Tools: end schemaString private def languageString(language: Language) = - import Language._ + import Language.* language match case SCALA => "Scala" case JAVA => "Java" diff --git 
a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala index b0d032c7d83b..4293ecd6ca43 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala @@ -2,9 +2,9 @@ package dotty.tools package dotc package semanticdb -import core.Symbols._ +import core.Symbols.* import core.Contexts.Context -import core.Types._ +import core.Types.* import core.Annotations.Annotation import core.Flags import core.Names.Name @@ -18,7 +18,7 @@ import Scala3.{FakeSymbol, SemanticSymbol, WildcardTypeSymbol, TypeParamRefSymbo import dotty.tools.dotc.core.Names.Designator class TypeOps: - import SymbolScopeOps._ + import SymbolScopeOps.* import Scala3.given private val paramRefSymtab = mutable.Map[(LambdaType, Name), Symbol]() private val refinementSymtab = mutable.Map[(RefinedType, Name), Symbol]() @@ -245,7 +245,7 @@ class TypeOps: loop(tpe) def toSemanticType(sym: Symbol)(using LinkMode, SemanticSymbolBuilder, Context): s.Type = - import ConstantOps._ + import ConstantOps.* def loop(tpe: Type): s.Type = tpe match { case t if t.isFromJavaObject => loop(defn.AnyType) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala index c646e67b69ad..2d2621c34390 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable sealed trait Access extends SemanticdbGeneratedSealedOneof derives CanEqual { @@ -18,10 +18,10 @@ sealed trait Access extends SemanticdbGeneratedSealedOneof derives CanEqual { object Access { case object Empty extends dotty.tools.dotc.semanticdb.Access - + sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Access def defaultInstance: dotty.tools.dotc.semanticdb.Access = Empty - + implicit val AccessTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] { override def toCustom(__base: dotty.tools.dotc.semanticdb.AccessMessage): dotty.tools.dotc.semanticdb.Access = __base.sealedValue match { case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess => __v.value @@ -90,7 +90,7 @@ final case class AccessMessage( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.privateAccess.foreach { __v => @@ -152,10 +152,10 @@ final case class AccessMessage( def withPublicAccess(__v: dotty.tools.dotc.semanticdb.PublicAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess(__v)) def clearSealedValue: AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty) def withSealedValue(__v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue): AccessMessage = copy(sealedValue = __v) - - - - + + + + def toAccess: dotty.tools.dotc.semanticdb.Access = dotty.tools.dotc.semanticdb.Access.AccessTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Access]) } @@ -190,12 +190,12 @@ object AccessMessage extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.do sealedValue = __sealedValue ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.AccessMessage( sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty ) @@ -226,7 +226,7 @@ object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.do override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") } - + @SerialVersionUID(0L) final case class PrivateAccess(value: dotty.tools.dotc.semanticdb.PrivateAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual { type ValueType = dotty.tools.dotc.semanticdb.PrivateAccess @@ -298,10 +298,10 @@ final case class PrivateAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateAccess]) } @@ -319,12 +319,12 @@ object PrivateAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.do dotty.tools.dotc.semanticdb.PrivateAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateAccess( ) def of( @@ -339,10 +339,10 @@ final case class PrivateThisAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateThisAccess]) } @@ -360,12 +360,12 @@ object PrivateThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.tool dotty.tools.dotc.semanticdb.PrivateThisAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateThisAccess( ) def of( @@ -382,7 +382,7 @@ final case class PrivateWithinAccess( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = symbol if (!__value.isEmpty) { @@ -398,7 +398,7 @@ final case class PrivateWithinAccess( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -409,10 +409,10 @@ final case class PrivateWithinAccess( }; } def withSymbol(__v: _root_.scala.Predef.String): PrivateWithinAccess = copy(symbol = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateWithinAccess]) } @@ -434,12 +434,12 @@ object PrivateWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty.to symbol = __symbol ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateWithinAccess( symbol = "" ) @@ -458,10 +458,10 @@ final case class ProtectedAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedAccess]) } @@ -479,12 +479,12 @@ object ProtectedAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools. 
dotty.tools.dotc.semanticdb.ProtectedAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedAccess( ) def of( @@ -499,10 +499,10 @@ final case class ProtectedThisAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedThisAccess]) } @@ -520,12 +520,12 @@ object ProtectedThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.to dotty.tools.dotc.semanticdb.ProtectedThisAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedThisAccess( ) def of( @@ -542,7 +542,7 @@ final case class ProtectedWithinAccess( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = symbol if (!__value.isEmpty) { @@ -558,7 +558,7 @@ final case class ProtectedWithinAccess( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -569,10 +569,10 @@ final case class ProtectedWithinAccess( }; } def withSymbol(__v: _root_.scala.Predef.String): ProtectedWithinAccess = copy(symbol = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedWithinAccess]) } @@ -594,12 +594,12 @@ object ProtectedWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty. symbol = __symbol ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedWithinAccess( symbol = "" ) @@ -618,10 +618,10 @@ final case class PublicAccess( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PublicAccess]) } @@ -639,12 +639,12 @@ object PublicAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot dotty.tools.dotc.semanticdb.PublicAccess( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.PublicAccess( ) def of( diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala index 2cb478d89e2d..a4f076585a50 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -16,7 +16,7 @@ final case class Annotation( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toBase(tpe) if (__value.serializedSize != 0) { @@ -32,7 +32,7 @@ final case class Annotation( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -45,10 +45,10 @@ final case class Annotation( }; } def withTpe(__v: dotty.tools.dotc.semanticdb.Type): Annotation = copy(tpe = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Annotation]) } @@ -70,12 +70,12 @@ object 
Annotation extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. tpe = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Annotation( tpe = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala index 0ca96d9ae8c6..91bbaa75e654 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable sealed trait Constant extends SemanticdbGeneratedSealedOneof derives CanEqual { @@ -18,10 +18,10 @@ sealed trait Constant extends SemanticdbGeneratedSealedOneof derives CanEqual object Constant { case object Empty extends dotty.tools.dotc.semanticdb.Constant - + sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Constant def defaultInstance: dotty.tools.dotc.semanticdb.Constant = Empty - + implicit val ConstantTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] { override def toCustom(__base: dotty.tools.dotc.semanticdb.ConstantMessage): dotty.tools.dotc.semanticdb.Constant = __base.sealedValue match { case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant => __v.value @@ -114,7 +114,7 @@ final case class ConstantMessage( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.unitConstant.foreach { __v => @@ -208,10 +208,10 @@ final case class ConstantMessage( def withNullConstant(__v: dotty.tools.dotc.semanticdb.NullConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant(__v)) def clearSealedValue: ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty) def withSealedValue(__v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue): ConstantMessage = copy(sealedValue = __v) - - - - + + + + def toConstant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.Constant.ConstantTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Constant]) } @@ -254,12 +254,12 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools. sealedValue = __sealedValue ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ConstantMessage( sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty ) @@ -298,7 +298,7 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools. 
override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") } - + @SerialVersionUID(0L) final case class UnitConstant(value: dotty.tools.dotc.semanticdb.UnitConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { type ValueType = dotty.tools.dotc.semanticdb.UnitConstant @@ -402,10 +402,10 @@ final case class UnitConstant( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnitConstant]) } @@ -423,12 +423,12 @@ object UnitConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot dotty.tools.dotc.semanticdb.UnitConstant( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.UnitConstant( ) def of( @@ -445,7 +445,7 @@ final case class BooleanConstant( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != false) { @@ -461,7 +461,7 @@ final case class BooleanConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -472,10 +472,10 @@ final case class BooleanConstant( }; } def withValue(__v: _root_.scala.Boolean): BooleanConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.BooleanConstant]) } @@ -497,12 +497,12 @@ object BooleanConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools. value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.BooleanConstant( value = false ) @@ -523,7 +523,7 @@ final case class ByteConstant( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0) { @@ -539,7 +539,7 @@ final case class ByteConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -550,10 +550,10 @@ final case class ByteConstant( }; } def withValue(__v: _root_.scala.Int): ByteConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByteConstant]) } @@ -575,12 +575,12 @@ object ByteConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ByteConstant( value = 0 ) @@ -601,7 +601,7 @@ final case class ShortConstant( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0) { @@ -617,7 +617,7 @@ final case class ShortConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -628,10 +628,10 @@ final case class ShortConstant( }; } def withValue(__v: _root_.scala.Int): ShortConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ShortConstant]) } @@ -653,12 +653,12 @@ object ShortConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.do value = __value ) } - - - - - - + + + + + + lazy val 
defaultInstance = dotty.tools.dotc.semanticdb.ShortConstant( value = 0 ) @@ -679,7 +679,7 @@ final case class CharConstant( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0) { @@ -695,7 +695,7 @@ final case class CharConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -706,10 +706,10 @@ final case class CharConstant( }; } def withValue(__v: _root_.scala.Int): CharConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.CharConstant]) } @@ -731,12 +731,12 @@ object CharConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.CharConstant( value = 0 ) @@ -757,7 +757,7 @@ final case class IntConstant( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0) { @@ -773,7 +773,7 @@ final case class IntConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -784,10 +784,10 @@ final case class IntConstant( }; } def withValue(__v: _root_.scala.Int): IntConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntConstant]) } @@ -809,12 +809,12 @@ object IntConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.IntConstant( value = 0 ) @@ -835,7 +835,7 @@ final case class LongConstant( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0L) { @@ -851,7 +851,7 @@ final case class LongConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -862,10 +862,10 @@ final case class LongConstant( }; } def withValue(__v: _root_.scala.Long): LongConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LongConstant]) } @@ -887,12 +887,12 @@ object LongConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.LongConstant( value = 0L ) @@ -913,7 +913,7 @@ final case class FloatConstant( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0.0f) { @@ -929,7 +929,7 @@ final case class FloatConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -940,10 +940,10 @@ final case class FloatConstant( }; } def withValue(__v: _root_.scala.Float): FloatConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FloatConstant]) } @@ -965,12 +965,12 @@ object FloatConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.do value = __value ) } - - - - - - + + + + + + lazy val 
defaultInstance = dotty.tools.dotc.semanticdb.FloatConstant( value = 0.0f ) @@ -991,7 +991,7 @@ final case class DoubleConstant( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (__value != 0.0) { @@ -1007,7 +1007,7 @@ final case class DoubleConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1018,10 +1018,10 @@ final case class DoubleConstant( }; } def withValue(__v: _root_.scala.Double): DoubleConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.DoubleConstant]) } @@ -1043,12 +1043,12 @@ object DoubleConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.d value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.DoubleConstant( value = 0.0 ) @@ -1069,7 +1069,7 @@ final case class StringConstant( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = value if (!__value.isEmpty) { @@ -1085,7 +1085,7 @@ final case class StringConstant( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -1096,10 +1096,10 @@ final case class StringConstant( }; } def withValue(__v: _root_.scala.Predef.String): StringConstant = copy(value = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StringConstant]) } @@ -1121,12 +1121,12 @@ object StringConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.d value = __value ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.StringConstant( value = "" ) @@ -1145,10 +1145,10 @@ final case class NullConstant( final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.NullConstant]) } @@ -1166,12 +1166,12 @@ object NullConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot dotty.tools.dotc.semanticdb.NullConstant( ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.NullConstant( ) def of( diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala index cc8aa82bf8ea..5917ab82f59f 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -22,14 +22,14 @@ final case class Diagnostic( val __value = range.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; - + { val __value = severity.value if (__value != 0) { __size += SemanticdbOutputStream.computeEnumSize(2, __value) } }; - + { val __value = message if (!__value.isEmpty) { @@ -45,7 +45,7 @@ final case class Diagnostic( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { 
range.foreach { __v => @@ -72,10 +72,10 @@ final case class Diagnostic( def withRange(__v: dotty.tools.dotc.semanticdb.Range): Diagnostic = copy(range = Option(__v)) def withSeverity(__v: dotty.tools.dotc.semanticdb.Diagnostic.Severity): Diagnostic = copy(severity = __v) def withMessage(__v: _root_.scala.Predef.String): Diagnostic = copy(message = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Diagnostic]) } @@ -105,12 +105,12 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. message = __message ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Diagnostic( range = _root_.scala.None, severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.UNKNOWN_SEVERITY, @@ -123,49 +123,49 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. def isWarning: _root_.scala.Boolean = false def isInformation: _root_.scala.Boolean = false def isHint: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized]) } - + object Severity { sealed trait Recognized extends Severity - - + + @SerialVersionUID(0L) case object UNKNOWN_SEVERITY extends Severity(0) with Severity.Recognized { val index = 0 val name = "UNKNOWN_SEVERITY" override def isUnknownSeverity: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object ERROR extends Severity(1) with Severity.Recognized { val index = 1 val name = "ERROR" override def isError: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object WARNING extends Severity(2) with Severity.Recognized { val index = 2 val name = "WARNING" override def isWarning: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object INFORMATION extends Severity(3) with Severity.Recognized { val index = 3 val name = "INFORMATION" override def isInformation: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object HINT extends Severity(4) with Severity.Recognized { val index = 4 val name = "HINT" override def isHint: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Severity(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_SEVERITY, ERROR, WARNING, INFORMATION, HINT) @@ -177,8 +177,8 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. 
case 4 => HINT case __other => Unrecognized(__other) } - - + + } final val RANGE_FIELD_NUMBER = 1 final val SEVERITY_FIELD_NUMBER = 2 diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala index 07fbda4991af..695dea973016 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -17,14 +17,14 @@ final case class Documentation( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = message if (!__value.isEmpty) { __size += SemanticdbOutputStream.computeStringSize(1, __value) } }; - + { val __value = format.value if (__value != 0) { @@ -40,7 +40,7 @@ final case class Documentation( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -58,10 +58,10 @@ final case class Documentation( } def withMessage(__v: _root_.scala.Predef.String): Documentation = copy(message = __v) def withFormat(__v: dotty.tools.dotc.semanticdb.Documentation.Format): Documentation = copy(format = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Documentation]) } @@ -87,12 +87,12 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do format = __format ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Documentation( message = "", format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML @@ -104,49 +104,49 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do def isJavadoc: _root_.scala.Boolean = false def isScaladoc: _root_.scala.Boolean = false def isKdoc: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized]) } - + object Format { sealed trait Recognized extends Format - - + + @SerialVersionUID(0L) case object HTML extends Format(0) with Format.Recognized { val index = 0 val name = "HTML" override def isHtml: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object MARKDOWN extends Format(1) with Format.Recognized { val index = 1 val name = "MARKDOWN" override def isMarkdown: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object JAVADOC extends Format(2) with Format.Recognized { val index = 2 val name = "JAVADOC" override def isJavadoc: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SCALADOC extends Format(3) with Format.Recognized { val index = 3 val name = "SCALADOC" override def isScaladoc: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object KDOC extends Format(4) with Format.Recognized { val index = 4 val name = "KDOC" override def isKdoc: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Format(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = 
scala.collection.immutable.Seq(HTML, MARKDOWN, JAVADOC, SCALADOC, KDOC) @@ -158,8 +158,8 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do case 4 => KDOC case __other => Unrecognized(__other) } - - + + } final val MESSAGE_FIELD_NUMBER = 1 final val FORMAT_FIELD_NUMBER = 2 diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala index c57a3d3cddc3..ef47e9020361 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable sealed abstract class Language(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { @@ -13,35 +13,35 @@ sealed abstract class Language(val value: _root_.scala.Int) extends SemanticdbG def isUnknownLanguage: _root_.scala.Boolean = false def isScala: _root_.scala.Boolean = false def isJava: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Language.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Language.Recognized]) } object Language { sealed trait Recognized extends Language - - + + @SerialVersionUID(0L) case object UNKNOWN_LANGUAGE extends Language(0) with Language.Recognized { val index = 0 val name = "UNKNOWN_LANGUAGE" override def isUnknownLanguage: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SCALA extends Language(1) with Language.Recognized { val index = 1 val name = "SCALA" override def isScala: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object JAVA extends Language(2) with Language.Recognized { val index = 2 val name = "JAVA" override def isJava: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Language(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_LANGUAGE, SCALA, JAVA) @@ -51,6 +51,6 @@ object Language { case 2 => JAVA case __other => Unrecognized(__other) } - - + + } \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala index a3667e944ae4..756b7711d304 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -17,7 +17,7 @@ final case class Location( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = uri if (!__value.isEmpty) { @@ -37,7 +37,7 @@ final case class Location( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -57,10 +57,10 @@ final case class Location( def getRange: dotty.tools.dotc.semanticdb.Range = 
range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance) def clearRange: Location = copy(range = _root_.scala.None) def withRange(__v: dotty.tools.dotc.semanticdb.Range): Location = copy(range = Option(__v)) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Location]) } @@ -86,12 +86,12 @@ object Location extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se range = __range ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Location( uri = "", range = _root_.scala.None diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala index d273664bdf6a..0f7436524ee1 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -19,28 +19,28 @@ final case class Range( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = startLine if (__value != 0) { __size += SemanticdbOutputStream.computeInt32Size(1, __value) } }; - + { val __value = startCharacter if (__value != 0) { __size += SemanticdbOutputStream.computeInt32Size(2, __value) } }; - + { val __value = endLine if (__value != 0) { __size += SemanticdbOutputStream.computeInt32Size(3, __value) } }; - + { val __value = endCharacter if (__value != 0) { @@ -56,7 +56,7 @@ final case class Range( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -88,10 +88,10 @@ final case class Range( def withStartCharacter(__v: _root_.scala.Int): Range = copy(startCharacter = __v) def withEndLine(__v: _root_.scala.Int): Range = copy(endLine = __v) def withEndCharacter(__v: _root_.scala.Int): Range = copy(endCharacter = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Range]) } @@ -125,12 +125,12 @@ object Range extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.seman endCharacter = __endCharacter ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Range( startLine = 0, startCharacter = 0, diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala index 841e69166feb..c9239c85f409 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable sealed abstract class Schema(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { @@ -13,35 +13,35 @@ sealed abstract class Schema(val value: _root_.scala.Int) extends SemanticdbGen def isLegacy: _root_.scala.Boolean = false def isSemanticdb3: _root_.scala.Boolean = false def isSemanticdb4: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Schema.Recognized] = if (isUnrecognized) _root_.scala.None else 
_root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Schema.Recognized]) } object Schema { sealed trait Recognized extends Schema - - + + @SerialVersionUID(0L) case object LEGACY extends Schema(0) with Schema.Recognized { val index = 0 val name = "LEGACY" override def isLegacy: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SEMANTICDB3 extends Schema(3) with Schema.Recognized { val index = 1 val name = "SEMANTICDB3" override def isSemanticdb3: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SEMANTICDB4 extends Schema(4) with Schema.Recognized { val index = 2 val name = "SEMANTICDB4" override def isSemanticdb4: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Schema(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(LEGACY, SEMANTICDB3, SEMANTICDB4) @@ -51,6 +51,6 @@ object Schema { case 4 => SEMANTICDB4 case __other => Unrecognized(__other) } - - + + } \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala index 655ebe75185e..7a2ee40478c4 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -34,7 +34,7 @@ final case class Scope( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { symlinks.foreach { __v => @@ -56,10 +56,10 @@ final case class Scope( def addHardlinks(__vs: dotty.tools.dotc.semanticdb.SymbolInformation *): Scope = addAllHardlinks(__vs) def addAllHardlinks(__vs: Iterable[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = hardlinks ++ __vs) def withHardlinks(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Scope]) } @@ -85,12 +85,12 @@ object Scope extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.seman hardlinks = __hardlinks.result() ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.Scope( symlinks = _root_.scala.Seq.empty, hardlinks = _root_.scala.Seq.empty diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala index 228e2f02349b..7a0331be0ed3 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable sealed trait Signature extends SemanticdbGeneratedSealedOneof derives CanEqual { @@ -18,10 +18,10 @@ sealed trait Signature extends SemanticdbGeneratedSealedOneof derives CanEqual object Signature { case object Empty extends dotty.tools.dotc.semanticdb.Signature - + sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Signature def defaultInstance: 
dotty.tools.dotc.semanticdb.Signature = Empty - + implicit val SignatureTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] { override def toCustom(__base: dotty.tools.dotc.semanticdb.SignatureMessage): dotty.tools.dotc.semanticdb.Signature = __base.sealedValue match { case __v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature => __v.value @@ -72,7 +72,7 @@ final case class SignatureMessage( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { sealedValue.classSignature.foreach { __v => @@ -110,10 +110,10 @@ final case class SignatureMessage( def withValueSignature(__v: dotty.tools.dotc.semanticdb.ValueSignature): SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature(__v)) def clearSealedValue: SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty) def withSealedValue(__v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue): SignatureMessage = copy(sealedValue = __v) - - - - + + + + def toSignature: dotty.tools.dotc.semanticdb.Signature = dotty.tools.dotc.semanticdb.Signature.SignatureTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Signature]) } @@ -142,12 +142,12 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools sealedValue = __sealedValue ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.SignatureMessage( sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty ) @@ -172,7 +172,7 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") } - + @SerialVersionUID(0L) final case class ClassSignature(value: dotty.tools.dotc.semanticdb.ClassSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual { type ValueType = dotty.tools.dotc.semanticdb.ClassSignature @@ -233,7 +233,7 @@ final case class ClassSignature( val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_parents.toBase(__item) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } - + { val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toBase(self) if (__value.serializedSize != 0) { @@ -253,7 +253,7 @@ final case class ClassSignature( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { typeParameters.foreach { __v => @@ -294,10 +294,10 @@ final case class ClassSignature( def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearDeclarations: ClassSignature = copy(declarations = _root_.scala.None) def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): ClassSignature = copy(declarations = Option(__v)) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ClassSignature]) } @@ -331,12 +331,12 @@ object ClassSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.d declarations = __declarations ) } - - - - - - + + + + + + lazy val 
defaultInstance = dotty.tools.dotc.semanticdb.ClassSignature( typeParameters = _root_.scala.None, parents = _root_.scala.Seq.empty, @@ -383,7 +383,7 @@ final case class MethodSignature( val __value = __item __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } - + { val __value = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toBase(returnType) if (__value.serializedSize != 0) { @@ -399,7 +399,7 @@ final case class MethodSignature( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { typeParameters.foreach { __v => @@ -431,10 +431,10 @@ final case class MethodSignature( def addAllParameterLists(__vs: Iterable[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = parameterLists ++ __vs) def withParameterLists(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = __v) def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): MethodSignature = copy(returnType = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MethodSignature]) } @@ -464,12 +464,12 @@ object MethodSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools. returnType = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.MethodSignature( typeParameters = _root_.scala.None, parameterLists = _root_.scala.Seq.empty, @@ -506,14 +506,14 @@ final case class TypeSignature( val __value = typeParameters.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; - + { val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toBase(lowerBound) if (__value.serializedSize != 0) { __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } }; - + { val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toBase(upperBound) if (__value.serializedSize != 0) { @@ -529,7 +529,7 @@ final case class TypeSignature( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { typeParameters.foreach { __v => @@ -560,10 +560,10 @@ final case class TypeSignature( def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): TypeSignature = copy(typeParameters = Option(__v)) def withLowerBound(__v: dotty.tools.dotc.semanticdb.Type): TypeSignature = copy(lowerBound = __v) def withUpperBound(__v: dotty.tools.dotc.semanticdb.Type): TypeSignature = copy(upperBound = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeSignature]) } @@ -593,12 +593,12 @@ object TypeSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.do upperBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(__upperBound.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeSignature( typeParameters = _root_.scala.None, lowerBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), @@ -631,7 +631,7 @@ final case class ValueSignature( private[this] var 
__serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toBase(tpe) if (__value.serializedSize != 0) { @@ -647,7 +647,7 @@ final case class ValueSignature( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -660,10 +660,10 @@ final case class ValueSignature( }; } def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ValueSignature = copy(tpe = __v) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ValueSignature]) } @@ -685,12 +685,12 @@ object ValueSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.d tpe = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.ValueSignature( tpe = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala index 93fbb207c4f6..92917cb23a41 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala @@ -5,7 +5,7 @@ // Protofile syntax: PROTO3 package dotty.tools.dotc.semanticdb -import dotty.tools.dotc.semanticdb.internal._ +import dotty.tools.dotc.semanticdb.internal.* import scala.annotation.internal.sharable @SerialVersionUID(0L) @@ -25,42 +25,42 @@ final case class SymbolInformation( private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 private[this] def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 - + { val __value = symbol if (!__value.isEmpty) { __size += SemanticdbOutputStream.computeStringSize(1, __value) } }; - + { val __value = language.value if (__value != 0) { __size += SemanticdbOutputStream.computeEnumSize(16, __value) } }; - + { val __value = kind.value if (__value != 0) { __size += SemanticdbOutputStream.computeEnumSize(3, __value) } }; - + { val __value = properties if (__value != 0) { __size += SemanticdbOutputStream.computeInt32Size(4, __value) } }; - + { val __value = displayName if (!__value.isEmpty) { __size += SemanticdbOutputStream.computeStringSize(5, __value) } }; - + { val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toBase(signature) if (__value.serializedSize != 0) { @@ -71,7 +71,7 @@ final case class SymbolInformation( val __value = __item __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } - + { val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toBase(access) if (__value.serializedSize != 0) { @@ -95,7 +95,7 @@ final case class SymbolInformation( __serializedSizeMemoized = __size } __size - 1 - + } def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { { @@ -179,10 +179,10 @@ final case class SymbolInformation( def getDocumentation: dotty.tools.dotc.semanticdb.Documentation = documentation.getOrElse(dotty.tools.dotc.semanticdb.Documentation.defaultInstance) def clearDocumentation: SymbolInformation = copy(documentation = _root_.scala.None) def withDocumentation(__v: 
dotty.tools.dotc.semanticdb.Documentation): SymbolInformation = copy(documentation = Option(__v)) - - - - + + + + // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolInformation]) } @@ -240,12 +240,12 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool documentation = __documentation ) } - - - - - - + + + + + + lazy val defaultInstance = dotty.tools.dotc.semanticdb.SymbolInformation( symbol = "", language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE, @@ -276,126 +276,126 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool def isClass: _root_.scala.Boolean = false def isTrait: _root_.scala.Boolean = false def isInterface: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized]) } - + object Kind { sealed trait Recognized extends Kind - - + + @SerialVersionUID(0L) case object UNKNOWN_KIND extends Kind(0) with Kind.Recognized { val index = 0 val name = "UNKNOWN_KIND" override def isUnknownKind: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object LOCAL extends Kind(19) with Kind.Recognized { val index = 1 val name = "LOCAL" override def isLocal: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object FIELD extends Kind(20) with Kind.Recognized { val index = 2 val name = "FIELD" override def isField: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object METHOD extends Kind(3) with Kind.Recognized { val index = 3 val name = "METHOD" override def isMethod: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object CONSTRUCTOR extends Kind(21) with Kind.Recognized { val index = 4 val name = "CONSTRUCTOR" override def isConstructor: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object MACRO extends Kind(6) with Kind.Recognized { val index = 5 val name = "MACRO" override def isMacro: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object TYPE extends Kind(7) with Kind.Recognized { val index = 6 val name = "TYPE" override def isType: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object PARAMETER extends Kind(8) with Kind.Recognized { val index = 7 val name = "PARAMETER" override def isParameter: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SELF_PARAMETER extends Kind(17) with Kind.Recognized { val index = 8 val name = "SELF_PARAMETER" override def isSelfParameter: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object TYPE_PARAMETER extends Kind(9) with Kind.Recognized { val index = 9 val name = "TYPE_PARAMETER" override def isTypeParameter: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object OBJECT extends Kind(10) with Kind.Recognized { val index = 10 val name = "OBJECT" override def isObject: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object PACKAGE extends Kind(11) with Kind.Recognized { val index = 11 val name = "PACKAGE" override def isPackage: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object PACKAGE_OBJECT extends Kind(12) with Kind.Recognized { val index = 12 val name = "PACKAGE_OBJECT" override def isPackageObject: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object CLASS extends Kind(13) with Kind.Recognized { val index = 13 val name = "CLASS" override def isClass: 
_root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object TRAIT extends Kind(14) with Kind.Recognized { val index = 14 val name = "TRAIT" override def isTrait: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object INTERFACE extends Kind(18) with Kind.Recognized { val index = 15 val name = "INTERFACE" override def isInterface: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Kind(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_KIND, LOCAL, FIELD, METHOD, CONSTRUCTOR, MACRO, TYPE, PARAMETER, SELF_PARAMETER, TYPE_PARAMETER, OBJECT, PACKAGE, PACKAGE_OBJECT, CLASS, TRAIT, INTERFACE) @@ -418,8 +418,8 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool case 21 => CONSTRUCTOR case __other => Unrecognized(__other) } - - + + } sealed abstract class Property(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { type EnumType = Property @@ -444,161 +444,161 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool def isTransparent: _root_.scala.Boolean = false def isInfix: _root_.scala.Boolean = false def isOpaque: _root_.scala.Boolean = false - + final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized]) } - + object Property { sealed trait Recognized extends Property - - + + @SerialVersionUID(0L) case object UNKNOWN_PROPERTY extends Property(0) with Property.Recognized { val index = 0 val name = "UNKNOWN_PROPERTY" override def isUnknownProperty: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object ABSTRACT extends Property(4) with Property.Recognized { val index = 1 val name = "ABSTRACT" override def isAbstract: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object FINAL extends Property(8) with Property.Recognized { val index = 2 val name = "FINAL" override def isFinal: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object SEALED extends Property(16) with Property.Recognized { val index = 3 val name = "SEALED" override def isSealed: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object IMPLICIT extends Property(32) with Property.Recognized { val index = 4 val name = "IMPLICIT" override def isImplicit: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object LAZY extends Property(64) with Property.Recognized { val index = 5 val name = "LAZY" override def isLazy: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object CASE extends Property(128) with Property.Recognized { val index = 6 val name = "CASE" override def isCase: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object COVARIANT extends Property(256) with Property.Recognized { val index = 7 val name = "COVARIANT" override def isCovariant: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object CONTRAVARIANT extends Property(512) with Property.Recognized { val index = 8 val name = "CONTRAVARIANT" override def isContravariant: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object VAL extends Property(1024) with Property.Recognized { val index = 9 val name = "VAL" override def isVal: _root_.scala.Boolean = true } - + @SerialVersionUID(0L) case object VAR extends Property(2048) with 
      val index = 10
      val name = "VAR"
      override def isVar: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object STATIC extends Property(4096) with Property.Recognized {
      val index = 11
      val name = "STATIC"
      override def isStatic: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object PRIMARY extends Property(8192) with Property.Recognized {
      val index = 12
      val name = "PRIMARY"
      override def isPrimary: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object ENUM extends Property(16384) with Property.Recognized {
      val index = 13
      val name = "ENUM"
      override def isEnum: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object DEFAULT extends Property(32768) with Property.Recognized {
      val index = 14
      val name = "DEFAULT"
      override def isDefault: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object GIVEN extends Property(65536) with Property.Recognized {
      val index = 15
      val name = "GIVEN"
      override def isGiven: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object INLINE extends Property(131072) with Property.Recognized {
      val index = 16
      val name = "INLINE"
      override def isInline: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object OPEN extends Property(262144) with Property.Recognized {
      val index = 17
      val name = "OPEN"
      override def isOpen: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object TRANSPARENT extends Property(524288) with Property.Recognized {
      val index = 18
      val name = "TRANSPARENT"
      override def isTransparent: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object INFIX extends Property(1048576) with Property.Recognized {
      val index = 19
      val name = "INFIX"
      override def isInfix: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object OPAQUE extends Property(2097152) with Property.Recognized {
      val index = 20
      val name = "OPAQUE"
      override def isOpaque: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Property(unrecognizedValue) with SemanticdbUnrecognizedEnum
    lazy val values = scala.collection.immutable.Seq(UNKNOWN_PROPERTY, ABSTRACT, FINAL, SEALED, IMPLICIT, LAZY, CASE, COVARIANT, CONTRAVARIANT, VAL, VAR, STATIC, PRIMARY, ENUM, DEFAULT, GIVEN, INLINE, OPEN, TRANSPARENT, INFIX, OPAQUE)
@@ -626,8 +626,8 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
       case 2097152 => OPAQUE
       case __other => Unrecognized(__other)
     }
-
-
+
+
   }
   final val SYMBOL_FIELD_NUMBER = 1
   final val LANGUAGE_FIELD_NUMBER = 16
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala
index 5d7670dfdd32..39a5228ed02d 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 package dotty.tools.dotc.semanticdb

-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable

 @SerialVersionUID(0L)
@@ -22,14 +22,14 @@ final case class SymbolOccurrence(
         val __value = range.get
         __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
       };
-
+
       {
         val __value = symbol
         if (!__value.isEmpty) {
           __size += SemanticdbOutputStream.computeStringSize(2, __value)
         }
       };
-
+
       {
         val __value = role.value
         if (__value != 0) {
@@ -45,7 +45,7 @@ final case class SymbolOccurrence(
         __serializedSizeMemoized = __size
       }
       __size - 1
-
+
     }
     def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
       range.foreach { __v =>
@@ -72,10 +72,10 @@ final case class SymbolOccurrence(
     def withRange(__v: dotty.tools.dotc.semanticdb.Range): SymbolOccurrence = copy(range = Option(__v))
     def withSymbol(__v: _root_.scala.Predef.String): SymbolOccurrence = copy(symbol = __v)
     def withRole(__v: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role): SymbolOccurrence = copy(role = __v)
-
-
-
-
+
+
+
+
     // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolOccurrence])
 }
@@ -105,12 +105,12 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools
         role = __role
     )
   }
-
-
-
-
-
-
+
+
+
+
+
+
   lazy val defaultInstance = dotty.tools.dotc.semanticdb.SymbolOccurrence(
     range = _root_.scala.None,
     symbol = "",
@@ -121,35 +121,35 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools
     def isUnknownRole: _root_.scala.Boolean = false
     def isReference: _root_.scala.Boolean = false
     def isDefinition: _root_.scala.Boolean = false
-
+
     final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized])
   }
-
+
   object Role {
    sealed trait Recognized extends Role
-
-
+
+
    @SerialVersionUID(0L)
    case object UNKNOWN_ROLE extends Role(0) with Role.Recognized {
      val index = 0
      val name = "UNKNOWN_ROLE"
      override def isUnknownRole: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object REFERENCE extends Role(1) with Role.Recognized {
      val index = 1
      val name = "REFERENCE"
      override def isReference: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    case object DEFINITION extends Role(2) with Role.Recognized {
      val index = 2
      val name = "DEFINITION"
      override def isDefinition: _root_.scala.Boolean = true
    }
-
+
    @SerialVersionUID(0L)
    final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Role(unrecognizedValue) with SemanticdbUnrecognizedEnum
    lazy val values = scala.collection.immutable.Seq(UNKNOWN_ROLE, REFERENCE, DEFINITION)
@@ -159,8 +159,8 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools
       case 2 => DEFINITION
       case __other => Unrecognized(__other)
     }
-
-
+
+
   }
   final val RANGE_FIELD_NUMBER = 1
   final val SYMBOL_FIELD_NUMBER = 2
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala
index 3c6fcfbf4c6a..7916fdb2e07a 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 package dotty.tools.dotc.semanticdb

-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable

 @SerialVersionUID(0L)
@@ -21,7 +21,7 @@ final case class Synthetic(
         val __value = range.get
         __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
       };
-
+
       {
         val __value = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toBase(tree)
         if (__value.serializedSize != 0) {
@@ -37,7 +37,7 @@ final case class Synthetic(
         __serializedSizeMemoized = __size
       }
       __size - 1
-
+
     }
     def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      range.foreach { __v =>
@@ -59,10 +59,10 @@ final case class Synthetic(
     def clearRange: Synthetic = copy(range = _root_.scala.None)
     def withRange(__v: dotty.tools.dotc.semanticdb.Range): Synthetic = copy(range = Option(__v))
     def withTree(__v: dotty.tools.dotc.semanticdb.Tree): Synthetic = copy(tree = __v)
-
-
-
-
+
+
+
+
     // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Synthetic])
 }
@@ -88,12 +88,12 @@ object Synthetic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
         tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(__tree.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance))
     )
   }
-
-
-
-
-
-
+
+
+
+
+
+
   lazy val defaultInstance = dotty.tools.dotc.semanticdb.Synthetic(
     range = _root_.scala.None,
    tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala
index f0347e86d9e3..80322ec45e0e 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 package dotty.tools.dotc.semanticdb

-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable

 @SerialVersionUID(0L)
@@ -24,35 +24,35 @@ final case class TextDocument(
     private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
     private[this] def __computeSerializedSize(): _root_.scala.Int = {
       var __size = 0
-
+
       {
         val __value = schema.value
         if (__value != 0) {
           __size += SemanticdbOutputStream.computeEnumSize(1, __value)
         }
       };
-
+
       {
         val __value = uri
         if (!__value.isEmpty) {
           __size += SemanticdbOutputStream.computeStringSize(2, __value)
         }
       };
-
+
       {
         val __value = text
         if (!__value.isEmpty) {
           __size += SemanticdbOutputStream.computeStringSize(3, __value)
         }
       };
-
+
       {
         val __value = md5
         if (!__value.isEmpty) {
           __size += SemanticdbOutputStream.computeStringSize(11, __value)
         }
       };
-
+
       {
         val __value = language.value
         if (__value != 0) {
@@ -84,7 +84,7 @@ final case class TextDocument(
         __serializedSizeMemoized = __size
       }
       __size - 1
-
+
     }
     def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
       {
@@ -163,10 +163,10 @@ final case class TextDocument(
     def addSynthetics(__vs: dotty.tools.dotc.semanticdb.Synthetic *): TextDocument = addAllSynthetics(__vs)
     def addAllSynthetics(__vs: Iterable[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = synthetics ++ __vs)
     def withSynthetics(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = __v)
-
-
-
-
+
+
+
+
     // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocument])
 }
@@ -220,12 +220,12 @@ object TextDocument extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
         synthetics = __synthetics.result()
     )
   }
-
-
-
-
-
-
+
+
+
+
+
+
   lazy val defaultInstance = dotty.tools.dotc.semanticdb.TextDocument(
     schema = dotty.tools.dotc.semanticdb.Schema.LEGACY,
     uri = "",
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala
index 41b8e1b3f491..a35bc23bf665 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 package dotty.tools.dotc.semanticdb

-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable

 @SerialVersionUID(0L)
@@ -29,7 +29,7 @@ final case class TextDocuments(
         __serializedSizeMemoized = __size
       }
       __size - 1
-
+
     }
     def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
       documents.foreach { __v =>
@@ -43,10 +43,10 @@ final case class TextDocuments(
     def addDocuments(__vs: dotty.tools.dotc.semanticdb.TextDocument *): TextDocuments = addAllDocuments(__vs)
     def addAllDocuments(__vs: Iterable[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = documents ++ __vs)
     def withDocuments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = __v)
-
-
-
-
+
+
+
+
     // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocuments])
 }
@@ -68,12 +68,12 @@ object TextDocuments extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
         documents = __documents.result()
     )
   }
-
-
-
-
-
-
+
+
+
+
+
+
   lazy val defaultInstance = dotty.tools.dotc.semanticdb.TextDocuments(
     documents = _root_.scala.Seq.empty
   )
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala
index ed84d9b2f2d0..6a19494cd65a 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 package dotty.tools.dotc.semanticdb

-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable

 sealed trait Tree extends SemanticdbGeneratedSealedOneof derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Tree extends SemanticdbGeneratedSealedOneof derives CanEqual {
 object Tree {
   case object Empty extends dotty.tools.dotc.semanticdb.Tree
-
+
   sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Tree
   def defaultInstance: dotty.tools.dotc.semanticdb.Tree = Empty
-
+
   implicit val TreeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] {
     override def toCustom(__base: dotty.tools.dotc.semanticdb.TreeMessage): dotty.tools.dotc.semanticdb.Tree = __base.sealedValue match {
       case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree => __v.value
@@ -96,7 +96,7 @@ final case class TreeMessage(
         __serializedSizeMemoized = __size
       }
       __size - 1
-
+
     }
     def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
       sealedValue.applyTree.foreach { __v =>
@@ -166,10 +166,10 @@ final case class TreeMessage(
     def withTypeApplyTree(__v: dotty.tools.dotc.semanticdb.TypeApplyTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree(__v))
     def clearSealedValue: TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty)
     def withSealedValue(__v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue): TreeMessage = copy(sealedValue = __v)
-
-
-
-
+
+
+
+
     def toTree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Tree.TreeTypeMapper.toCustom(this)
     // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Tree])
 }
@@ -206,12 +206,12 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
        sealedValue = __sealedValue
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TreeMessage(
    sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty
  )
@@ -244,7 +244,7 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
      override def number: _root_.scala.Int = 0
      override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
    }
-
+
    @SerialVersionUID(0L)
    final case class ApplyTree(value: dotty.tools.dotc.semanticdb.ApplyTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual {
      type ValueType = dotty.tools.dotc.semanticdb.ApplyTree
@@ -327,7 +327,7 @@ final case class ApplyTree(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toBase(function)
        if (__value.serializedSize != 0) {
@@ -347,7 +347,7 @@ final case class ApplyTree(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -370,10 +370,10 @@ final case class ApplyTree(
    def addArguments(__vs: dotty.tools.dotc.semanticdb.Tree *): ApplyTree = addAllArguments(__vs)
    def addAllArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = arguments ++ __vs)
    def withArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ApplyTree])
 }
@@ -399,12 +399,12 @@ object ApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
        arguments = __arguments.result()
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ApplyTree(
    function = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
    arguments = _root_.scala.Seq.empty
@@ -438,7 +438,7 @@ final case class FunctionTree(
        val __value = __item
        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toBase(body)
        if (__value.serializedSize != 0) {
@@ -454,7 +454,7 @@ final case class FunctionTree(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      parameters.foreach { __v =>
@@ -477,10 +477,10 @@ final case class FunctionTree(
    def addAllParameters(__vs: Iterable[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = parameters ++ __vs)
    def withParameters(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = __v)
    def withBody(__v: dotty.tools.dotc.semanticdb.Tree): FunctionTree = copy(body = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FunctionTree])
 }
@@ -506,12 +506,12 @@ object FunctionTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
        body = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(__body.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.FunctionTree(
    parameters = _root_.scala.Seq.empty,
    body = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
@@ -538,7 +538,7 @@ final case class IdTree(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -554,7 +554,7 @@ final case class IdTree(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -565,10 +565,10 @@ final case class IdTree(
      };
    }
    def withSymbol(__v: _root_.scala.Predef.String): IdTree = copy(symbol = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IdTree])
 }
@@ -590,12 +590,12 @@ object IdTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.sema
        symbol = __symbol
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.IdTree(
    symbol = ""
  )
@@ -616,7 +616,7 @@ final case class LiteralTree(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toBase(constant)
        if (__value.serializedSize != 0) {
@@ -632,7 +632,7 @@ final case class LiteralTree(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -645,10 +645,10 @@ final case class LiteralTree(
      };
    }
    def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): LiteralTree = copy(constant = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LiteralTree])
 }
@@ -670,12 +670,12 @@ object LiteralTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
        constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.LiteralTree(
    constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
  )
@@ -699,14 +699,14 @@ final case class MacroExpansionTree(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toBase(beforeExpansion)
        if (__value.serializedSize != 0) {
          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
      };
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -722,7 +722,7 @@ final case class MacroExpansionTree(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -744,10 +744,10 @@ final case class MacroExpansionTree(
    }
    def withBeforeExpansion(__v: dotty.tools.dotc.semanticdb.Tree): MacroExpansionTree = copy(beforeExpansion = __v)
    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): MacroExpansionTree = copy(tpe = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MacroExpansionTree])
 }
@@ -773,12 +773,12 @@ object MacroExpansionTree extends SemanticdbGeneratedMessageCompanion[dotty.too
        tpe = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.MacroExpansionTree(
    beforeExpansion = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
    tpe = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -820,7 +820,7 @@ final case class OriginalTree(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      range.foreach { __v =>
@@ -833,10 +833,10 @@ final case class OriginalTree(
    def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance)
    def clearRange: OriginalTree = copy(range = _root_.scala.None)
    def withRange(__v: dotty.tools.dotc.semanticdb.Range): OriginalTree = copy(range = Option(__v))
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.OriginalTree])
 }
@@ -858,12 +858,12 @@ object OriginalTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
        range = __range
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.OriginalTree(
    range = _root_.scala.None
  )
@@ -885,7 +885,7 @@ final case class SelectTree(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toBase(qualifier)
        if (__value.serializedSize != 0) {
@@ -905,7 +905,7 @@ final case class SelectTree(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -927,10 +927,10 @@ final case class SelectTree(
    def getId: dotty.tools.dotc.semanticdb.IdTree = id.getOrElse(dotty.tools.dotc.semanticdb.IdTree.defaultInstance)
    def clearId: SelectTree = copy(id = _root_.scala.None)
    def withId(__v: dotty.tools.dotc.semanticdb.IdTree): SelectTree = copy(id = Option(__v))
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SelectTree])
 }
@@ -956,12 +956,12 @@ object SelectTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
        id = __id
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SelectTree(
    qualifier = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
    id = _root_.scala.None
@@ -989,7 +989,7 @@ final case class TypeApplyTree(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toBase(function)
        if (__value.serializedSize != 0) {
@@ -1009,7 +1009,7 @@ final case class TypeApplyTree(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -1032,10 +1032,10 @@ final case class TypeApplyTree(
    def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type *): TypeApplyTree = addAllTypeArguments(__vs)
    def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = typeArguments ++ __vs)
    def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeApplyTree])
 }
@@ -1061,12 +1061,12 @@ object TypeApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
        typeArguments = __typeArguments.result()
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeApplyTree(
    function = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
    typeArguments = _root_.scala.Seq.empty
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala
index be9cc6034f2c..8f675e82b802 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala
@@ -5,7 +5,7 @@
 // Protofile syntax: PROTO3
 package dotty.tools.dotc.semanticdb

-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
 import scala.annotation.internal.sharable

 sealed trait Type extends SemanticdbGeneratedSealedOneof derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Type extends SemanticdbGeneratedSealedOneof derives CanEqual {
 object Type {
   case object Empty extends dotty.tools.dotc.semanticdb.Type
-
+
   sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Type
   def defaultInstance: dotty.tools.dotc.semanticdb.Type = Empty
-
+
   implicit val TypeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] {
     override def toCustom(__base: dotty.tools.dotc.semanticdb.TypeMessage): dotty.tools.dotc.semanticdb.Type = __base.sealedValue match {
       case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef => __v.value
@@ -144,7 +144,7 @@ final case class TypeMessage(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      sealedValue.typeRef.foreach { __v =>
@@ -278,10 +278,10 @@ final case class TypeMessage(
    def withLambdaType(__v: dotty.tools.dotc.semanticdb.LambdaType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__v))
    def clearSealedValue: TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty)
    def withSealedValue(__v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue): TypeMessage = copy(sealedValue = __v)
-
-
-
-
+
+
+
+
    def toType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Type.TypeTypeMapper.toCustom(this)
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Type])
 }
@@ -334,12 +334,12 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
        sealedValue = __sealedValue
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeMessage(
    sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty
  )
@@ -388,7 +388,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
      override def number: _root_.scala.Int = 0
      override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
    }
-
+
    @SerialVersionUID(0L)
    final case class TypeRef(value: dotty.tools.dotc.semanticdb.TypeRef) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual {
      type ValueType = dotty.tools.dotc.semanticdb.TypeRef
@@ -536,14 +536,14 @@ final case class TypeRef(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toBase(prefix)
        if (__value.serializedSize != 0) {
          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
      };
-
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -563,7 +563,7 @@ final case class TypeRef(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -593,10 +593,10 @@ final case class TypeRef(
    def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type *): TypeRef = addAllTypeArguments(__vs)
    def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = typeArguments ++ __vs)
    def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeRef])
 }
@@ -626,12 +626,12 @@ object TypeRef extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.sem
        typeArguments = __typeArguments.result()
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeRef(
    prefix = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    symbol = "",
@@ -665,14 +665,14 @@ final case class SingleType(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toBase(prefix)
        if (__value.serializedSize != 0) {
          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
      };
-
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -688,7 +688,7 @@ final case class SingleType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -708,10 +708,10 @@ final case class SingleType(
    }
    def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SingleType = copy(prefix = __v)
    def withSymbol(__v: _root_.scala.Predef.String): SingleType = copy(symbol = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SingleType])
 }
@@ -737,12 +737,12 @@ object SingleType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
        symbol = __symbol
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SingleType(
    prefix = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    symbol = ""
@@ -769,7 +769,7 @@ final case class ThisType(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -785,7 +785,7 @@ final case class ThisType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -796,10 +796,10 @@ final case class ThisType(
      };
    }
    def withSymbol(__v: _root_.scala.Predef.String): ThisType = copy(symbol = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ThisType])
 }
@@ -821,12 +821,12 @@ object ThisType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se
        symbol = __symbol
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ThisType(
    symbol = ""
  )
@@ -848,14 +848,14 @@ final case class SuperType(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toBase(prefix)
        if (__value.serializedSize != 0) {
          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
      };
-
+
      {
        val __value = symbol
        if (!__value.isEmpty) {
@@ -871,7 +871,7 @@ final case class SuperType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -891,10 +891,10 @@ final case class SuperType(
    }
    def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SuperType = copy(prefix = __v)
    def withSymbol(__v: _root_.scala.Predef.String): SuperType = copy(symbol = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SuperType])
 }
@@ -920,12 +920,12 @@ object SuperType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
        symbol = __symbol
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SuperType(
    prefix = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    symbol = ""
@@ -952,7 +952,7 @@ final case class ConstantType(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toBase(constant)
        if (__value.serializedSize != 0) {
@@ -968,7 +968,7 @@ final case class ConstantType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -981,10 +981,10 @@ final case class ConstantType(
      };
    }
    def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): ConstantType = copy(constant = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ConstantType])
 }
@@ -1006,12 +1006,12 @@ object ConstantType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
        constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ConstantType(
    constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
  )
@@ -1047,7 +1047,7 @@ final case class IntersectionType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      types.foreach { __v =>
@@ -1061,10 +1061,10 @@ final case class IntersectionType(
    def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): IntersectionType = addAllTypes(__vs)
    def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = types ++ __vs)
    def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntersectionType])
 }
@@ -1086,12 +1086,12 @@ object IntersectionType extends SemanticdbGeneratedMessageCompanion[dotty.tools
        types = __types.result()
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.IntersectionType(
    types = _root_.scala.Seq.empty
  )
@@ -1127,7 +1127,7 @@ final case class UnionType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      types.foreach { __v =>
@@ -1141,10 +1141,10 @@ final case class UnionType(
    def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): UnionType = addAllTypes(__vs)
    def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = types ++ __vs)
    def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnionType])
 }
@@ -1166,12 +1166,12 @@ object UnionType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
        types = __types.result()
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.UnionType(
    types = _root_.scala.Seq.empty
  )
@@ -1207,7 +1207,7 @@ final case class WithType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      types.foreach { __v =>
@@ -1221,10 +1221,10 @@ final case class WithType(
    def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): WithType = addAllTypes(__vs)
    def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = types ++ __vs)
    def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.WithType])
 }
@@ -1246,12 +1246,12 @@ object WithType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se
        types = __types.result()
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.WithType(
    types = _root_.scala.Seq.empty
  )
@@ -1275,7 +1275,7 @@ final case class StructuralType(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -1295,7 +1295,7 @@ final case class StructuralType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -1317,10 +1317,10 @@ final case class StructuralType(
    def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
    def clearDeclarations: StructuralType = copy(declarations = _root_.scala.None)
    def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): StructuralType = copy(declarations = Option(__v))
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StructuralType])
 }
@@ -1346,12 +1346,12 @@ object StructuralType extends SemanticdbGeneratedMessageCompanion[dotty.tools.d
        declarations = __declarations
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.StructuralType(
    tpe = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    declarations = _root_.scala.None
@@ -1383,7 +1383,7 @@ final case class AnnotatedType(
        val __value = __item
        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -1399,7 +1399,7 @@ final case class AnnotatedType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -1422,10 +1422,10 @@ final case class AnnotatedType(
    def addAllAnnotations(__vs: Iterable[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = annotations ++ __vs)
    def withAnnotations(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = __v)
    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): AnnotatedType = copy(tpe = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.AnnotatedType])
 }
@@ -1451,12 +1451,12 @@ object AnnotatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
        tpe = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.AnnotatedType(
    annotations = _root_.scala.Seq.empty,
    tpe = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -1484,7 +1484,7 @@ final case class ExistentialType(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -1504,7 +1504,7 @@ final case class ExistentialType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -1526,10 +1526,10 @@ final case class ExistentialType(
    def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
    def clearDeclarations: ExistentialType = copy(declarations = _root_.scala.None)
    def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): ExistentialType = copy(declarations = Option(__v))
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ExistentialType])
 }
@@ -1555,12 +1555,12 @@ object ExistentialType extends SemanticdbGeneratedMessageCompanion[dotty.tools.
        declarations = __declarations
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ExistentialType(
    tpe = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    declarations = _root_.scala.None
@@ -1592,7 +1592,7 @@ final case class UniversalType(
        val __value = typeParameters.get
        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      };
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -1608,7 +1608,7 @@ final case class UniversalType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -1630,10 +1630,10 @@ final case class UniversalType(
    def clearTypeParameters: UniversalType = copy(typeParameters = _root_.scala.None)
    def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): UniversalType = copy(typeParameters = Option(__v))
    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): UniversalType = copy(tpe = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UniversalType])
 }
@@ -1659,12 +1659,12 @@ object UniversalType extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
        tpe = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.UniversalType(
    typeParameters = _root_.scala.None,
    tpe = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -1691,7 +1691,7 @@ final case class ByNameType(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -1707,7 +1707,7 @@ final case class ByNameType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -1720,10 +1720,10 @@ final case class ByNameType(
      };
    }
    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ByNameType = copy(tpe = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByNameType])
 }
@@ -1745,12 +1745,12 @@ object ByNameType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
        tpe = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ByNameType(
    tpe = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
  )
@@ -1773,7 +1773,7 @@ final case class RepeatedType(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toBase(tpe)
        if (__value.serializedSize != 0) {
@@ -1789,7 +1789,7 @@ final case class RepeatedType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -1802,10 +1802,10 @@ final case class RepeatedType(
      };
    }
    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): RepeatedType = copy(tpe = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.RepeatedType])
 }
@@ -1827,12 +1827,12 @@ object RepeatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
        tpe = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.RepeatedType(
    tpe = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
  )
@@ -1856,7 +1856,7 @@ final case class MatchType(
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toBase(scrutinee)
        if (__value.serializedSize != 0) {
@@ -1876,7 +1876,7 @@ final case class MatchType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -1899,10 +1899,10 @@ final case class MatchType(
    def addCases(__vs: dotty.tools.dotc.semanticdb.MatchType.CaseType *): MatchType = addAllCases(__vs)
    def addAllCases(__vs: Iterable[dotty.tools.dotc.semanticdb.MatchType.CaseType]): MatchType = copy(cases = cases ++ __vs)
    def withCases(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.MatchType.CaseType]): MatchType = copy(cases = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MatchType])
 }
@@ -1928,12 +1928,12 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
        cases = __cases.result()
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.MatchType(
    scrutinee = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    cases = _root_.scala.Seq.empty
@@ -1947,14 +1947,14 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
    private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
    private[this] def __computeSerializedSize(): _root_.scala.Int = {
      var __size = 0
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toBase(key)
        if (__value.serializedSize != 0) {
          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        }
      };
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toBase(body)
        if (__value.serializedSize != 0) {
@@ -1970,7 +1970,7 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      {
@@ -1992,13 +1992,13 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
    }
    def withKey(__v: dotty.tools.dotc.semanticdb.Type): CaseType = copy(key = __v)
    def withBody(__v: dotty.tools.dotc.semanticdb.Type): CaseType = copy(body = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MatchType.CaseType])
  }
-
+
  object CaseType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType] {
    implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType] = this
    def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MatchType.CaseType = {
@@ -2021,12 +2021,12 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
        body = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(__body.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.MatchType.CaseType(
    key = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
    body = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -2046,7 +2046,7 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
    )
    // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType])
  }
-
+
  final val SCRUTINEE_FIELD_NUMBER = 1
  final val CASES_FIELD_NUMBER = 2
  @transient @sharable
@@ -2074,7 +2074,7 @@ final case class LambdaType(
        val __value = parameters.get
        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      };
-
+
      {
        val __value = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toBase(returnType)
        if (__value.serializedSize != 0) {
@@ -2090,7 +2090,7 @@ final case class LambdaType(
        __serializedSizeMemoized = __size
      }
      __size - 1
-
+
    }
    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
      parameters.foreach { __v =>
@@ -2112,10 +2112,10 @@ final case class LambdaType(
    def clearParameters: LambdaType = copy(parameters = _root_.scala.None)
    def withParameters(__v: dotty.tools.dotc.semanticdb.Scope): LambdaType = copy(parameters = Option(__v))
    def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): LambdaType = copy(returnType = __v)
-
-
-
-
+
+
+
+
    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LambdaType])
 }
@@ -2141,12 +2141,12 @@ object LambdaType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
        returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
    )
  }
-
-
-
-
-
-
+
+
+
+
+
+
  lazy val defaultInstance = dotty.tools.dotc.semanticdb.LambdaType(
    parameters = _root_.scala.None,
    returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala
index 8aed9e5b9771..699f85c0e303 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala
@@ -7,7 +7,7 @@ import java.io.InputStream
 import java.util.Arrays
 import java.nio.charset.StandardCharsets

-import SemanticdbInputStream._
+import SemanticdbInputStream.*

 import scala.collection.mutable
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala
index 37da7f868e25..359e861225b0 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala
@@ -7,7 +7,7 @@ import java.io.OutputStream
 import java.nio.ByteBuffer
 import java.nio.charset.StandardCharsets

-import SemanticdbOutputStream._
+import SemanticdbOutputStream.*

 object SemanticdbOutputStream {
diff --git a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala
index 89d8f9a80ee3..85f2e84429c3 100644
--- a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala
+++ b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala
@@ -2,19 +2,19 @@ package dotty.tools.dotc
 package staging

 import dotty.tools.dotc.ast.{tpd, untpd}
-import dotty.tools.dotc.core.Annotations._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.NameKinds._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Annotations.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.NameKinds.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
 import dotty.tools.dotc.quoted.QuotePatterns
 import dotty.tools.dotc.staging.QuoteTypeTags.*
 import dotty.tools.dotc.staging.StagingLevel.*
 import dotty.tools.dotc.util.Property
-import dotty.tools.dotc.util.Spans._
+import dotty.tools.dotc.util.Spans.*
 import dotty.tools.dotc.util.SrcPos

 /** Checks that staging level consistency holds and heals staged types.
@@ -52,7 +52,7 @@ import dotty.tools.dotc.util.SrcPos
  *
  */
 class CrossStageSafety extends TreeMapWithStages {
-  import tpd._
+  import tpd.*

   private val InAnnotation = Property.Key[Unit]()
diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala
index 7d3ca0ad2f63..2469bd73bdcb 100644
--- a/compiler/src/dotty/tools/dotc/staging/HealType.scala
+++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala
@@ -1,15 +1,15 @@
 package dotty.tools.dotc
 package staging

-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
 import dotty.tools.dotc.staging.StagingLevel.*
 import dotty.tools.dotc.staging.QuoteTypeTags.*
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
 import dotty.tools.dotc.typer.Implicits.SearchFailureType
 import dotty.tools.dotc.util.SrcPos
diff --git a/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala
index 0b5032ea5a6d..467f1f440fd6 100644
--- a/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala
+++ b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala
@@ -1,10 +1,10 @@
 package dotty.tools.dotc.staging

 import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Types.*
 import dotty.tools.dotc.staging.StagingLevel.*
 import dotty.tools.dotc.util.Property
diff --git a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala
index 05b3efab408c..0a229881804e 100644
--- a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala
+++ b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala
@@ -1,11 +1,11 @@
 package dotty.tools.dotc
 package staging

-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
 import dotty.tools.dotc.util.Property
 import dotty.tools.dotc.util.SrcPos
diff --git a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala
index 674dfff2f642..c2607f3daa68 100644
--- a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala
+++ b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala
@@ -3,16 +3,16 @@ package staging

 import dotty.tools.dotc.ast.{TreeMapWithImplicits, tpd}
 import dotty.tools.dotc.config.Printers.staging
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Decorators.*
dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.staging.StagingLevel.* import scala.collection.mutable /** TreeMap that keeps track of staging levels using StagingLevel. */ abstract class TreeMapWithStages extends TreeMapWithImplicits { - import tpd._ + import tpd.* override def transform(tree: Tree)(using Context): Tree = if (tree.source != ctx.source && tree.source.exists) diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 3175ffceae49..c5ffde140bd6 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._ -import Symbols._ -import Flags._ -import Names._ -import NameOps._ -import Decorators._ -import TypeUtils._ -import Types._ +import core.* +import Contexts.* +import Symbols.* +import Flags.* +import Names.* +import NameOps.* +import Decorators.* +import TypeUtils.* +import Types.* import util.Spans.Span import config.Printers.transforms @@ -18,8 +18,8 @@ import config.Printers.transforms * inline accessors and protected accessors. */ abstract class AccessProxies { - import ast.tpd._ - import AccessProxies._ + import ast.tpd.* + import AccessProxies.* /** accessor -> accessed */ private val accessedBy = MutableSymbolMap[Symbol]() @@ -64,7 +64,7 @@ abstract class AccessProxies { } trait Insert { - import ast.tpd._ + import ast.tpd.* /** The name of the accessor for definition with given `name` in given `site` */ def accessorNameOf(name: TermName, site: Symbol)(using Context): TermName diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala index 872c7cc897de..6ece8ad63808 100644 --- a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core._ -import MegaPhase._ -import Contexts._ -import Symbols._ -import Flags._ -import StdNames._ +import core.* +import MegaPhase.* +import Contexts.* +import Symbols.* +import Flags.* +import StdNames.* import dotty.tools.dotc.ast.tpd @@ -16,7 +16,7 @@ import dotty.tools.dotc.ast.tpd * Transforms `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` */ class ArrayApply extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = ArrayApply.name diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala index e783961649dd..b0106f0d2ff3 100644 --- a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core._ -import MegaPhase._ -import Contexts._ -import Symbols._ -import Types._ -import StdNames._ +import core.* +import MegaPhase.* +import Contexts.* +import Symbols.* +import Types.* +import StdNames.* import dotty.tools.dotc.ast.tpd @@ -19,7 +19,7 @@ import scala.collection.immutable.:: * Additionally it optimizes calls to scala.Array.ofDim functions by replacing them with calls to newArray with specific dimensions */ class ArrayConstructors extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ArrayConstructors.name 
diff --git a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala index 0c1f40d4f2bd..7e21703f67ee 100644 --- a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala +++ b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala @@ -1,19 +1,19 @@ package dotty.tools.dotc package transform -import core._ -import ast.tpd._ -import Annotations._ -import Contexts._ +import core.* +import ast.tpd.* +import Annotations.* +import Contexts.* import Symbols.* import SymUtils.* -import Decorators._ -import Flags._ -import Names._ -import Types._ -import util.Spans._ +import Decorators.* +import Flags.* +import Names.* +import Types.* +import util.Spans.* -import DenotTransformers._ +import DenotTransformers.* class BeanProperties(thisPhase: DenotTransformer): def addBeanMethods(impl: Template)(using Context): Template = diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index ad36544cdec0..653a5e17990f 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package transform -import core._ -import Flags._ -import MegaPhase._ -import Symbols._, Contexts._, Types._, Decorators._ +import core.* +import Flags.* +import MegaPhase.* +import Symbols.*, Contexts.*, Types.*, Decorators.* import StdNames.nme import ast.TreeTypeMap @@ -33,7 +33,7 @@ import scala.collection.mutable.ListBuffer * return context functions. See i6375.scala. */ class BetaReduce extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = BetaReduce.name @@ -45,7 +45,7 @@ class BetaReduce extends MiniPhase: app1 object BetaReduce: - import ast.tpd._ + import ast.tpd.* val name: String = "betaReduce" val description: String = "reduce closure applications" diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index 94f7b405c027..2110ac1464c2 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -2,9 +2,9 @@ package dotty.tools package dotc package transform -import core._ -import Symbols._, Types._, Contexts._, Decorators._, Flags._, Scopes._, Phases._ -import DenotTransformers._ +import core.* +import Symbols.*, Types.*, Contexts.*, Decorators.*, Flags.*, Scopes.*, Phases.* +import DenotTransformers.* import ast.untpd import collection.{mutable, immutable} import util.SrcPos @@ -16,7 +16,7 @@ import Erasure.Boxing.adaptClosure /** A helper class for generating bridge methods in class `root`. 
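 *
 *  The classic case needing a bridge, as an illustrative sketch (not code
 *  from this patch):
 *  {{{
 *  trait Box[T] { def get: T }
 *  class IntBox extends Box[Int] { def get: Int = 1 }
 *  // After erasure `Box#get` returns `Object`, so `IntBox` needs a synthetic
 *  // bridge `def get: Object` that calls the specific `get: Int` and boxes it.
 *  }}}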
*/ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { - import ast.tpd._ + import ast.tpd.* assert(ctx.phase == erasurePhase.next) private val preErasureCtx = ctx.withPhase(erasurePhase) diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index 202e3d72fa25..c1725cbd0255 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -1,16 +1,16 @@ package dotty.tools.dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Flags._ -import core.Decorators._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme -import core.Names._ +import core.Names.* import core.NameKinds.TempResultName -import core.Constants._ +import core.Constants.* import util.Store import dotty.tools.uncheckedNN import ast.tpd.* diff --git a/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala b/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala index 7c8082265161..8625d2dbb289 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala @@ -30,7 +30,7 @@ object CheckLoopingImplicits: */ class CheckLoopingImplicits extends MiniPhase: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = CheckLoopingImplicits.name diff --git a/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala index f43d000bbf44..127bd57b1bf2 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala @@ -15,7 +15,7 @@ object CheckNoSuperThis: /** Checks that super and this calls do not pass `this` as (part of) an argument. */ class CheckNoSuperThis extends MiniPhase: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = CheckNoSuperThis.name diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index b63773687f74..073086ac5e2c 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core._ -import dotty.tools.dotc.transform.MegaPhase._ -import Flags._ -import Contexts._ -import Symbols._ -import Decorators._ +import core.* +import dotty.tools.dotc.transform.MegaPhase.* +import Flags.* +import Contexts.* +import Symbols.* +import Decorators.* /** A no-op transform that checks whether the compiled sources are re-entrant. * If -Ycheck:reentrant is set, the phase makes sure that there are no variables @@ -27,7 +27,7 @@ import Decorators._ * for immutable array. 
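 *
 *  A minimal example of what the check reports (illustrative only):
 *  {{{
 *  object Counter {
 *    var hits: Int = 0   // globally reachable mutable state, flagged
 *  }
 *  }}}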
*/ class CheckReentrant extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = CheckReentrant.name diff --git a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala index ae69c1596009..fdc055df9ac4 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala @@ -138,7 +138,7 @@ class CheckShadowing extends MiniPhase: } private def nestedTypeTraverser(parent: Symbol) = new TreeTraverser: - import tpd._ + import tpd.* override def traverse(tree: tpd.Tree)(using Context): Unit = tree match @@ -154,7 +154,7 @@ class CheckShadowing extends MiniPhase: // To reach the imports during a miniphase traversal private def importTraverser = new TreeTraverser: - import tpd._ + import tpd.* override def traverse(tree: tpd.Tree)(using Context): Unit = tree match @@ -173,7 +173,7 @@ object CheckShadowing: val description = "check for elements shadowing other elements in scope" private class ShadowingData: - import dotty.tools.dotc.transform.CheckShadowing.ShadowingData._ + import dotty.tools.dotc.transform.CheckShadowing.ShadowingData.* import collection.mutable.{Set => MutSet, Map => MutMap, Stack => MutStack} private val rootImports = MutSet[SingleDenotation]() diff --git a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala index 0d5154e212ee..2b616bad0a01 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala @@ -1,15 +1,15 @@ package dotty.tools.dotc package transform -import core._ -import dotty.tools.dotc.transform.MegaPhase._ -import Flags._ -import Contexts._ -import Symbols._ +import core.* +import dotty.tools.dotc.transform.MegaPhase.* +import Flags.* +import Contexts.* +import Symbols.* import dotty.tools.dotc.ast.tpd -import reporting._ +import reporting.* -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.transform.SymUtils.* /** A transformer that check that requirements of Static fields\methods are implemented: * 1. Only objects can have members annotated with `@static` @@ -24,7 +24,7 @@ import dotty.tools.dotc.transform.SymUtils._ * 6. `@static` Lazy vals are currently unsupported. 
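 *
 *  A sketch of rule 1 above (illustrative; assumes the remaining rules hold):
 *  {{{
 *  object Holder { @static val ok = 1 }   // allowed: member of an object
 *  class C { @static val bad = 2 }        // rejected: not a member of an object
 *  }}}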
*/
 class CheckStatic extends MiniPhase {
-  import ast.tpd._
+  import ast.tpd.*
 
   override def phaseName: String = CheckStatic.name
 
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala
index 073626b4b5c6..7cff6fa5f1f0 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala
@@ -206,7 +206,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke
    * corresponding context property
    */
   private def traverser = new TreeTraverser:
-    import tpd._
+    import tpd.*
     import UnusedData.ScopeType
 
     /* Register every imports, definition and usage */
@@ -607,14 +607,14 @@ object CheckUnused:
    *   package a:
    *     val x: Int = 0
    *   package b:
-   *     import a._ // no warning
+   *     import a.* // no warning
    * }}}
    * --- WITH OBJECT : OK ---
    * {{{
    *   object a:
    *     val x: Int = 0
    *   object b:
-   *     import a._ // unused warning
+   *     import a.* // unused warning
    * }}}
    */
   private def isConstructorOfSynth(sym: Symbol)(using Context): Boolean =
@@ -643,8 +643,8 @@ object CheckUnused:
       sel.isWildcard
       || imp.expr.tpe.member(sel.name.toTermName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit))
       || imp.expr.tpe.member(sel.name.toTypeName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit))
-    )
-    
+    )
+
   /**
    * Ignore CanEqual imports
    */
@@ -655,7 +655,7 @@ object CheckUnused:
 
   /**
    * Ignore definitions of CanEqual given
-   */ 
+   */
   private def isDefIgnored(memDef: tpd.MemberDef)(using Context): Boolean =
     memDef.symbol.isOneOf(GivenOrImplicit) && memDef.symbol.typeRef.baseClasses.exists(_.derivesFrom(defn.CanEqualClass))
 
diff --git a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
index 179625759b10..5534947c6799 100644
--- a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
@@ -1,12 +1,12 @@
 package dotty.tools.dotc
 package transform
 
-import core._
+import core.*
 import ast.tpd
-import MegaPhase._
-import Contexts._
-import Symbols._
-import Phases._
+import MegaPhase.*
+import Contexts.*
+import Symbols.*
+import Phases.*
 
 import dotty.tools.io.JarArchive
 import dotty.tools.backend.jvm.GenBCode
diff --git a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala
index 7b89c8785e05..22739dc528c8 100644
--- a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala
@@ -1,11 +1,11 @@
 package dotty.tools.dotc.transform
 
 import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Flags.*
 import dotty.tools.dotc.core.Symbols.Symbol
 import dotty.tools.dotc.transform.MegaPhase.MiniPhase
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
 
 import scala.collection.mutable
 
@@ -40,7 +40,7 @@ object CollectNullableFields {
  *  - defined in the same class as the lazy val
  */
 class CollectNullableFields extends MiniPhase {
-  import tpd._
+  import tpd.*
 
   override def phaseName: String = CollectNullableFields.name
 
diff --git a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala
index b7e8ccf4e7e1..eac0b9f05c60 100644
---
a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala +++ b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala @@ -2,18 +2,18 @@ package dotty.tools package dotc package transform -import core._ -import Names._ +import core.* +import Names.* import StdNames.nme -import Types._ -import dotty.tools.dotc.transform.MegaPhase._ -import Flags._ -import Contexts._ -import Symbols._ -import Constants._ -import Decorators._ -import DenotTransformers._ -import SymUtils._ +import Types.* +import dotty.tools.dotc.transform.MegaPhase.* +import Flags.* +import Contexts.* +import Symbols.* +import Constants.* +import Decorators.* +import DenotTransformers.* +import SymUtils.* object CompleteJavaEnums { @@ -29,8 +29,8 @@ object CompleteJavaEnums { * case to the java.lang.Enum class. */ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => - import CompleteJavaEnums._ - import ast.tpd._ + import CompleteJavaEnums.* + import ast.tpd.* override def phaseName: String = CompleteJavaEnums.name diff --git a/compiler/src/dotty/tools/dotc/transform/Constructors.scala b/compiler/src/dotty/tools/dotc/transform/Constructors.scala index 4dd7205e4ee0..9df98292fe84 100644 --- a/compiler/src/dotty/tools/dotc/transform/Constructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/Constructors.scala @@ -1,19 +1,19 @@ package dotty.tools.dotc package transform -import core._ -import MegaPhase._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.StdNames._ -import ast._ -import Flags._ +import core.* +import MegaPhase.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.StdNames.* +import ast.* +import Flags.* import Names.Name -import NameOps._ +import NameOps.* import NameKinds.{FieldName, ExplicitFieldName} -import SymUtils._ -import Symbols._ -import Decorators._ -import DenotTransformers._ +import SymUtils.* +import Symbols.* +import Decorators.* +import DenotTransformers.* import collection.mutable object Constructors { @@ -28,7 +28,7 @@ object Constructors { * into the constructor if possible. 
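 *
 *  E.g. (sketch; `compute` is a made-up name): in
 *  {{{
 *  class C { private val x = compute() }
 *  }}}
 *  the call to `compute()` ends up in the body of C's primary constructor.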
*/ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase => - import tpd._ + import tpd.* override def phaseName: String = Constructors.name @@ -352,7 +352,7 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = val expandedConstr = if (cls.isAllOf(NoInitsTrait)) { assert(finalConstrStats.isEmpty || { - import dotty.tools.dotc.transform.sjs.JSSymUtils._ + import dotty.tools.dotc.transform.sjs.JSSymUtils.* ctx.settings.scalajs.value && cls.isJSType }) constr diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala index 01a77427698a..4719b5f5f6f3 100644 --- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala +++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._, Symbols._, Types._, Annotations._, Constants._, Phases._ +import core.* +import Contexts.*, Symbols.*, Types.*, Annotations.*, Constants.*, Phases.* import StdNames.nme import ast.untpd -import ast.tpd._ +import ast.tpd.* import config.Config object ContextFunctionResults: diff --git a/compiler/src/dotty/tools/dotc/transform/CookComments.scala b/compiler/src/dotty/tools/dotc/transform/CookComments.scala index 27f34891fc2c..d443e31fdc39 100644 --- a/compiler/src/dotty/tools/dotc/transform/CookComments.scala +++ b/compiler/src/dotty/tools/dotc/transform/CookComments.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc.transform import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.ContextOps._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.ContextOps.* import dotty.tools.dotc.typer.Docstrings class CookComments extends MegaPhase.MiniPhase { diff --git a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala index 91b5bc6a3de4..b5c02347d5d2 100644 --- a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala +++ b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core._ +import core.* import MegaPhase.MiniPhase -import dotty.tools.dotc.core.Contexts._ -import ast._ -import Flags._ -import Symbols._ +import dotty.tools.dotc.core.Contexts.* +import ast.* +import Flags.* +import Symbols.* import ExplicitOuter.isOuterParamAccessor import collection.mutable @@ -33,7 +33,7 @@ object CountOuterAccesses: */ class CountOuterAccesses extends MiniPhase: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = CountOuterAccesses.name diff --git a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala index 4f8f1b195bb3..89161cc8c013 100644 --- a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala +++ b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc package transform -import core.Contexts._ +import core.Contexts.* import scala.compiletime.uninitialized diff --git a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala index 46285c324307..9b1b931e0320 100644 --- a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala +++ b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala @@ -14,7 +14,7 @@ import 
dotty.tools.backend.sjs.JSDefinitions.jsdefn * `freeVars`, `tracked`, and `logicalOwner`. */ abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Context): - import ast.tpd._ + import ast.tpd.* /** The symbol is a method or a lazy val that will be mapped to a method */ protected def isExpr(sym: Symbol)(using Context): Boolean diff --git a/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala index 9f94a8c13a52..5f26a6af6c3c 100644 --- a/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala +++ b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala @@ -6,9 +6,9 @@ import ast.{Trees, tpd} import core.* import Decorators.* import NameKinds.BoundaryName -import MegaPhase._ -import Types._, Contexts._, Flags._, DenotTransformers._ -import Symbols._, StdNames._, Trees._ +import MegaPhase.* +import Types.*, Contexts.*, Flags.*, DenotTransformers.* +import Symbols.*, StdNames.*, Trees.* import util.Property import Constants.Constant import Flags.MethodOrLazy @@ -41,7 +41,7 @@ object DropBreaks: class DropBreaks extends MiniPhase: import DropBreaks.* - import tpd._ + import tpd.* override def phaseName: String = DropBreaks.name diff --git a/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled b/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled index 13adcf5c3f76..1a5cb0dfba47 100644 --- a/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled +++ b/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled @@ -1,17 +1,17 @@ package dotty.tools.dotc package transform -import core._ +import core.* import DenotTransformers.SymTransformer import Phases.Phase -import Contexts._ -import Flags._ -import Symbols._ +import Contexts.* +import Flags.* +import Symbols.* import SymDenotations.SymDenotation -import ast.Trees._ +import ast.Trees.* import collection.mutable -import Decorators._ -import NameOps._ +import Decorators.* +import NameOps.* import MegaPhase.MiniPhase import dotty.tools.dotc.transform.MegaPhase.TransformerInfo @@ -29,7 +29,7 @@ import dotty.tools.dotc.transform.MegaPhase.TransformerInfo * at their destination. 
*/ class DropEmptyCompanions extends MiniPhase { thisTransform => - import ast.tpd._ + import ast.tpd.* override def phaseName = "dropEmptyCompanions" override def runsAfter = Set(Flatten.name) diff --git a/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala b/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala index a363ccaeb0d0..b3bd1ab8dd26 100644 --- a/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala @@ -1,15 +1,15 @@ package dotty.tools.dotc package transform -import core._ +import core.* import MegaPhase.MiniPhase -import dotty.tools.dotc.core.Contexts._ -import ast._ -import Flags._ -import Symbols._ -import Contexts._ -import Decorators._ -import DenotTransformers._ +import dotty.tools.dotc.core.Contexts.* +import ast.* +import Flags.* +import Symbols.* +import Contexts.* +import Decorators.* +import DenotTransformers.* import ExplicitOuter.isOuterParamAccessor import CountOuterAccesses.mightBeDropped import collection.mutable @@ -24,7 +24,7 @@ object DropOuterAccessors: */ class DropOuterAccessors extends MiniPhase with IdentityDenotTransformer: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = DropOuterAccessors.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala index 151e841f0e48..eca3928569f1 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._ -import Symbols._ -import Types._ -import Flags._ +import core.* +import Contexts.* +import Symbols.* +import Types.* +import Flags.* import SymDenotations.* import DenotTransformers.InfoTransformer import NameKinds.SuperArgName @@ -53,7 +53,7 @@ import dotty.tools.dotc.core.Names.Name class ElimByName extends MiniPhase, InfoTransformer: thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimByName.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala index 503561915040..0b0906148ba1 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala @@ -3,12 +3,12 @@ package dotc package transform import ast.{Trees, tpd} -import core._, core.Decorators._ -import MegaPhase._ -import Types._, Contexts._, Flags._, DenotTransformers._, Phases._ -import Symbols._, StdNames._, Trees._ -import TypeErasure.ErasedValueType, ValueClasses._ -import reporting._ +import core.*, core.Decorators.* +import MegaPhase.* +import Types.*, Contexts.*, Flags.*, DenotTransformers.*, Phases.* +import Symbols.*, StdNames.*, Trees.* +import TypeErasure.ErasedValueType, ValueClasses.* +import reporting.* import NameKinds.SuperAccessorName object ElimErasedValueType { @@ -36,7 +36,7 @@ object ElimErasedValueType { */ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase => - import tpd._ + import tpd.* import ElimErasedValueType.elimEVT override def phaseName: String = ElimErasedValueType.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala index 2f55826ec2a3..0ee8781b6b70 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala @@ -2,16 +2,16 @@ package dotty.tools package dotc package transform -import core._ -import dotty.tools.dotc.transform.MegaPhase._ -import Flags._ -import Types._ -import Contexts._ -import Symbols._ +import core.* +import dotty.tools.dotc.transform.MegaPhase.* +import Flags.* +import Types.* +import Contexts.* +import Symbols.* import Denotations.{SingleDenotation, NonSymSingleDenotation} import SymDenotations.SymDenotation -import DenotTransformers._ -import Names._ +import DenotTransformers.* +import Names.* object ElimOpaque { val name: String = "elimOpaque" @@ -21,7 +21,7 @@ object ElimOpaque { /** Rewrites opaque type aliases to normal alias types */ class ElimOpaque extends MiniPhase with DenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimOpaque.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala index 3ddc8b614bae..8527ad26e51b 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala @@ -1,17 +1,17 @@ package dotty.tools.dotc package transform -import core._ +import core.* import MegaPhase.MiniPhase -import Contexts._ -import Types._ +import Contexts.* +import Types.* import NameKinds.OuterSelectName /** This phase rewrites outer selects `E.n_` which were introduced by * inlining to outer paths. */ class ElimOuterSelect extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimOuterSelect.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala index 83349f1f6199..6c577a872c56 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala @@ -1,10 +1,10 @@ package dotty.tools.dotc package transform -import core._ -import Decorators._, Flags._, Types._, Contexts._, Symbols._ -import ast.tpd._ -import Flags._ +import core.* +import Decorators.*, Flags.*, Types.*, Contexts.*, Symbols.* +import ast.tpd.* +import Flags.* import MegaPhase.MiniPhase /** Eliminates syntactic references to package terms as prefixes of classes, so that there's no chance diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala index 756ddd9bf0eb..3ed337ee3a4a 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala @@ -2,11 +2,11 @@ package dotty.tools.dotc package transform import ast.{Trees, tpd} -import core._, core.Decorators._ -import MegaPhase._, Phases.Phase -import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTransformers._ -import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._ -import TypeErasure.ErasedValueType, ValueClasses._ +import core.*, core.Decorators.* +import MegaPhase.*, Phases.Phase +import Types.*, Contexts.*, Constants.*, Names.*, NameOps.*, Flags.*, DenotTransformers.* +import SymDenotations.*, Symbols.*, StdNames.*, Annotations.*, Trees.*, Scopes.*, Denotations.* +import TypeErasure.ErasedValueType, ValueClasses.* /** This phase rewrite PolyFunction subclasses to FunctionN subclasses * @@ -20,7 +20,7 @@ import TypeErasure.ErasedValueType, 
ValueClasses._ */ class ElimPolyFunction extends MiniPhase with DenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = ElimPolyFunction.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala index 359b882ef26b..b98d7d525089 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala @@ -2,17 +2,17 @@ package dotty.tools package dotc package transform -import core._ +import core.* import StdNames.nme -import Types._ -import transform.MegaPhase._ -import Flags._ -import Contexts._ -import Symbols._ -import Decorators._ -import Denotations._, SymDenotations._ -import DenotTransformers._ -import NullOpsDecorator._ +import Types.* +import transform.MegaPhase.* +import Flags.* +import Contexts.* +import Symbols.* +import Decorators.* +import Denotations.*, SymDenotations.* +import DenotTransformers.* +import NullOpsDecorator.* object ElimRepeated { val name: String = "elimRepeated" @@ -24,7 +24,7 @@ object ElimRepeated { * the transformed type if needed. */ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimRepeated.name diff --git a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala index 02612253c735..e2940532d463 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._ -import Flags._ +import core.* +import Contexts.* +import Flags.* import dotty.tools.dotc.ast.tpd import MegaPhase.MiniPhase import dotty.tools.dotc.core.Types.{ThisType, TermRef} @@ -12,7 +12,7 @@ import dotty.tools.dotc.core.Types.{ThisType, TermRef} * corresponding modules. 
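 *
 *  Sketch of the rewrite (illustrative only):
 *  {{{
 *  object O { def f = this.hashCode }  // `this` becomes a reference to the module `O`
 *  }}}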
*/ class ElimStaticThis extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ElimStaticThis.name diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 0b52b1725c3e..5f0854d31455 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -2,39 +2,39 @@ package dotty.tools package dotc package transform -import core.Phases._ -import core.DenotTransformers._ -import core.Denotations._ -import core.SymDenotations._ -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Names._ -import core.StdNames._ -import core.NameOps._ +import core.Phases.* +import core.DenotTransformers.* +import core.Denotations.* +import core.SymDenotations.* +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Names.* +import core.StdNames.* +import core.NameOps.* import core.NameKinds.{AdaptedClosureName, BodyRetainerName, DirectMethName} import core.Scopes.newScopeWith -import core.Decorators._ -import core.Constants._ -import core.Definitions._ +import core.Decorators.* +import core.Constants.* +import core.Definitions.* import core.Annotations.BodyAnnotation import typer.NoChecking import inlines.Inlines -import typer.ProtoTypes._ +import typer.ProtoTypes.* import typer.ErrorReporting.errorTree import typer.Checking.checkValue -import core.TypeErasure._ -import core.Decorators._ +import core.TypeErasure.* +import core.Decorators.* import dotty.tools.dotc.ast.{tpd, untpd} import ast.TreeTypeMap import dotty.tools.dotc.core.{Constants, Flags} -import ValueClasses._ -import TypeUtils._ -import ContextFunctionResults._ -import ExplicitOuter._ +import ValueClasses.* +import TypeUtils.* +import ContextFunctionResults.* +import ExplicitOuter.* import core.Mode import util.Property -import reporting._ +import reporting.* class Erasure extends Phase with DenotTransformer { @@ -202,8 +202,8 @@ class Erasure extends Phase with DenotTransformer { } object Erasure { - import tpd._ - import TypeTestsCasts._ + import tpd.* + import TypeTestsCasts.* val name: String = "erasure" val description: String = "rewrite types to JVM model" @@ -541,7 +541,7 @@ object Erasure { end Boxing class Typer(erasurePhase: DenotTransformer) extends typer.ReTyper with NoChecking { - import Boxing._ + import Boxing.* def isErased(tree: Tree)(using Context): Boolean = tree match { case TypeApply(Select(qual, _), _) if tree.symbol == defn.Any_typeCast => diff --git a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala index cf62cffd4cdb..b8b10d355ede 100644 --- a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala @@ -28,7 +28,7 @@ import NameKinds.AdaptedClosureName * to performance degradation, and in some cases, stack overflows. 
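 *
 *  The shape of the rewrite, as a sketch:
 *  {{{
 *  val g: Int => Int = x => f(x)   // reduced to `val g = f` when `f` is a
 *                                  // simple reference of a matching type
 *  }}}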
*/ class EtaReduce extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = EtaReduce.name diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala index 41e5b76ca874..fa2492a261d5 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala @@ -1,18 +1,18 @@ package dotty.tools.dotc package transform -import core._ +import core.* import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer -import Contexts._ -import Symbols._ -import Flags._ -import SymDenotations._ +import Contexts.* +import Symbols.* +import Flags.* +import SymDenotations.* -import Decorators._ -import MegaPhase._ +import Decorators.* +import MegaPhase.* import java.io.File.separatorChar -import ValueClasses._ +import ValueClasses.* /** Make private term members that are accessed from another class * non-private by resetting the Private flag and expanding their name. @@ -29,7 +29,7 @@ import ValueClasses._ * and https://github.com/lampepfl/dotty/issues/783 */ class ExpandPrivate extends MiniPhase with IdentityDenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ExpandPrivate.name diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index 6dae564041ee..4347cca7f9d9 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -2,13 +2,13 @@ package dotty.tools package dotc package transform -import core._ +import core.* import Scopes.newScope -import Contexts._, Symbols._, Types._, Flags._, Decorators._, StdNames._, Constants._ -import MegaPhase._ +import Contexts.*, Symbols.*, Types.*, Flags.*, Decorators.*, StdNames.*, Constants.* +import MegaPhase.* import Names.TypeName -import SymUtils._ -import NullOpsDecorator._ +import SymUtils.* +import NullOpsDecorator.* import ast.untpd /** Expand SAM closures that cannot be represented by the JVM as lambdas to anonymous classes. 
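 *
 *  E.g. (illustrative sketch): a `PartialFunction` closure such as
 *  {{{
 *  val pf: PartialFunction[Int, Int] = { case n if n > 0 => n }
 *  }}}
 *  cannot be a JVM lambda, so an anonymous subclass implementing
 *  `isDefinedAt`/`applyOrElse` is generated for it.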
@@ -38,7 +38,7 @@ object ExpandSAMs: case _ => false class ExpandSAMs extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ExpandSAMs.name diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index deb1f665c022..b197d23f0b94 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -2,19 +2,19 @@ package dotty.tools package dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Phases._ -import core.Types._ -import core.Flags._ -import core.Decorators._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Phases.* +import core.Types.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme -import core.Names._ -import core.NameOps._ +import core.Names.* +import core.NameOps.* import core.NameKinds.SuperArgName -import SymUtils._ +import SymUtils.* import dotty.tools.dotc.ast.tpd import collection.mutable @@ -35,8 +35,8 @@ import scala.annotation.tailrec * needs to run after pattern matcher as it can add outer checks and force creation of $outer */ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => - import ExplicitOuter._ - import ast.tpd._ + import ExplicitOuter.* + import ast.tpd.* override def phaseName: String = ExplicitOuter.name @@ -122,7 +122,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => } object ExplicitOuter { - import ast.tpd._ + import ast.tpd.* val name: String = "explicitOuter" val description: String = "add accessors to outer classes from nested ones" diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala index a6f7a29accd7..cd62a55cb8dc 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._, Types._, MegaPhase._, ast.Trees._, Symbols._, Decorators._, Flags._ +import core.* +import Contexts.*, Types.*, MegaPhase.*, ast.Trees.*, Symbols.*, Decorators.*, Flags.* import SymUtils.* /** Transform references of the form @@ -20,7 +20,7 @@ import SymUtils.* * Also replaces idents referring to the self type with ThisTypes. 
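 *
 *  Roughly (sketch; `S` stands for an assumed self-type trait):
 *  {{{
 *  class C { self: S =>
 *    def f = m()   // `m` comes from `S`; the selection goes through a cast of
 *                  // `this` to `S & C.this.type` (sketch of the idea)
 *  }
 *  }}}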
*/ class ExplicitSelf extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ExplicitSelf.name diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala index 19124357a0bd..8b3a783745fb 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala @@ -5,17 +5,17 @@ package dotty.tools.dotc package transform -import dotty.tools.dotc.transform.MegaPhase._ -import ValueClasses._ +import dotty.tools.dotc.transform.MegaPhase.* +import ValueClasses.* import dotty.tools.dotc.ast.tpd import scala.collection.mutable -import core._ -import Types._, Contexts._, Names._, Flags._, DenotTransformers._, Phases._ -import SymDenotations._, Symbols._, StdNames._, Denotations._ +import core.* +import Types.*, Contexts.*, Names.*, Flags.*, DenotTransformers.*, Phases.* +import SymDenotations.*, Symbols.*, StdNames.*, Denotations.* import TypeErasure.{ valueErasure, ErasedValueType } import NameKinds.{ExtMethName, BodyRetainerName} -import Decorators._ -import TypeUtils._ +import Decorators.* +import TypeUtils.* /** * Perform Step 1 in the inline classes SIP: Creates extension methods for all @@ -38,8 +38,8 @@ import TypeUtils._ */ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParameterization { thisPhase => - import tpd._ - import ExtensionMethods._ + import tpd.* + import ExtensionMethods.* override def phaseName: String = ExtensionMethods.name diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index 03639c8af689..ce14d3d3c457 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -1,21 +1,21 @@ package dotty.tools.dotc package transform -import core._ -import Names._ -import dotty.tools.dotc.transform.MegaPhase._ +import core.* +import Names.* +import dotty.tools.dotc.transform.MegaPhase.* import ast.untpd -import Flags._ -import Types._ +import Flags.* +import Types.* import Constants.Constant -import Contexts._ -import Symbols._ -import Decorators._ +import Contexts.* +import Symbols.* +import Decorators.* import scala.collection.mutable -import DenotTransformers._ -import NameOps._ +import DenotTransformers.* +import NameOps.* import NameKinds.OuterSelectName -import StdNames._ +import StdNames.* import TypeUtils.isErasedValueType import config.Feature import inlines.Inlines.inInlineMethod @@ -37,7 +37,7 @@ object FirstTransform { * if (false) A else B ==> B */ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = FirstTransform.name diff --git a/compiler/src/dotty/tools/dotc/transform/Flatten.scala b/compiler/src/dotty/tools/dotc/transform/Flatten.scala index 84f3d35c80b7..31c31a0f16ed 100644 --- a/compiler/src/dotty/tools/dotc/transform/Flatten.scala +++ b/compiler/src/dotty/tools/dotc/transform/Flatten.scala @@ -1,10 +1,10 @@ package dotty.tools.dotc package transform -import core._ +import core.* import DenotTransformers.SymTransformer -import Contexts._ -import Flags._ +import Contexts.* +import Flags.* import SymDenotations.SymDenotation import collection.mutable import MegaPhase.MiniPhase @@ -14,7 +14,7 @@ import scala.compiletime.uninitialized /** Lift nested classes to toplevel */ class Flatten extends 
MiniPhase with SymTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Flatten.name diff --git a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala index 8ca600577244..eafbd68fe478 100644 --- a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala +++ b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala @@ -1,14 +1,14 @@ package dotty.tools.dotc package transform -import core._ -import Types._ -import Contexts._ -import Symbols._ -import Decorators._ -import TypeUtils._ +import core.* +import Types.* +import Contexts.* +import Symbols.* +import Decorators.* +import TypeUtils.* import StdNames.nme -import ast._ +import ast.* /** Provides methods to produce fully parameterized versions of instance methods, * where the `this` of the enclosing class is abstracted out in an extra leading @@ -49,7 +49,7 @@ import ast._ */ trait FullParameterization { - import tpd._ + import tpd.* /** If references to original symbol `referenced` from within fully parameterized method * `derived` should be rewired to some fully parameterized method, the rewiring target symbol, diff --git a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala index cc1c0048b68f..4cf176cfda3a 100644 --- a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala +++ b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala @@ -1,16 +1,16 @@ package dotty.tools.dotc package transform -import core._ +import core.* import Constants.Constant -import Contexts._ -import Flags._ -import Definitions._ -import DenotTransformers._ -import StdNames._ -import Symbols._ -import MegaPhase._ -import Types._ +import Contexts.* +import Flags.* +import Definitions.* +import DenotTransformers.* +import StdNames.* +import Symbols.* +import MegaPhase.* +import Types.* /** This phase adds forwarder for XXL functions `apply` methods that are implemented with a method @@ -23,7 +23,7 @@ import Types._ * is generated. 
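 *
 *  Shape of such a forwarder, as a sketch (one cast per parameter; the
 *  element types are illustrative):
 *  {{{
 *  def apply(xs: Array[Object]): Object =
 *    apply(xs(0).asInstanceOf[Int], xs(1).asInstanceOf[Int] /* ... */)
 *  }}}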
*/ class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = FunctionXXLForwarders.name diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index 21c212e2a28a..77f4f76c33ba 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -2,20 +2,20 @@ package dotty.tools package dotc package transform -import core.Annotations._ -import core.Contexts._ -import core.Phases._ +import core.Annotations.* +import core.Contexts.* +import core.Phases.* import core.Decorators.* import core.Definitions -import core.Flags._ +import core.Flags.* import core.Names.Name -import core.Symbols._ +import core.Symbols.* import core.TypeApplications.{EtaExpansion, TypeParamInfo} import core.TypeErasure.{erasedGlb, erasure, fullErasure, isGenericArrayElement, tupleArity} -import core.Types._ +import core.Types.* import core.classfile.ClassfileConstants -import SymUtils._ -import TypeUtils._ +import SymUtils.* +import TypeUtils.* import config.Printers.transforms import reporting.trace import java.lang.StringBuilder diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala index ad06bfb0a504..eeb2e868ddc8 100644 --- a/compiler/src/dotty/tools/dotc/transform/Getters.scala +++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala @@ -1,17 +1,17 @@ package dotty.tools.dotc package transform -import core._ +import core.* import DenotTransformers.SymTransformer -import Contexts._ +import Contexts.* import SymDenotations.SymDenotation -import Types._ -import Symbols._ -import MegaPhase._ -import Flags._ -import ValueClasses._ -import SymUtils._ -import NameOps._ +import Types.* +import Symbols.* +import MegaPhase.* +import Flags.* +import ValueClasses.* +import SymUtils.* +import NameOps.* /** Performs the following rewritings for fields of a class: @@ -57,7 +57,7 @@ import NameOps._ * This allows subsequent code motions in Flatten. */ class Getters extends MiniPhase with SymTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Getters.name diff --git a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala index 9a36d65babe8..190150ca8a81 100644 --- a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala +++ b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala @@ -1,18 +1,18 @@ package dotty.tools.dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* import ast.TreeTypeMap -import core.Types._ -import core.Flags._ -import core.Decorators._ +import core.Types.* +import core.Flags.* +import core.Decorators.* import collection.mutable -import ast.Trees._ +import ast.Trees.* import core.NameKinds.SuperArgName -import SymUtils._ +import SymUtils.* import core.Decorators.* object HoistSuperArgs { @@ -43,7 +43,7 @@ object HoistSuperArgs { * or, if that is a package, it is made a static method of the class itself. 
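 *
 *  E.g. (sketch; the generated name is illustrative):
 *  {{{
 *  class B(x: Int)
 *  class C extends B({ val y = 1; y + y })
 *  // the block argument is hoisted out of the super call, roughly:
 *  //   private def C$superArg$1: Int = { val y = 1; y + y }
 *  }}}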
*/ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = HoistSuperArgs.name diff --git a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala index 798f34757b35..18333ae506fd 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package transform -import core._ -import MegaPhase._ -import Symbols._, Contexts._, Types._, Decorators._ -import NameOps._ -import Names._ +import core.* +import MegaPhase.* +import Symbols.*, Contexts.*, Types.*, Decorators.* +import NameOps.* +import Names.* import scala.collection.mutable.ListBuffer @@ -26,7 +26,7 @@ import scala.collection.mutable.ListBuffer * This removes placeholders added by inline `unapply`/`unapplySeq` patterns. */ class InlinePatterns extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = InlinePatterns.name diff --git a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala index 047a187bad68..cff1632ffcd2 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala @@ -2,17 +2,17 @@ package dotty.tools package dotc package transform -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.transform.MegaPhase.MiniPhase import dotty.tools.dotc.inlines.Inlines /** Check that `tree.rhs` can be right hand-side of an `inline` value definition. 
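 *
 *  E.g. (sketch):
 *  {{{
 *  inline val a = 1                            // ok: literal constant
 *  inline val b = scala.util.Random.nextInt()  // rejected: not a constant
 *  }}}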
*/ class InlineVals extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = InlineVals.name diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index 10f73fa94e08..bfc44f868cb6 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -1,14 +1,14 @@ package dotty.tools.dotc package transform -import core._ -import Flags._ -import Contexts._ -import Symbols._ -import SymUtils._ +import core.* +import Flags.* +import Contexts.* +import Symbols.* +import SymUtils.* import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.ast.Trees._ -import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.ast.Trees.* +import dotty.tools.dotc.quoted.* import dotty.tools.dotc.inlines.Inlines import dotty.tools.dotc.ast.TreeMapWithImplicits import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer @@ -19,7 +19,7 @@ import scala.collection.mutable.ListBuffer /** Inlines all calls to inline methods that are not in an inline method or a quote */ class Inlining extends MacroTransform { - import tpd._ + import tpd.* override def phaseName: String = Inlining.name diff --git a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala index 9802de34a6e0..9f99e7a6fbd3 100644 --- a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala +++ b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala @@ -2,14 +2,14 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._ -import Symbols._ -import Flags._ - -import Decorators._ -import MegaPhase._ -import Names._ +import core.* +import Contexts.* +import Symbols.* +import Flags.* + +import Decorators.* +import MegaPhase.* +import Names.* import Constants.Constant import scala.compiletime.uninitialized @@ -20,7 +20,7 @@ import scala.compiletime.uninitialized * It does this by generating a call to dotty.tools.dotc.util.Stats.doRecord. 
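 *
 *  Roughly (sketch; the label and count are illustrative):
 *  {{{
 *  expr   // becomes: { Stats.doRecord("alloc", 1); expr }
 *  }}}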
*/ class Instrumentation extends MiniPhase { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Instrumentation.name diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala index c95500d856be..c2fdccc2861e 100644 --- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala @@ -3,11 +3,11 @@ package transform import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Names.TermName -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.transform.MegaPhase.MiniPhase object InterceptedMethods { @@ -23,7 +23,7 @@ object InterceptedMethods { * using the most precise overload available */ class InterceptedMethods extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = InterceptedMethods.name diff --git a/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled b/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled index 68b493a0b9db..f6df2be19dc4 100644 --- a/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled +++ b/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled @@ -1,11 +1,11 @@ package dotty.tools.dotc package transform -import dotty.tools.dotc.util.Positions._ +import dotty.tools.dotc.util.Positions.* import MegaPhase.MiniPhase -import core._ -import Contexts._, Types._, Constants._, Decorators._, Symbols._ -import TypeUtils._, TypeErasure._, Flags._ +import core.* +import Contexts.*, Types.*, Constants.*, Decorators.*, Symbols.* +import TypeUtils.*, TypeErasure.*, Flags.* /** Implements partial evaluation of `sc.isInstanceOf[Sel]` according to: * @@ -31,7 +31,7 @@ import TypeUtils._, TypeErasure._, Flags._ */ class IsInstanceOfEvaluator extends MiniPhase { - import dotty.tools.dotc.ast.tpd._ + import dotty.tools.dotc.ast.tpd.* val phaseName = "isInstanceOfEvaluator" diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala index 5be43fd56952..84f90e289e43 100644 --- a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala +++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala @@ -1,19 +1,19 @@ package dotty.tools.dotc package transform -import MegaPhase._ +import MegaPhase.* import core.Denotations.NonSymSingleDenotation -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Flags._ -import core.Decorators._ +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme -import core.Names._ -import core.NameOps._ +import core.Names.* +import core.NameOps.* import core.NameKinds.ExpandPrefixName -import SymUtils._ +import SymUtils.* import ExplicitOuter.outer import util.Store import collection.mutable.{HashMap, LinkedHashMap, ListBuffer} @@ -21,7 +21,7 @@ import collection.mutable.{HashMap, LinkedHashMap, ListBuffer} import scala.compiletime.uninitialized object LambdaLift: - import 
ast.tpd._ + import ast.tpd.* val name: String = "lambdaLift" val description: String = "lifts out nested functions to class scope" @@ -251,8 +251,8 @@ end LambdaLift * } */ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase => - import LambdaLift._ - import ast.tpd._ + import LambdaLift.* + import ast.tpd.* override def phaseName: String = LambdaLift.name diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index 4fabaf99f843..0aaecd261387 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -22,8 +22,8 @@ import scala.collection.mutable import scala.compiletime.uninitialized class LazyVals extends MiniPhase with IdentityDenotTransformer { - import LazyVals._ - import tpd._ + import LazyVals.* + import tpd.* /** * The map contains the list of the offset trees. @@ -456,8 +456,8 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = { - import dotty.tools.dotc.core.Types._ - import dotty.tools.dotc.core.Flags._ + import dotty.tools.dotc.core.Types.* + import dotty.tools.dotc.core.Flags.* val claz = x.symbol.owner.asClass val thizClass = Literal(Constant(claz.info)) diff --git a/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala b/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala index e7ff6d10c222..302001347d67 100644 --- a/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala +++ b/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala @@ -2,9 +2,9 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._, Symbols._, Decorators._ -import MegaPhase._ +import core.* +import Contexts.*, Symbols.*, Decorators.* +import MegaPhase.* /** Rewrite `{ stats; expr}.f(args)` to `{ stats; expr.f(args) }` and * `{ stats; expr }(args)` to `{ stats; expr(args) }` before proceeding, @@ -12,7 +12,7 @@ import MegaPhase._ * collapse applies of IFTs (this is done in Erasure). */ class LetOverApply extends MiniPhase: - import ast.tpd._ + import ast.tpd.* override def phaseName: String = LetOverApply.name diff --git a/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled b/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled index 626cb9687df4..b9e6efe1b06b 100644 --- a/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled +++ b/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled @@ -1,17 +1,17 @@ package dotty.tools.dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Flags._ -import core.Decorators._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme -import ast.Trees._ +import ast.Trees.* import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Constants._ +import dotty.tools.dotc.core.Constants.* /** This phase rewrites idempotent expressions with constant types to Literals. * The constant types are eliminated by erasure, so we need to keep @@ -21,7 +21,7 @@ import dotty.tools.dotc.core.Constants._ * in the type of the literal. 
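 *
 *  E.g. (sketch): with a value `x: 42` in scope,
 *  {{{
 *  val y: 42 = x   // `x` is idempotent and has the constant type 42,
 *                  // so it is rewritten to the literal 42
 *  }}}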
*/ class Literalize extends MiniPhase { thisTransform => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = "literalize" diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala index 7878b5795703..40c6eee1382c 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.core.DenotTransformers.DenotTransformer import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.MacroClassLoader import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.quoted.* import dotty.tools.dotc.util.SrcPos import scala.quoted.runtime.impl.{QuotesImpl, SpliceScope} diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala index 7bb7ed365ebe..887a962f7a65 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -1,17 +1,17 @@ package dotty.tools.dotc package transform -import core._ -import Phases._ -import ast.Trees._ -import Contexts._ +import core.* +import Phases.* +import ast.Trees.* +import Contexts.* /** A base class for transforms. * A transform contains a compiler phase which applies a tree transformer. */ abstract class MacroTransform extends Phase { - import ast.tpd._ + import ast.tpd.* override def run(using Context): Unit = { val unit = ctx.compilationUnit diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala index eff634b485b8..252babe7058f 100644 --- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala @@ -4,8 +4,8 @@ package transform import scala.compiletime.uninitialized -import core._ -import Contexts._, Phases._, Symbols._, Decorators._ +import core.* +import Contexts.*, Phases.*, Symbols.*, Decorators.* import Flags.PackageVal import staging.StagingLevel.* @@ -16,7 +16,7 @@ import staging.StagingLevel.* * is described in his thesis. */ object MegaPhase { - import ast.tpd._ + import ast.tpd.* /** The base class of tree transforms. 
For each kind of tree K, there are * two methods which can be overridden: @@ -138,10 +138,10 @@ object MegaPhase { singletonGroup.run } } -import MegaPhase._ +import MegaPhase.* class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { - import ast.tpd._ + import ast.tpd.* override val phaseName: String = if (miniPhases.length == 1) miniPhases(0).phaseName diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala index 91f394866035..120f2f66cd80 100644 --- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala +++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala @@ -1,22 +1,22 @@ package dotty.tools.dotc package transform -import core._ -import DenotTransformers._ -import Contexts._ +import core.* +import DenotTransformers.* +import Contexts.* import Phases.* import SymDenotations.SymDenotation -import Denotations._ -import Symbols._ -import SymUtils._ -import Constants._ -import MegaPhase._ -import NameOps._ -import Flags._ -import Decorators._ +import Denotations.* +import Symbols.* +import SymUtils.* +import Constants.* +import MegaPhase.* +import NameOps.* +import Flags.* +import Decorators.* import StdNames.nme -import sjs.JSSymUtils._ +import sjs.JSSymUtils.* import util.Store import scala.compiletime.uninitialized @@ -47,7 +47,7 @@ object Memoize { */ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => import Memoize.MyState - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Memoize.name diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala index 5ca09dd6188f..33864a33a047 100644 --- a/compiler/src/dotty/tools/dotc/transform/Mixin.scala +++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala @@ -2,21 +2,21 @@ package dotty.tools package dotc package transform -import core._ -import MegaPhase._ -import Contexts._ -import Flags._ -import SymUtils._ -import Symbols._ -import SymDenotations._ -import Types._ -import Decorators._ -import DenotTransformers._ -import StdNames._ -import Names._ -import NameKinds._ -import NameOps._ -import ast.Trees._ +import core.* +import MegaPhase.* +import Contexts.* +import Flags.* +import SymUtils.* +import Symbols.* +import SymDenotations.* +import Types.* +import Decorators.* +import DenotTransformers.* +import StdNames.* +import Names.* +import NameKinds.* +import NameOps.* +import ast.Trees.* object Mixin { val name: String = "mixin" @@ -111,7 +111,7 @@ object Mixin { * are symbolic. 
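A minimal sketch of the `transformK` contract described above; the phase name and body are invented, while `MiniPhase`, `phaseName`, `description` and the `transformIdent` hook match the API used by the phases in this patch.

import dotty.tools.dotc.ast.tpd.*
import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.transform.MegaPhase.MiniPhase

class DemoMiniPhase extends MiniPhase:
  override def phaseName: String = "demoMiniPhase"               // invented name
  override def description: String = "demo of the transformK hooks"
  // For each tree kind K there is a transformK hook; returning the tree
  // unchanged is the identity transformation.
  override def transformIdent(tree: Ident)(using Context): Tree = tree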
*/ class Mixin extends MiniPhase with SymTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Mixin.name @@ -184,7 +184,7 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => override def transformTemplate(impl: Template)(using Context): Template = { val cls = impl.symbol.owner.asClass val ops = new MixinOps(cls, thisPhase) - import ops._ + import ops.* def traitDefs(stats: List[Tree]): List[Tree] = { stats.flatMap { diff --git a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala index fa1c09806893..d40a2a7eb17e 100644 --- a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala +++ b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala @@ -1,15 +1,15 @@ package dotty.tools.dotc package transform -import core._ -import Symbols._, Types._, Contexts._, DenotTransformers._, Flags._ -import util.Spans._ -import SymUtils._ -import StdNames._, NameOps._ +import core.* +import Symbols.*, Types.*, Contexts.*, DenotTransformers.*, Flags.* +import util.Spans.* +import SymUtils.* +import StdNames.*, NameOps.* import typer.Nullables class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context) { - import ast.tpd._ + import ast.tpd.* val superCls: Symbol = cls.superClass val mixins: List[ClassSymbol] = cls.mixins diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala index db96aeefe231..a417d41ffd56 100644 --- a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala +++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala @@ -1,26 +1,26 @@ package dotty.tools.dotc package transform -import core._ -import Flags._ -import Contexts._ -import Symbols._ +import core.* +import Flags.* +import Contexts.* +import Symbols.* import DenotTransformers.SymTransformer import Types.MethodType import Annotations.Annotation import SymDenotations.SymDenotation import Names.Name import StdNames.nme -import NameOps._ +import NameOps.* -import ast._ +import ast.* -import SymUtils._ -import MegaPhase._ +import SymUtils.* +import MegaPhase.* /** Move static methods from companion to the class itself */ class MoveStatics extends MiniPhase with SymTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = MoveStatics.name diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala index a75d6da9dd6a..4bdcc8d9606d 100644 --- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala +++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala @@ -1,15 +1,15 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._, Symbols._, Types._, Flags._, StdNames._ -import MegaPhase._ +import core.* +import Contexts.*, Symbols.*, Types.*, Flags.*, StdNames.* +import MegaPhase.* import NameKinds.NonLocalReturnKeyName import config.SourceVersion.* import Decorators.em object NonLocalReturns { - import ast.tpd._ + import ast.tpd.* val name: String = "nonLocalReturns" val description: String = "expand non-local returns" @@ -26,8 +26,8 @@ class NonLocalReturns extends MiniPhase { override def description: String = NonLocalReturns.description - import NonLocalReturns._ - import ast.tpd._ + import NonLocalReturns.* + import ast.tpd.* override def runsAfter: Set[String] = Set(ElimByName.name) diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala 
b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index 92039a8b9af8..4020291dded0 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package transform -import core._ -import Flags._, Symbols._, Contexts._, Scopes._, Decorators._, Types.Type +import core.* +import Flags.*, Symbols.*, Contexts.*, Scopes.*, Decorators.*, Types.Type import NameKinds.DefaultGetterName -import NullOpsDecorator._ +import NullOpsDecorator.* import collection.immutable.BitSet import scala.annotation.tailrec import cc.isCaptureChecking diff --git a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala index 8c93ffb90232..82ba3b7a1b7f 100644 --- a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala +++ b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala @@ -2,10 +2,10 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._, Types._, Symbols._, Flags._, TypeUtils._, DenotTransformers._, StdNames._ -import Decorators._ -import MegaPhase._ +import core.* +import Contexts.*, Types.*, Symbols.*, Flags.*, TypeUtils.*, DenotTransformers.*, StdNames.* +import Decorators.* +import MegaPhase.* import NameKinds.ParamAccessorName /** For all private parameter accessors diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index a648a419d594..8f5eec693609 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -2,22 +2,22 @@ package dotty.tools package dotc package transform -import core._ -import MegaPhase._ -import Symbols._, Contexts._, Types._, StdNames._, NameOps._ +import core.* +import MegaPhase.* +import Symbols.*, Contexts.*, Types.*, StdNames.*, NameOps.* import patmat.SpaceEngine -import util.Spans._ +import util.Spans.* import typer.Applications.* -import SymUtils._ +import SymUtils.* import TypeUtils.* import Annotations.* -import Flags._, Constants._ -import Decorators._ +import Flags.*, Constants.* +import Decorators.* import NameKinds.{PatMatStdBinderName, PatMatAltsName, PatMatResultName} import config.Printers.patmatch -import reporting._ -import ast._ -import util.Property._ +import reporting.* +import ast.* +import util.Property.* import scala.annotation.tailrec import scala.collection.mutable @@ -27,8 +27,8 @@ import scala.collection.mutable * where every pattern is an integer or string constant */ class PatternMatcher extends MiniPhase { - import ast.tpd._ - import PatternMatcher._ + import ast.tpd.* + import PatternMatcher.* override def phaseName: String = PatternMatcher.name @@ -56,7 +56,7 @@ class PatternMatcher extends MiniPhase { } object PatternMatcher { - import ast.tpd._ + import ast.tpd.* val name: String = "patternMatcher" val description: String = "compile pattern matches" diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala index b368e47bf0b3..3e3b14b6d0f3 100644 --- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala @@ -1,25 +1,25 @@ package dotty.tools.dotc package transform -import core._ -import Decorators._ -import Flags._ -import Types._ -import Contexts._ -import Symbols._ -import 
Constants._ -import ast.Trees._ +import core.* +import Decorators.* +import Flags.* +import Types.* +import Contexts.* +import Symbols.* +import Constants.* +import ast.Trees.* import ast.untpd import ast.TreeTypeMap -import SymUtils._ -import NameKinds._ +import SymUtils.* +import NameKinds.* import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.config.ScalaRelease.* -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.quoted.* import dotty.tools.dotc.inlines.Inlines import scala.annotation.constructorOnly @@ -69,8 +69,8 @@ import scala.collection.mutable * */ class PickleQuotes extends MacroTransform { - import PickleQuotes._ - import tpd._ + import PickleQuotes.* + import tpd.* override def phaseName: String = PickleQuotes.name @@ -207,7 +207,7 @@ class PickleQuotes extends MacroTransform { } object PickleQuotes { - import tpd._ + import tpd.* val name: String = "pickleQuotes" val description: String = "turn quoted trees into explicit run-time data structures" diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 252bb6daeae5..4aea14fed2fc 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package transform -import core._ -import Contexts._ -import Decorators._ -import tasty._ +import core.* +import Contexts.* +import Decorators.* +import tasty.* import config.Printers.{noPrinter, pickling} import java.io.PrintStream -import Periods._ -import Phases._ -import Symbols._ +import Periods.* +import Phases.* +import Symbols.* import Flags.Module import reporting.{ThrowingReporter, Profile, Message} import collection.mutable @@ -30,7 +30,7 @@ object Pickler { /** This phase pickles trees */ class Pickler extends Phase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = Pickler.name diff --git a/compiler/src/dotty/tools/dotc/transform/PostInlining.scala b/compiler/src/dotty/tools/dotc/transform/PostInlining.scala index 0cfd3650ad0b..26c956d85d18 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostInlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostInlining.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc package transform -import core._ +import core.* import Contexts.* import DenotTransformers.IdentityDenotTransformer import SyntheticMembers.* diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index ff322dad9ab6..90a3523561a9 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -4,20 +4,20 @@ package transform import dotty.tools.dotc.ast.{Trees, tpd, untpd, desugar} import scala.collection.mutable -import core._ +import core.* import dotty.tools.dotc.typer.Checking import dotty.tools.dotc.inlines.Inlines import dotty.tools.dotc.typer.VarianceChecker import typer.ErrorReporting.errorTree -import Types._, Contexts._, Names._, Flags._, DenotTransformers._, Phases._ -import SymDenotations._, StdNames._, Annotations._, Trees._, Scopes._ -import Decorators._ -import Symbols._, SymUtils._, NameOps._ +import Types.*, Contexts.*, Names.*, Flags.*, DenotTransformers.*, Phases.* +import SymDenotations.*, 
StdNames.*, Annotations.*, Trees.*, Scopes.* +import Decorators.* +import Symbols.*, SymUtils.*, NameOps.* import ContextFunctionResults.annotateContextResults import config.Printers.typr import config.Feature import util.SrcPos -import reporting._ +import reporting.* import NameKinds.WildcardParamName object PostTyper { @@ -61,7 +61,7 @@ object PostTyper { * they do not warrant their own group of miniphases before pickling. */ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => - import tpd._ + import tpd.* override def phaseName: String = PostTyper.name diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala index 6d8f7bdb32cb..b6df581beee2 100644 --- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc package transform -import core.Contexts._ -import core.NameKinds._ -import core.Symbols._ -import core.Flags._ -import core.Decorators._ +import core.Contexts.* +import core.NameKinds.* +import core.Symbols.* +import core.Flags.* +import core.Decorators.* import core.Names.TermName import MegaPhase.MiniPhase import config.Printers.transforms @@ -48,7 +48,7 @@ object ProtectedAccessors { } class ProtectedAccessors extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ProtectedAccessors.name diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala index 17f2d11ccfec..f0de71dfc239 100644 --- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala @@ -1,16 +1,16 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._ +import core.* +import Contexts.* import DenotTransformers.SymTransformer -import Flags._ -import SymDenotations._ -import Symbols._ +import Flags.* +import SymDenotations.* +import Symbols.* import typer.RefChecks import MegaPhase.MiniPhase import ast.tpd -import SymUtils._ +import SymUtils.* import config.Feature import Decorators.* import dotty.tools.dotc.core.Types.MethodType @@ -23,8 +23,8 @@ import dotty.tools.dotc.core.Types.MethodType * as IsInstanceOfChecker don't give false negatives. 
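As a minimal runnable sketch of the `erased` terms that PruneErasedDefs drops (all names invented for illustration): the erased parameter is checked by the typer but leaves no trace after this phase, which is why erased expressions must never be evaluated at run time.

import scala.language.experimental.erasedDefinitions

final class Evidence
given Evidence = Evidence()

// `ev` participates in type checking only; it is pruned before code generation.
def guarded(x: Int)(using erased ev: Evidence): Int = x + 1

@main def erasedDemo(): Unit = println(guarded(41))   // prints 42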
*/ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => - import tpd._ - import PruneErasedDefs._ + import tpd.* + import PruneErasedDefs.* override def phaseName: String = PruneErasedDefs.name @@ -66,7 +66,7 @@ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => } object PruneErasedDefs { - import tpd._ + import tpd.* val name: String = "pruneErasedDefs" val description: String = "drop erased definitions and simplify erased expressions" diff --git a/compiler/src/dotty/tools/dotc/transform/PureStats.scala b/compiler/src/dotty/tools/dotc/transform/PureStats.scala index b747d7d6b9e4..22fdef3aaab6 100644 --- a/compiler/src/dotty/tools/dotc/transform/PureStats.scala +++ b/compiler/src/dotty/tools/dotc/transform/PureStats.scala @@ -2,10 +2,10 @@ package dotty.tools.dotc package transform import ast.{Trees, tpd} -import core._, core.Decorators._ -import MegaPhase._ -import Types._, Contexts._, Flags._, DenotTransformers._ -import Symbols._, StdNames._, Trees._ +import core.*, core.Decorators.* +import MegaPhase.* +import Types.*, Contexts.*, Flags.*, DenotTransformers.* +import Symbols.*, StdNames.*, Trees.* object PureStats { val name: String = "pureStats" @@ -15,7 +15,7 @@ object PureStats { /** Remove pure statements in blocks */ class PureStats extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = PureStats.name diff --git a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala index 6e73d683fa2c..2fd9f923d98e 100644 --- a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala @@ -1,22 +1,22 @@ package dotty.tools.dotc package transform -import core._ -import Decorators._ -import Flags._ -import Types._ -import Contexts._ -import Symbols._ -import SymUtils._ -import NameKinds._ +import core.* +import Decorators.* +import Flags.* +import Types.* +import Contexts.* +import Symbols.* +import SymUtils.* +import NameKinds.* import dotty.tools.dotc.ast.tpd -import tpd._ +import tpd.* import scala.collection.mutable -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.quoted.* import scala.annotation.constructorOnly diff --git a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala index d6c11fe36748..8ed1edcd0308 100644 --- a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala @@ -1,16 +1,16 @@ package dotty.tools.dotc package transform -import core._ -import ast.tpd._ -import Contexts._ -import MegaPhase._ -import Annotations._ +import core.* +import ast.tpd.* +import Contexts.* +import MegaPhase.* +import Annotations.* import Symbols.defn -import Constants._ -import Types._ -import Decorators._ -import Flags._ +import Constants.* +import Types.* +import Decorators.* +import Flags.* import scala.collection.mutable diff --git a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala index 99b6be1eea8a..e864178af658 100644 --- a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala @@ -1,19 +1,19 @@ package dotty.tools.dotc package transform -import core._ -import MegaPhase._ -import Contexts._ -import Flags._ -import SymUtils._ -import Symbols._ -import Decorators._ -import DenotTransformers._ -import Names._ -import NameOps._ -import NameKinds._ -import NullOpsDecorator._ -import ResolveSuper._ +import core.* +import MegaPhase.* +import Contexts.* +import Flags.* +import SymUtils.* +import Symbols.* +import Decorators.* +import DenotTransformers.* +import Names.* +import NameOps.* +import NameKinds.* +import NullOpsDecorator.* +import ResolveSuper.* import reporting.IllegalSuperAccessor /** This phase implements super accessors in classes that need them. @@ -31,7 +31,7 @@ import reporting.IllegalSuperAccessor * Mixin, which runs after erasure. */ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = ResolveSuper.name @@ -45,7 +45,7 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase = override def transformTemplate(impl: Template)(using Context): Template = { val cls = impl.symbol.owner.asClass val ops = new MixinOps(cls, thisPhase) - import ops._ + import ops.* def superAccessors(mixin: ClassSymbol): List[Tree] = for superAcc <- mixin.info.decls.filter(_.isSuperAccessor) @@ -66,7 +66,7 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase = assert(ddef.rhs.isEmpty, ddef.symbol) val cls = meth.owner.asClass val ops = new MixinOps(cls, thisPhase) - import ops._ + import ops.* DefDef(meth, forwarderRhsFn(rebindSuper(cls, meth))) } else ddef diff --git a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala index d01be0419a4d..e66f5e4b37b8 100644 --- a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala +++ b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala @@ -1,11 +1,11 @@ package dotty.tools.dotc package transform -import core._ +import core.* import DenotTransformers.IdentityDenotTransformer -import Contexts._ -import Symbols._ -import Scopes._ +import Contexts.* +import Symbols.* +import Scopes.* import MegaPhase.MiniPhase /** The preceding lambda lift and flatten phases move symbols to different scopes @@ -13,7 +13,7 @@ import MegaPhase.MiniPhase * class scopes contain the symbols defined in them. 
*/ class RestoreScopes extends MiniPhase with IdentityDenotTransformer { thisPhase => - import ast.tpd._ + import ast.tpd.* override def phaseName: String = RestoreScopes.name diff --git a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala index 1df9809c2f62..6177e5d0839d 100644 --- a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala @@ -2,13 +2,13 @@ package dotty.tools.dotc package transform import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core._ -import dotty.tools.dotc.transform.MegaPhase._ -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.* +import dotty.tools.dotc.transform.MegaPhase.* +import dotty.tools.dotc.transform.SymUtils.* /** Removes `Select`s that would be compiled into `GetStatic`. * @@ -43,7 +43,7 @@ import dotty.tools.dotc.transform.SymUtils._ * @author Dmytro Petrashko */ class SelectStatic extends MiniPhase with IdentityDenotTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = SelectStatic.name diff --git a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala index 2f586104c4e3..20f4e6d85daa 100644 --- a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala +++ b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import core._ -import dotty.tools.dotc.transform.MegaPhase._ -import Contexts._ +import core.* +import dotty.tools.dotc.transform.MegaPhase.* +import Contexts.* /** A transformer that eliminates SeqLiteral's, transforming `SeqLiteral(elems)` to an operation * equivalent to @@ -15,7 +15,7 @@ import Contexts._ * keep a precise type after erasure, whereas SeqLiterals only get the erased type `Seq`, */ class SeqLiterals extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = SeqLiterals.name diff --git a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala index d17dbbecc555..f62b1f5f01f2 100644 --- a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala +++ b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.transform import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Phases.Phase /** Set the `rootTreeOrProvider` property of class symbols. 
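A small runnable example of where the `SeqLiteral`s eliminated above arise (names invented for illustration): a varargs call packs its arguments into a `SeqLiteral`, which the phase turns into an operation that wraps an underlying array as a `Seq`, keeping a precise array type through erasure.

// The arguments (1, 2, 3) are typed as a SeqLiteral; after the phase the call
// receives roughly an Array(1, 2, 3) wrapped as a Seq.
def sum3(xs: Int*): Int = xs.sum

@main def seqLiteralDemo(): Unit = println(sum3(1, 2, 3))   // prints 6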
*/ diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala index 5c5c02c1bc75..fd314b94e50c 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import ast.Trees._, ast.tpd, core._ -import Contexts._, Types._, Decorators._, Symbols._, DenotTransformers._ -import SymDenotations._, Scopes._, StdNames._, NameOps._, Names._ +import ast.Trees.*, ast.tpd, core.* +import Contexts.*, Types.*, Decorators.*, Symbols.*, DenotTransformers.* +import SymDenotations.*, Scopes.*, StdNames.*, NameOps.*, Names.* import MegaPhase.MiniPhase import scala.collection.mutable @@ -18,7 +18,7 @@ import scala.collection.mutable * different standard library. */ class SpecializeApplyMethods extends MiniPhase with InfoTransformer { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = SpecializeApplyMethods.name diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala index c50eaddd3213..f41900d31b66 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import ast.Trees._, ast.tpd, core._ -import Contexts._, Types._, Decorators._, Symbols._, DenotTransformers._ -import SymDenotations._, Scopes._, StdNames._, NameOps._, Names._ +import ast.Trees.*, ast.tpd, core.* +import Contexts.*, Types.*, Decorators.*, Symbols.*, DenotTransformers.* +import SymDenotations.*, Scopes.*, StdNames.*, NameOps.*, Names.* import MegaPhase.MiniPhase @@ -11,7 +11,7 @@ import MegaPhase.MiniPhase * specialized form. 
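For context on the specialized form mentioned above: applying a `Function1[Int, Int]` can dispatch through the Int-specialized `apply$mcII$sp` entry point instead of the generic boxed `apply`. A minimal example of user code affected by this rewrite (the claim about the exact entry point is the editor's reading, not stated in the patch):

@main def specializeDemo(): Unit =
  val inc: Int => Int = _ + 1   // a Function1[Int, Int]
  println(inc(41))              // may dispatch through the Int-specialized apply, avoiding boxing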
*/ class SpecializeFunctions extends MiniPhase { - import ast.tpd._ + import ast.tpd.* override def phaseName: String = SpecializeFunctions.name diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index 46a8571b4f30..1f7b65ba66a6 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -7,14 +7,14 @@ import java.io.{PrintWriter, StringWriter} import java.lang.reflect.{InvocationTargetException, Method => JLRMethod} import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.NameKinds.FlatName import dotty.tools.dotc.core.Names.Name -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Denotations.staticRef import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.core.Constants.Constant @@ -30,7 +30,7 @@ import scala.reflect.ClassTag import dotty.tools.dotc.quoted.{PickledQuotes, QuoteUtils} import scala.quoted.Quotes -import scala.quoted.runtime.impl._ +import scala.quoted.runtime.impl.* import dotty.tools.dotc.core.NameKinds /** Utility class to splice quoted expressions */ diff --git a/compiler/src/dotty/tools/dotc/transform/Splicing.scala b/compiler/src/dotty/tools/dotc/transform/Splicing.scala index dd95d5a9ca1e..0c64a366686d 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicing.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicing.scala @@ -1,25 +1,25 @@ package dotty.tools.dotc package transform -import core._ -import Decorators._ -import Flags._ -import Types._ -import Contexts._ -import Symbols._ -import Constants._ -import ast.Trees._ +import core.* +import Decorators.* +import Flags.* +import Types.* +import Contexts.* +import Symbols.* +import Constants.* +import ast.Trees.* import ast.{TreeTypeMap, untpd} -import util.Spans._ -import SymUtils._ -import NameKinds._ +import util.Spans.* +import SymUtils.* +import NameKinds.* import dotty.tools.dotc.ast.tpd import scala.collection.mutable -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.quoted._ +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.quoted.* import dotty.tools.dotc.config.ScalaRelease.* import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.staging.QuoteTypeTags @@ -72,7 +72,7 @@ object Splicing: * */ class Splicing extends MacroTransform: - import tpd._ + import tpd.* override def phaseName: String = Splicing.name diff --git a/compiler/src/dotty/tools/dotc/transform/Staging.scala b/compiler/src/dotty/tools/dotc/transform/Staging.scala index 43cbe80ce8c4..f7fac1981fb2 100644 --- a/compiler/src/dotty/tools/dotc/transform/Staging.scala +++ b/compiler/src/dotty/tools/dotc/transform/Staging.scala @@ -2,15 +2,15 @@ package dotty.tools.dotc package transform import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import 
dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Phases.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.inlines.Inlines import dotty.tools.dotc.util.SrcPos -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.transform.SymUtils.* import dotty.tools.dotc.staging.StagingLevel.* import dotty.tools.dotc.staging.CrossStageSafety import dotty.tools.dotc.staging.HealType @@ -20,7 +20,7 @@ import dotty.tools.dotc.staging.HealType * See `CrossStageSafety` */ class Staging extends MacroTransform { - import tpd._ + import tpd.* override def phaseName: String = Staging.name diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index b15c50158b75..2d8d51b4059f 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -5,13 +5,13 @@ package transform import dotty.tools.dotc.ast.{Trees, tpd} import scala.collection.mutable import ValueClasses.isMethodWithExtension -import core._ -import Contexts._, Flags._, Symbols._, Names._, StdNames._, NameOps._, Trees._ -import TypeUtils._, SymUtils._ +import core.* +import Contexts.*, Flags.*, Symbols.*, Names.*, StdNames.*, NameOps.*, Trees.* +import TypeUtils.*, SymUtils.* import DenotTransformers.DenotTransformer -import Symbols._ -import util.Spans._ -import Decorators._ +import Symbols.* +import util.Spans.* +import Decorators.* import NameKinds.{ SuperAccessorName, ExpandPrefixName } /** This class adds super accessors for all super calls that either @@ -32,7 +32,7 @@ import NameKinds.{ SuperAccessorName, ExpandPrefixName } */ class SuperAccessors(thisPhase: DenotTransformer) { - import tpd._ + import tpd.* /** Some parts of trees will get a new owner in subsequent phases. * These are value class methods, which will become extension methods. 
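A minimal runnable sketch of the situation that forces a super accessor (trait and method names invented): a `super` call inside a trait cannot be bound at the call site once mixin composition happens, so the compiler routes it through a generated accessor.

trait Base:
  def msg: String = "base"

trait Loud extends Base:
  override def msg: String = super.msg.toUpperCase   // super call in a trait: gets a super accessor

@main def superAccessorDemo(): Unit = println(new Loud {}.msg)   // prints BASE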
diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala index 79b5ecbf30c7..ddee2588b152 100644 --- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala @@ -1,21 +1,21 @@ package dotty.tools.dotc package transform -import core._ -import Types._ -import Contexts._ -import Symbols._ -import SymDenotations._ -import Names._ -import NameOps._ -import StdNames._ -import NameKinds._ -import Flags._ +import core.* +import Types.* +import Contexts.* +import Symbols.* +import SymDenotations.* +import Names.* +import NameOps.* +import StdNames.* +import NameKinds.* +import Flags.* import ValueClasses.isDerivedValueClass -import Decorators._ +import Decorators.* import Constants.Constant import Annotations.Annotation -import Phases._ +import Phases.* import ast.tpd.Literal import dotty.tools.dotc.transform.sjs.JSSymUtils.sjsNeedsField diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala index 200e7eb2c215..9d19251638db 100644 --- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala @@ -1,21 +1,21 @@ package dotty.tools.dotc package transform -import core._ -import Symbols._, Types._, Contexts._, Names._, StdNames._, Constants._, SymUtils._ -import Flags._ -import DenotTransformers._ -import Decorators._ -import NameOps._ +import core.* +import Symbols.*, Types.*, Contexts.*, Names.*, StdNames.*, Constants.*, SymUtils.* +import Flags.* +import DenotTransformers.* +import Decorators.* +import NameOps.* import Annotations.Annotation import typer.ProtoTypes.constrained import ast.untpd import ValueClasses.isDerivedValueClass -import SymUtils._ +import SymUtils.* import util.Property import util.Spans.Span import config.Printers.derive -import NullOpsDecorator._ +import NullOpsDecorator.* object SyntheticMembers { @@ -53,8 +53,8 @@ object SyntheticMembers { * def hashCode(): Int */ class SyntheticMembers(thisPhase: DenotTransformer) { - import SyntheticMembers._ - import ast.tpd._ + import SyntheticMembers.* + import ast.tpd.* private var myValueSymbols: List[Symbol] = Nil private var myCaseSymbols: List[Symbol] = Nil diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 45330b6e9f5d..43c740ce7d38 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -107,7 +107,7 @@ import scala.compiletime.uninitialized * moved after erasure and adapted to emit `Labeled` blocks by Sébastien Doeraene */ class TailRec extends MiniPhase { - import tpd._ + import tpd.* override def phaseName: String = TailRec.name diff --git a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala index ffed65f7676e..2be41ba208f1 100644 --- a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala +++ b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package transform -import MegaPhase._ -import core.DenotTransformers._ -import core.Contexts._ +import MegaPhase.* +import core.DenotTransformers.* +import core.Contexts.* import ast.tpd /** This phase transforms wildcards in valdefs with their default value. 
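A small runnable example of the valdef wildcards meant above (names invented): an uninitialized field gets the default value of its type, here 0 for `Int`. Current Scala 3 sources spell the wildcard with `scala.compiletime.uninitialized`, which this patch series itself also adopts.

import scala.compiletime.uninitialized

class Cell:
  var value: Int = uninitialized   // rewritten to the type's default value, 0

@main def wildcardDemo(): Unit = println(Cell().value)   // prints 0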
@@ -12,7 +12,7 @@ import ast.tpd * */ class TransformWildcards extends MiniPhase with IdentityDenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = TransformWildcards.name diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 2badb4cfc1c3..8b7fdaf301d0 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -3,24 +3,24 @@ package dotc package transform import core.Names.Name -import core.DenotTransformers._ -import core.SymDenotations._ -import core.Contexts._ -import core.Symbols._ -import core.Types._ -import core.Flags._ -import core.StdNames._ +import core.DenotTransformers.* +import core.SymDenotations.* +import core.Contexts.* +import core.Symbols.* +import core.Types.* +import core.Flags.* +import core.StdNames.* import core.NameKinds.{DocArtifactName, OuterSelectName} -import core.Decorators._ -import core.Phases._ +import core.Decorators.* +import core.Phases.* import core.Mode -import typer._ -import reporting._ -import ast.Trees._ +import typer.* +import reporting.* +import ast.Trees.* import ast.{tpd, untpd} -import util.Chars._ +import util.Chars.* import collection.mutable -import ProtoTypes._ +import ProtoTypes.* import staging.StagingLevel import inlines.Inlines.inInlineMethod @@ -39,8 +39,8 @@ import scala.util.control.NonFatal * represented as TypeTrees then). */ class TreeChecker extends Phase with SymTransformer { - import ast.tpd._ - import TreeChecker._ + import ast.tpd.* + import TreeChecker.* private val seenClasses = collection.mutable.HashMap[String, Symbol]() private val seenModuleVals = collection.mutable.HashMap[String, Symbol]() @@ -186,7 +186,7 @@ object TreeChecker { * tpt, SeqLiteral elemtpt, ValDef tpt, DefDef tpt, and TypeDef rhs. 
*/ object TreeNodeChecker extends untpd.TreeTraverser: - import untpd._ + import untpd.* def traverse(tree: Tree)(using Context) = tree match case t: TypeTree => assert(assertion = false, i"TypeTree not expected: $t") case t @ TypeApply(fun, _targs) => traverse(fun) @@ -207,7 +207,7 @@ object TreeChecker { class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking { - import ast.tpd._ + import ast.tpd.* protected val nowDefinedSyms = util.HashSet[Symbol]() private val patBoundSyms = util.HashSet[Symbol]() diff --git a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala index aec44d5987bf..8d5b7c28bbbc 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala @@ -2,12 +2,12 @@ package dotty.tools.dotc package transform import ast.{Trees, tpd} -import core._ -import Contexts._, Trees._, Types._, StdNames._, Symbols._ -import ValueClasses._ +import core.* +import Contexts.*, Trees.*, Types.*, StdNames.*, Symbols.* +import ValueClasses.* object TreeExtractors { - import tpd._ + import tpd.* /** Match arg1.op(arg2) and extract (arg1, op.symbol, arg2) */ object BinaryOp { diff --git a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala index 92d22b1cc57e..095c6af60766 100644 --- a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import core.Symbols._ -import core.StdNames._ -import core.Types._ +import core.Symbols.* +import core.StdNames.* +import core.Types.* import core.NameKinds.ExceptionBinderName import dotty.tools.dotc.core.Flags -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.transform.MegaPhase.MiniPhase import dotty.tools.dotc.util.Spans.Span @@ -39,7 +39,7 @@ import dotty.tools.dotc.util.Spans.Span * */ class TryCatchPatterns extends MiniPhase { - import dotty.tools.dotc.ast.tpd._ + import dotty.tools.dotc.ast.tpd.* override def phaseName: String = TryCatchPatterns.name diff --git a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala index 6fba0bca4ce3..fee7bb19e0be 100644 --- a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala +++ b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala @@ -1,21 +1,21 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._ -import Decorators._ -import Definitions._ -import DenotTransformers._ -import StdNames._ -import Symbols._ -import MegaPhase._ -import Types._ +import core.* +import Contexts.* +import Decorators.* +import Definitions.* +import DenotTransformers.* +import StdNames.* +import Symbols.* +import MegaPhase.* +import Types.* import dotty.tools.dotc.ast.tpd /** Optimize generic operations on tuples */ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = TupleOptimizations.name diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index 556204ab89ab..f8092ba51c2a 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -4,16 
+4,16 @@ package transform import scala.language.unsafeNulls as _ -import core._ -import Contexts._, Symbols._, Types._, Constants._, StdNames._, Decorators._ +import core.* +import Contexts.*, Symbols.*, Types.*, Constants.*, StdNames.*, Decorators.* import ast.untpd -import Erasure.Boxing._ -import TypeErasure._ -import ValueClasses._ -import SymUtils._ -import core.Flags._ -import util.Spans._ -import reporting._ +import Erasure.Boxing.* +import TypeErasure.* +import ValueClasses.* +import SymUtils.* +import core.Flags.* +import util.Spans.* +import reporting.* import config.Printers.{ transforms => debug } import patmat.Typ @@ -29,7 +29,7 @@ import patmat.Typ * cannot be rewritten before erasure. That's why TypeTestsCasts is called from Erasure. */ object TypeTestsCasts { - import ast.tpd._ + import ast.tpd.* import typer.Inferencing.maximizeType import typer.ProtoTypes.constrained diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala index 779552a3d46f..9528e683cc55 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala @@ -2,11 +2,11 @@ package dotty.tools package dotc package transform -import core._ +import core.* import TypeErasure.ErasedValueType -import Types._ -import Contexts._ -import Symbols._ +import Types.* +import Contexts.* +import Symbols.* import Names.Name import dotty.tools.dotc.core.Decorators.* diff --git a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala index 95d40102c5a7..6e02ea7c227c 100644 --- a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala +++ b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala @@ -1,12 +1,12 @@ package dotty.tools.dotc package transform -import MegaPhase._ +import MegaPhase.* import core.DenotTransformers.{IdentityDenotTransformer} -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Flags._ +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Flags.* import ast.tpd object UncacheGivenAliases: @@ -23,7 +23,7 @@ object UncacheGivenAliases: */ class UncacheGivenAliases extends MiniPhase with IdentityDenotTransformer: thisPhase => - import tpd._ + import tpd.* override def phaseName: String = UncacheGivenAliases.name diff --git a/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala index a7ccaa19d90a..f22fc53e9b6e 100644 --- a/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala @@ -1,10 +1,10 @@ package dotty.tools.dotc package transform -import core._ -import Contexts._ -import Flags._ -import Symbols._ +import core.* +import Contexts.* +import Flags.* +import Symbols.* import MegaPhase.MiniPhase import StdNames.nme import ast.tpd @@ -19,7 +19,7 @@ import ast.tpd * @syntax markdown */ class UninitializedDefs extends MiniPhase: - import tpd._ + import tpd.* override def phaseName: String = UninitializedDefs.name diff --git a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala index 879a885d626e..6430dd7248b1 100644 --- a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala +++ b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala @@ -2,10 +2,10 @@ package 
dotty.tools.dotc package transform import ast.tpd -import core._ -import Contexts._, Symbols._, Types._, Flags._, Phases._ -import DenotTransformers._, MegaPhase._ -import TreeExtractors._, ValueClasses._ +import core.* +import Contexts.*, Symbols.*, Types.*, Flags.*, Phases.* +import DenotTransformers.*, MegaPhase.* +import TreeExtractors.*, ValueClasses.* /** This phase elides unnecessary value class allocations * @@ -16,7 +16,7 @@ import TreeExtractors._, ValueClasses._ * (new V(u)).underlying() => u */ class VCElideAllocations extends MiniPhase with IdentityDenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = VCElideAllocations.name diff --git a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala index 219945d4ebb1..fb1dd04bd6ad 100644 --- a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala @@ -3,10 +3,10 @@ package dotc package transform import ast.{Trees, tpd} -import core._ -import Contexts._, Trees._, Types._ -import DenotTransformers._, MegaPhase._ -import ExtensionMethods._, ValueClasses._ +import core.* +import Contexts.*, Trees.*, Types.* +import DenotTransformers.*, MegaPhase.* +import ExtensionMethods.*, ValueClasses.* /** This phase inlines calls to methods of value classes. @@ -40,7 +40,7 @@ import ExtensionMethods._, ValueClasses._ * need to have any knowledge of the name mangling done by other phases. */ class VCInlineMethods extends MiniPhase with IdentityDenotTransformer { - import tpd._ + import tpd.* override def phaseName: String = VCInlineMethods.name diff --git a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala index 28d1255eaa72..d0c012322fce 100644 --- a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala @@ -1,14 +1,14 @@ package dotty.tools.dotc package transform -import core._ -import Types._ -import Symbols._ -import Contexts._ -import Phases._ -import Flags._ -import StdNames._ -import SymUtils._ +import core.* +import Types.* +import Symbols.* +import Contexts.* +import Phases.* +import Flags.* +import StdNames.* +import SymUtils.* /** Methods that apply to user-defined value classes */ object ValueClasses { diff --git a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala index 3cf74ee3fdb3..1365bc36e958 100644 --- a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala +++ b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala @@ -2,16 +2,16 @@ package dotty.tools.dotc package transform import dotty.tools.dotc.ast.{tpd, untpd} -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Phases.{Phase, postTyperPhase} -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.util.SourceFile /** Ycheck inlined positions */ class YCheckPositions extends Phase { - import tpd._ + import tpd.* override def phaseName: String = YCheckPositions.name diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index 
8478cefbc764..7cf028c95064 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -2,22 +2,22 @@ package dotty.tools.dotc package transform package init -import dotty.tools.dotc._ +import dotty.tools.dotc.* import ast.tpd -import tpd._ +import tpd.* -import dotty.tools.dotc.core._ -import Contexts._ -import Types._ -import Symbols._ -import StdNames._ +import dotty.tools.dotc.core.* +import Contexts.* +import Types.* +import Symbols.* +import StdNames.* -import dotty.tools.dotc.transform._ -import Phases._ +import dotty.tools.dotc.transform.* +import Phases.* import scala.collection.mutable -import Semantic._ +import Semantic.* import dotty.tools.unsupported class Checker extends Phase: diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala index 366fd6be96a2..85feb609c90a 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala @@ -3,11 +3,11 @@ package dotc package transform package init -import ast.tpd._ -import core._ +import ast.tpd.* +import core.* import util.Property import util.SourcePosition -import Types._, Symbols._, Contexts._ +import Types.*, Symbols.*, Contexts.* import Trace.Trace diff --git a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala index 98380686c6aa..dc9ab3bfc7a1 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala @@ -7,7 +7,7 @@ import Contexts.* import ast.tpd.* import util.SourcePosition -import Decorators._, printing.SyntaxHighlighting +import Decorators.*, printing.SyntaxHighlighting import scala.collection.mutable diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala index ff8d89920791..9e40792895c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala @@ -10,9 +10,9 @@ import scala.util.matching.Regex.Match import PartialFunction.cond import dotty.tools.dotc.ast.tpd.{Match => _, *} -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.Phases.typerPhase import dotty.tools.dotc.util.Spans.Span diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala index 5cad7ba72831..7743054f5487 100644 --- a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala +++ b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala @@ -4,12 +4,12 @@ package transform.localopt import scala.language.unsafeNulls import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.StdNames.* +import 
dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.transform.MegaPhase.MiniPhase import dotty.tools.dotc.typer.ConstFold diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 7b5a80bc7b42..50f6a6becef6 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -3,23 +3,23 @@ package dotc package transform package patmat -import core._ -import Types._ -import TypeUtils._ -import Contexts._ -import Flags._ -import ast._ +import core.* +import Types.* +import TypeUtils.* +import Contexts.* +import Flags.* +import ast.* import Decorators.{ show => _, * } -import Symbols._ -import StdNames._ -import NameOps._ -import Constants._ -import typer._ -import Applications._ -import Inferencing._ -import ProtoTypes._ -import transform.SymUtils._ -import reporting._ +import Symbols.* +import StdNames.* +import NameOps.* +import Constants.* +import typer.* +import Applications.* +import Inferencing.* +import ProtoTypes.* +import transform.SymUtils.* +import reporting.* import config.Printers.{exhaustivity => debug} import util.{SrcPos, NoSourcePosition} @@ -116,7 +116,7 @@ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space case class Or(spaces: Seq[Space]) extends Space object SpaceEngine { - import tpd._ + import tpd.* def simplify(space: Space)(using Context): Space = space.simplify def isSubspace(a: Space, b: Space)(using Context): Boolean = a.isSubspace(b) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala index 6471e58d4ddc..951024f3d4db 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala @@ -3,12 +3,12 @@ package dotc package transform package sjs -import MegaPhase._ +import MegaPhase.* import core.Constants -import core.Contexts._ -import core.Decorators._ +import core.Contexts.* +import core.Decorators.* import core.StdNames.nme -import core.Symbols._ +import core.Symbols.* import dotty.tools.backend.sjs.JSDefinitions.jsdefn @@ -46,7 +46,7 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn */ class AddLocalJSFakeNews extends MiniPhase { thisPhase => import ExplicitOuter.outer - import ast.tpd._ + import ast.tpd.* override def phaseName: String = AddLocalJSFakeNews.name diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala index 71783c509596..4d27ecee12fa 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala @@ -5,27 +5,27 @@ package sjs import scala.compiletime.uninitialized -import MegaPhase._ -import core.Annotations._ -import core.Constants._ -import core.Denotations._ -import core.DenotTransformers._ -import core.Symbols._ -import core.Contexts._ -import core.Types._ -import core.Flags._ -import core.Decorators._ +import MegaPhase.* +import core.Annotations.* +import core.Constants.* +import core.Denotations.* +import core.DenotTransformers.* +import core.Symbols.* +import core.Contexts.* +import core.Types.* +import core.Flags.* +import core.Decorators.* import core.StdNames.nme import core.SymDenotations.SymDenotation -import core.Names._ -import 
core.NameKinds._ -import SymUtils._ +import core.Names.* +import core.NameKinds.* +import SymUtils.* import util.Store import dotty.tools.backend.sjs.JSDefinitions.jsdefn -import JSSymUtils._ +import JSSymUtils.* /** This phase makes all JS classes explicit (their definitions and references to them). * @@ -230,8 +230,8 @@ import JSSymUtils._ * created by step (C). */ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => - import ExplicitJSClasses._ - import ast.tpd._ + import ExplicitJSClasses.* + import ast.tpd.* override def phaseName: String = ExplicitJSClasses.name diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala index 9abf9a919d6d..2b0ed3c4880e 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala @@ -4,9 +4,9 @@ package sjs import scala.language.unsafeNulls -import core._ +import core.* import NameKinds.DefaultGetterName -import Names._ +import Names.* /** Utilities for JS exports handling. */ diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala index ae6635bce622..fafa1eb3cf79 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala @@ -2,18 +2,18 @@ package dotty.tools.dotc package transform package sjs -import core._ -import Constants._ -import Contexts._ -import Flags._ -import NameOps._ -import Names._ -import Phases._ -import StdNames._ -import Symbols._ -import SymUtils._ -import ast.Trees._ -import Types._ +import core.* +import Constants.* +import Contexts.* +import Flags.* +import NameOps.* +import Names.* +import Phases.* +import StdNames.* +import Symbols.* +import SymUtils.* +import ast.Trees.* +import Types.* import dotty.tools.backend.sjs.JSDefinitions.jsdefn diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala index b911d7dfab96..7655eb79d6d4 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala @@ -4,18 +4,18 @@ package sjs import scala.annotation.tailrec -import dotty.tools.dotc.core._ -import Constants._ -import Contexts._ -import Flags._ -import Names._ -import Scopes._ -import Symbols._ -import StdNames._ -import Types._ +import dotty.tools.dotc.core.* +import Constants.* +import Contexts.* +import Flags.* +import Names.* +import Scopes.* +import Symbols.* +import StdNames.* +import Types.* import Decorators.em -import dotty.tools.dotc.transform.MegaPhase._ +import dotty.tools.dotc.transform.MegaPhase.* import dotty.tools.backend.sjs.JSDefinitions.jsdefn @@ -106,8 +106,8 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn * some point in the future. 
*/ class JUnitBootstrappers extends MiniPhase { - import JUnitBootstrappers._ - import ast.tpd._ + import JUnitBootstrappers.* + import ast.tpd.* override def phaseName: String = JUnitBootstrappers.name diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index 25ab46712e70..d7073ac2e261 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -3,29 +3,29 @@ package transform package sjs import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core._ -import Contexts._ -import Decorators._ -import Denotations._ -import Flags._ +import dotty.tools.dotc.core.* +import Contexts.* +import Decorators.* +import Denotations.* +import Flags.* import NameKinds.DefaultGetterName -import StdNames._ -import Symbols._ -import SymUtils._ -import Types._ +import StdNames.* +import Symbols.* +import SymUtils.* +import Types.* import util.Spans.Span import util.SrcPos import dotty.tools.backend.sjs.JSDefinitions.jsdefn -import JSExportUtils._ -import JSSymUtils._ +import JSExportUtils.* +import JSSymUtils.* import org.scalajs.ir.Names.DefaultModuleID import org.scalajs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName object PrepJSExports { - import tpd._ + import tpd.* import PrepJSInterop.{checkSetterSignature, isJSAny, isPrivateMaybeWithin} private sealed abstract class ExportDestination diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index a2f9a0fb45a3..2da2a98837c7 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -5,23 +5,23 @@ package sjs import scala.collection.mutable import ast.tpd -import core._ +import core.* import typer.Checking import util.SrcPos -import Annotations._ -import Constants._ -import Contexts._ -import Decorators._ -import DenotTransformers._ -import Flags._ +import Annotations.* +import Constants.* +import Contexts.* +import Decorators.* +import DenotTransformers.* +import Flags.* import NameKinds.{DefaultGetterName, ModuleClassName} -import NameOps._ -import StdNames._ -import Symbols._ -import SymUtils._ -import Types._ +import NameOps.* +import StdNames.* +import Symbols.* +import SymUtils.* +import Types.* -import JSSymUtils._ +import JSSymUtils.* import org.scalajs.ir.Trees.JSGlobalRef @@ -52,8 +52,8 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn * pickling. */ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisPhase => - import PrepJSInterop._ - import tpd._ + import PrepJSInterop.* + import tpd.* override def phaseName: String = PrepJSInterop.name @@ -68,7 +68,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP new ScalaJSPrepJSInteropTransformer class ScalaJSPrepJSInteropTransformer extends Transformer with Checking { - import PrepJSExports._ + import PrepJSExports.* /** Kind of the directly enclosing (most nested) owner. 
*/ private var enclosingOwner: OwnerKind = OwnerKind.None diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index fb05af087a19..21f245da9485 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -2,33 +2,33 @@ package dotty.tools package dotc package typer -import core._ +import core.* import ast.{Trees, tpd, untpd, desugar} import util.Stats.record import util.{SrcPos, NoSourcePosition} -import Contexts._ -import Flags._ -import Symbols._ +import Contexts.* +import Flags.* +import Symbols.* import Denotations.Denotation -import Types._ -import Decorators._ -import ErrorReporting._ -import Trees._ -import Names._ -import StdNames._ -import ContextOps._ +import Types.* +import Decorators.* +import ErrorReporting.* +import Trees.* +import Names.* +import StdNames.* +import ContextOps.* import NameKinds.DefaultGetterName -import ProtoTypes._ -import Inferencing._ -import reporting._ -import transform.TypeUtils._ -import transform.SymUtils._ -import Nullables._, NullOpsDecorator.* +import ProtoTypes.* +import Inferencing.* +import reporting.* +import transform.TypeUtils.* +import transform.SymUtils.* +import Nullables.*, NullOpsDecorator.* import config.Feature import collection.mutable import config.Printers.{overload, typr, unapp} -import TypeApplications._ +import TypeApplications.* import Annotations.Annotation import Constants.{Constant, IntTag} @@ -38,7 +38,7 @@ import annotation.threadUnsafe import scala.util.control.NonFatal object Applications { - import tpd._ + import tpd.* def extractorMember(tp: Type, name: Name)(using Context): SingleDenotation = tp.member(name).suchThat(sym => sym.info.isParameterless && sym.info.widenExpr.isValueType) @@ -352,7 +352,7 @@ object Applications { trait Applications extends Compatibility { self: Typer & Dynamic => - import Applications._ + import Applications.* import tpd.{ cpy => _, _ } import untpd.cpy diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index c8026ad5784b..75871f2ab16a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -2,18 +2,18 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Contexts._ -import Types._ -import Flags._ -import Names._ -import StdNames._ -import Symbols._ -import Trees._ -import ProtoTypes._ -import Scopes._ -import CheckRealizable._ +import core.* +import ast.* +import Contexts.* +import Types.* +import Flags.* +import Names.* +import StdNames.* +import Symbols.* +import Trees.* +import ProtoTypes.* +import Scopes.* +import CheckRealizable.* import ErrorReporting.errorTree import util.Spans.Span import Phases.refchecksPhase @@ -23,29 +23,29 @@ import util.SrcPos import util.Spans.Span import rewrites.Rewrites.patch import inlines.Inlines -import transform.SymUtils._ -import transform.ValueClasses._ -import Decorators._ +import transform.SymUtils.* +import transform.ValueClasses.* +import Decorators.* import ErrorReporting.{err, errorType} import config.Printers.{typr, patmatch} import NameKinds.DefaultGetterName -import NameOps._ +import NameOps.* import SymDenotations.{NoCompleter, NoDenotation} import Applications.unapplyArgs import Inferencing.isFullyDefined import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotePattern} import config.Feature import 
config.Feature.sourceVersion -import config.SourceVersion._ +import config.SourceVersion.* import printing.Formatting.hlAsKeyword import transform.TypeUtils.* import cc.isCaptureChecking import collection.mutable -import reporting._ +import reporting.* object Checking { - import tpd._ + import tpd.* /** Add further information for error messages involving applied types if the * type is inferred: @@ -853,7 +853,7 @@ object Checking { trait Checking { - import tpd._ + import tpd.* def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = Checking.checkNonCyclic(sym, info, reportErrors) @@ -884,7 +884,7 @@ trait Checking { case NonConforming, RefutableExtractor def fail(pat: Tree, pt: Type, reason: Reason): Boolean = { - import Reason._ + import Reason.* val message = reason match case NonConforming => var reportedPt = pt.dropAnnot(defn.UncheckedAnnot) @@ -1579,7 +1579,7 @@ trait Checking { } trait ReChecking extends Checking { - import tpd._ + import tpd.* override def checkEnumParent(cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = () @@ -1595,7 +1595,7 @@ trait ReChecking extends Checking { } trait NoChecking extends ReChecking { - import tpd._ + import tpd.* override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = info override def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = () override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = () diff --git a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala index b55c8c64e3b1..75894d2dd5b9 100644 --- a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala +++ b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala @@ -3,19 +3,19 @@ package typer import java.lang.ArithmeticException -import ast._ -import core._ -import Symbols._ -import Types._ -import Constants._ -import Names._ -import StdNames._ -import Contexts._ -import transform.TypeUtils._ +import ast.* +import core.* +import Symbols.* +import Types.* +import Constants.* +import Names.* +import StdNames.* +import Contexts.* +import transform.TypeUtils.* object ConstFold: - import tpd._ + import tpd.* private val foldedBinops = Set[Name]( nme.ZOR, nme.OR, nme.XOR, nme.ZAND, nme.AND, nme.EQ, nme.NE, diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index 8fdc468780ba..818781ae7ccb 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -2,13 +2,13 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import ast.Trees._ -import StdNames._ -import Contexts._, Symbols._, Types._, SymDenotations._, Names._, NameOps._, Flags._, Decorators._ -import ProtoTypes._, ContextOps._ -import util.Spans._ +import core.* +import ast.* +import ast.Trees.* +import StdNames.* +import Contexts.*, Symbols.*, Types.*, SymDenotations.*, Names.*, NameOps.*, Flags.*, Decorators.* +import ProtoTypes.*, ContextOps.* +import util.Spans.* import util.SrcPos import collection.mutable import ErrorReporting.errorTree @@ -266,7 +266,7 @@ trait Deriving { /** The synthesized type class instance 
definitions */ def syntheticDefs: List[tpd.Tree] = { - import tpd._ + import tpd.* /** The type class instance definition with symbol `sym` */ def typeclassInstance(sym: Symbol)(using Context): List[List[tpd.Tree]] => tpd.Tree = diff --git a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala index d819528ff556..33ef3e85e14e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala +++ b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala @@ -2,8 +2,8 @@ package dotty.tools package dotc package typer -import core._ -import Contexts._, Symbols._, Decorators._, Comments.{_, given} +import core.* +import Contexts.*, Symbols.*, Decorators.*, Comments.{_, given} import ast.tpd object Docstrings { diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index 51734e1a5d4b..71b32b639997 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -67,8 +67,8 @@ object DynamicUnapply { trait Dynamic { self: Typer & Applications => - import Dynamic._ - import tpd._ + import Dynamic.* + import tpd.* /** Translate selection that does not typecheck according to the normal rules into a applyDynamic/applyDynamicNamed. * foo.bar(baz0, baz1, ...) ~~> foo.applyDynamic(bar)(baz0, baz1, ...) diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index 499d57e0518e..68143dfd2ba0 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -2,22 +2,22 @@ package dotty.tools package dotc package typer -import ast._ -import core._ -import Types._, ProtoTypes._, Contexts._, Decorators._, Denotations._, Symbols._ -import Implicits._, Flags._, Constants.Constant -import Trees._ -import NameOps._ +import ast.* +import core.* +import Types.*, ProtoTypes.*, Contexts.*, Decorators.*, Denotations.*, Symbols.* +import Implicits.*, Flags.*, Constants.Constant +import Trees.* +import NameOps.* import util.Spans.NoSpan import util.SrcPos import config.Feature -import reporting._ +import reporting.* import collection.mutable object ErrorReporting { - import tpd._ + import tpd.* def errorTree(tree: untpd.Tree, msg: Message, pos: SrcPos)(using Context): tpd.Tree = tree.withType(errorType(msg, pos)) diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala index 8e5ec7525d48..2c441c2f915e 100644 --- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala +++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala @@ -2,18 +2,18 @@ package dotty.tools package dotc package typer -import core._ +import core.* import ast.{Trees, untpd, tpd} -import Contexts._ -import Types._ -import Flags._ -import Symbols._ -import Names._ +import Contexts.* +import Types.* +import Flags.* +import Symbols.* +import Names.* import NameKinds.UniqueName -import util.Spans._ +import util.Spans.* import util.Property import collection.mutable -import Trees._ +import Trees.* /** A class that handles argument lifting. Argument lifting is needed in the following * scenarios: @@ -25,7 +25,7 @@ import Trees._ * arguments can be duplicated as arguments to default argument methods. 
*/ abstract class Lifter { - import tpd._ + import tpd.* /** Test indicating `expr` does not need lifting */ def noLift(expr: Tree)(using Context): Boolean @@ -208,7 +208,7 @@ object LiftToDefs extends LiftComplex { /** Lifter for eta expansion */ object EtaExpansion extends LiftImpure { - import tpd._ + import tpd.* /** Eta-expanding a tree means converting a method reference to a function value. * @param tree The tree to expand @@ -264,7 +264,7 @@ object EtaExpansion extends LiftImpure { * But see comment on the `ExprType` case in function `prune` in class `ConstraintHandling`. */ def etaExpand(tree: Tree, mt: MethodType, xarity: Int)(using Context): untpd.Tree = { - import untpd._ + import untpd.* assert(!ctx.isAfterTyper || (ctx.phase eq ctx.base.inliningPhase), ctx.phase) val defs = new mutable.ListBuffer[tpd.Tree] val lifted: Tree = TypedSplice(liftApp(defs, tree)) diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 108ede2aa509..83b92f3b2342 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -3,35 +3,35 @@ package dotc package typer import backend.sjs.JSDefinitions -import core._ +import core.* import ast.{TreeTypeMap, untpd, tpd} -import util.Spans._ +import util.Spans.* import util.Stats.{record, monitored} import printing.{Showable, Printer} -import printing.Texts._ -import Contexts._ -import Types._ -import Flags._ +import printing.Texts.* +import Contexts.* +import Types.* +import Flags.* import Mode.ImplicitsEnabled import NameKinds.{LazyImplicitName, ContextBoundParamName} -import Symbols._ -import Types._ -import Decorators._ -import Names._ -import StdNames._ -import ProtoTypes._ -import ErrorReporting._ +import Symbols.* +import Types.* +import Decorators.* +import Names.* +import StdNames.* +import ProtoTypes.* +import ErrorReporting.* import Inferencing.{fullyDefinedType, isFullyDefined} import Scopes.newScope import Typer.BindingPrec, BindingPrec.* -import transform.TypeUtils._ -import Hashable._ +import transform.TypeUtils.* +import Hashable.* import util.{EqHashMap, Stats} import config.{Config, Feature} import Feature.migrateTo3 import config.Printers.{implicits, implicitsDetailed} import collection.mutable -import reporting._ +import reporting.* import transform.Splicer import annotation.tailrec @@ -41,7 +41,7 @@ import scala.compiletime.uninitialized /** Implicit resolution */ object Implicits: - import tpd._ + import tpd.* /** An implicit definition `implicitRef` that is visible under a different name, `alias`. * Gets generated if an implicit ref is imported via a renaming import. 
@@ -597,7 +597,7 @@ object Implicits: } end Implicits -import Implicits._ +import Implicits.* /** Info relating to implicits that is kept for one run */ trait ImplicitRunInfo: @@ -845,7 +845,7 @@ end ImplicitRunInfo trait Implicits: self: Typer => - import tpd._ + import tpd.* override def viewExists(from: Type, to: Type)(using Context): Boolean = !from.isError @@ -1842,7 +1842,7 @@ final class SearchRoot extends SearchHistory: result match { case failure: SearchFailure => failure case success: SearchSuccess => - import tpd._ + import tpd.* // We might have accumulated dictionary entries for by name implicit arguments // which are not in fact used recursively either directly in the outermost result diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala index 4850e7bdffa9..78cba674bfff 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala @@ -3,10 +3,10 @@ package dotc package typer import ast.{tpd, untpd} -import core._ +import core.* import printing.{Printer, Showable} import util.SimpleIdentityMap -import Symbols._, Names._, Types._, Contexts._, StdNames._, Flags._ +import Symbols.*, Names.*, Types.*, Contexts.*, StdNames.*, Flags.* import Implicits.RenamedImplicitRef import StdNames.nme import printing.Texts.Text diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index 66d4a803494d..7615fbda9f0a 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -3,11 +3,11 @@ package dotc package typer import backend.sjs.JSDefinitions -import core._ -import Contexts._, Types._, Symbols._, Names._, Decorators._, ProtoTypes._ -import Flags._, SymDenotations._ +import core.* +import Contexts.*, Types.*, Symbols.*, Names.*, Decorators.*, ProtoTypes.* +import Flags.*, SymDenotations.* import NameKinds.FlatName -import StdNames._ +import StdNames.* import config.Printers.{implicits, implicitsDetailed} import ast.{untpd, tpd} import Implicits.{hasExtMethod, Candidate} @@ -25,7 +25,7 @@ trait ImportSuggestions: /** The maximal number of suggested imports to make */ inline val MaxSuggestions = 10 - import tpd._ + import tpd.* /** Timeout to test a single implicit value as a suggestion, in ms */ private inline val testOneImplicitTimeOut = 500 diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 89368d948448..2f43792efe8b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -2,23 +2,23 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Contexts._, Types._, Flags._, Symbols._ -import ProtoTypes._ +import core.* +import ast.* +import Contexts.*, Types.*, Flags.*, Symbols.* +import ProtoTypes.* import NameKinds.UniqueName -import util.Spans._ +import util.Spans.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet, SrcPos} -import Decorators._ +import Decorators.* import config.Printers.{gadts, typr} import annotation.tailrec -import reporting._ +import reporting.* import collection.mutable import scala.annotation.internal.sharable object Inferencing { - import tpd._ + import tpd.* /** Is type fully defined, meaning the type does not contain wildcard types * or uninstantiated type variables. 
As a side effect, this will minimize @@ -550,8 +550,8 @@ object Inferencing { } trait Inferencing { this: Typer => - import Inferencing._ - import tpd._ + import Inferencing.* + import tpd.* /** Interpolate undetermined type variables in the widened type of this tree. * @param tree the tree whose type is interpolated diff --git a/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala b/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala index 89caf5e1c474..bbc34bc692f9 100644 --- a/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala @@ -1,8 +1,8 @@ package dotty.tools.dotc package typer -import core.Contexts._ -import ast.tpd._ +import core.Contexts.* +import ast.tpd.* /** PostTyper doesn't run on java sources, * but some checks still need to be applied. diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 5361f37c2a76..1e8460764b9b 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Trees._, StdNames._, Scopes._, Denotations._, NamerOps._, ContextOps._ -import Contexts._, Symbols._, Types._, SymDenotations._, Names._, NameOps._, Flags._ -import Decorators._, Comments.{_, given} +import core.* +import ast.* +import Trees.*, StdNames.*, Scopes.*, Denotations.*, NamerOps.*, ContextOps.* +import Contexts.*, Symbols.*, Types.*, SymDenotations.*, Names.*, NameOps.*, Flags.* +import Decorators.*, Comments.{_, given} import NameKinds.DefaultGetterName -import ast.desugar, ast.desugar._ -import ProtoTypes._ -import util.Spans._ +import ast.desugar, ast.desugar.* +import ProtoTypes.* +import util.Spans.* import util.Property import collection.mutable import tpd.tpes @@ -20,15 +20,15 @@ import config.Printers.typr import inlines.{Inlines, PrepareInlineable} import parsing.JavaParsers.JavaParser import parsing.Parsers.Parser -import Annotations._ -import Inferencing._ -import transform.ValueClasses._ -import transform.TypeUtils._ -import transform.SymUtils._ +import Annotations.* +import Inferencing.* +import transform.ValueClasses.* +import transform.TypeUtils.* +import transform.SymUtils.* import TypeErasure.erasure -import reporting._ +import reporting.* import config.Feature.sourceVersion -import config.SourceVersion._ +import config.SourceVersion.* import scala.compiletime.uninitialized @@ -54,7 +54,7 @@ import scala.compiletime.uninitialized */ class Namer { typer: Typer => - import untpd._ + import untpd.* val TypedAhead : Property.Key[tpd.Tree] = new Property.Key val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key @@ -1260,7 +1260,7 @@ class Namer { typer: Typer => if forwarder.isType then buf += tpd.TypeDef(forwarder.asType).withSpan(span) else - import tpd._ + import tpd.* def extensionParamsCount(pt: Type): Int = pt match case pt: MethodOrPoly => 1 + extensionParamsCount(pt.resType) case _ => 0 diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala index 387e58294dc6..cc3fac3a6ffd 100644 --- a/compiler/src/dotty/tools/dotc/typer/Nullables.scala +++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package typer -import core._ -import Types._, Contexts._, Symbols._, Decorators._, Constants._ +import core.* +import Types.*, Contexts.*, Symbols.*, Decorators.*, Constants.* import 
annotation.tailrec import StdNames.nme import util.Property import Names.Name import util.Spans.Span -import Flags._ -import NullOpsDecorator._ +import Flags.* +import NullOpsDecorator.* import collection.mutable import config.Printers.nullables import ast.{tpd, untpd} @@ -18,7 +18,7 @@ import ast.Trees.mods /** Operations for implementing a flow analysis for nullability */ object Nullables: - import ast.tpd._ + import ast.tpd.* def importUnsafeNulls(using Context): Import = Import( ref(defn.LanguageModule), @@ -422,7 +422,7 @@ object Nullables: * because of shadowing. */ def assignmentSpans(using Context): Map[Int, List[Span]] = - import ast.untpd._ + import ast.untpd.* object populate extends UntypedTreeTraverser: diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 6a3f0d0ea73b..6cbddeb964cf 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -2,15 +2,15 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Contexts._, Types._, Denotations._, Names._, StdNames._, NameOps._, Symbols._ +import core.* +import ast.* +import Contexts.*, Types.*, Denotations.*, Names.*, StdNames.*, NameOps.*, Symbols.* import NameKinds.DepParamName -import Trees._ -import Constants._ +import Trees.* +import Constants.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet} -import Decorators._ -import Uniques._ +import Decorators.* +import Uniques.* import inlines.Inlines import config.Printers.typr import Inferencing.* @@ -25,7 +25,7 @@ import dotty.tools.dotc.util.Spans.{NoSpan, Span} object ProtoTypes { - import tpd._ + import tpd.* /** A trait defining an `isCompatible` method. */ trait Compatibility { diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index bda2c25c26b8..75e1aed9da21 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -1,28 +1,28 @@ package dotty.tools.dotc package typer -import dotty.tools.dotc.ast._ -import dotty.tools.dotc.config.Feature._ -import dotty.tools.dotc.config.SourceVersion._ -import dotty.tools.dotc.core._ -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.ast.* +import dotty.tools.dotc.config.Feature.* +import dotty.tools.dotc.config.SourceVersion.* +import dotty.tools.dotc.core.* +import dotty.tools.dotc.core.Annotations.* +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.NameKinds.PatMatGivenVarName -import dotty.tools.dotc.core.Names._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.inlines.PrepareInlineable import dotty.tools.dotc.quoted.QuotePatterns import dotty.tools.dotc.staging.StagingLevel.* -import dotty.tools.dotc.transform.SymUtils._ +import dotty.tools.dotc.transform.SymUtils.* import dotty.tools.dotc.typer.ErrorReporting.errorTree -import dotty.tools.dotc.typer.Implicits._ -import dotty.tools.dotc.typer.Inferencing._ +import 
dotty.tools.dotc.typer.Implicits.* +import dotty.tools.dotc.typer.Inferencing.* import dotty.tools.dotc.util.Property -import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.util.Spans.* import dotty.tools.dotc.util.Stats.record import dotty.tools.dotc.reporting.IllegalVariableInPatternAlternative import scala.collection.mutable @@ -266,7 +266,7 @@ trait QuotesAndSplices { } object QuotesAndSplices { - import tpd._ + import tpd.* /** Key for mapping from quoted pattern type variable names into their symbol */ private val TypeVariableKey = new Property.Key[collection.mutable.Map[TypeName, Symbol]] diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 20dfe07c3be5..e152b5e6b9c7 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -1,17 +1,17 @@ package dotty.tools.dotc package typer -import core._ -import Contexts._ -import Types._ -import Symbols._ -import StdNames._ -import Decorators._ -import typer.ProtoTypes._ +import core.* +import Contexts.* +import Types.* +import Symbols.* +import StdNames.* +import Decorators.* +import typer.ProtoTypes.* import ast.{tpd, untpd} import scala.util.control.NonFatal import util.Spans.Span -import Nullables._ +import Nullables.* import staging.StagingLevel.* /** A version of Typer that keeps all symbols defined and referenced in a @@ -23,7 +23,7 @@ import staging.StagingLevel.* * Otherwise, everything is as in Typer. */ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking { - import tpd._ + import tpd.* private def assertTyped(tree: untpd.Tree)(using Context): Unit = assert(tree.hasType, i"$tree ${tree.getClass} ${tree.uniqueId}") diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index af279844f370..12694bf4c6cc 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -2,27 +2,27 @@ package dotty.tools package dotc package typer -import transform._ -import core._ -import Symbols._, Types._, Contexts._, Flags._, Names._, NameOps._, NameKinds._ -import StdNames._, Denotations._, SymUtils._, Phases._, SymDenotations._ +import transform.* +import core.* +import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, NameOps.*, NameKinds.* +import StdNames.*, Denotations.*, SymUtils.*, Phases.*, SymDenotations.* import NameKinds.DefaultGetterName -import util.Spans._ +import util.Spans.* import scala.collection.mutable -import ast._ -import MegaPhase._ +import ast.* +import MegaPhase.* import config.Printers.{checks, noPrinter, capt} -import Decorators._ +import Decorators.* import OverridingPairs.isOverridingPair -import typer.ErrorReporting._ +import typer.ErrorReporting.* import config.Feature.{warnOnMigration, migrateTo3, sourceVersion} import config.SourceVersion.{`3.0`, `future`} import config.Printers.refcheck -import reporting._ +import reporting.* import Constants.Constant object RefChecks { - import tpd._ + import tpd.* val name: String = "refchecks" val description: String = "checks related to abstract members and overriding" @@ -1131,7 +1131,7 @@ object RefChecks { report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree) } -import RefChecks._ +import RefChecks.* /** Post-attribution checking and transformation, which fulfills the following roles * @@ -1165,7 +1165,7 @@ import RefChecks._ */ class RefChecks extends MiniPhase { thisPhase 
=> - import tpd._ + import tpd.* override def phaseName: String = RefChecks.name diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 363d6e5ba411..6e1302c88398 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -2,25 +2,25 @@ package dotty.tools package dotc package typer -import core._ +import core.* import util.Spans.Span -import Contexts._ -import Types._, Flags._, Symbols._, Names._, StdNames._, Constants._ +import Contexts.* +import Types.*, Flags.*, Symbols.*, Names.*, StdNames.*, Constants.* import TypeErasure.{erasure, hasStableErasure} -import Decorators._ -import ProtoTypes._ +import Decorators.* +import ProtoTypes.* import Inferencing.{fullyDefinedType, isFullyDefined} import ast.untpd -import transform.SymUtils._ -import transform.TypeUtils._ -import transform.SyntheticMembers._ +import transform.SymUtils.* +import transform.TypeUtils.* +import transform.SyntheticMembers.* import util.Property import ast.Trees.genericEmptyTree import annotation.{tailrec, constructorOnly} -import ast.tpd._ -import Synthesizer._ +import ast.tpd.* +import Synthesizer.* import sbt.ExtractDependencies.* -import xsbti.api.DependencyContext._ +import xsbti.api.DependencyContext.* /** Synthesize terms for special classes */ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 8ded39030a1e..d2b21ea9e4a8 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -2,14 +2,14 @@ package dotty.tools package dotc package typer -import core._ -import ast._ -import Contexts._, ContextOps._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._ -import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._ +import core.* +import ast.* +import Contexts.*, ContextOps.*, Constants.*, Types.*, Symbols.*, Names.*, Flags.*, Decorators.* +import ErrorReporting.*, Annotations.*, Denotations.*, SymDenotations.*, StdNames.* import util.SrcPos -import NameOps._ +import NameOps.* import collection.mutable -import reporting._ +import reporting.* import Checking.{checkNoPrivateLeaks, checkNoWildcard} import cc.CaptureSet diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 66c79658b6ab..ac6eec11ac23 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -3,53 +3,53 @@ package dotc package typer import backend.sjs.JSDefinitions -import core._ -import ast._ -import Trees._ -import Constants._ -import StdNames._ -import Scopes._ -import Denotations._ -import ProtoTypes._ -import Contexts._ -import Symbols._ -import Types._ -import SymDenotations._ -import Annotations._ -import Names._ -import NameOps._ -import NameKinds._ -import NamerOps._ -import ContextOps._ -import Flags._ -import Decorators._ -import ErrorReporting._ -import Checking._ -import Inferencing._ +import core.* +import ast.* +import Trees.* +import Constants.* +import StdNames.* +import Scopes.* +import Denotations.* +import ProtoTypes.* +import Contexts.* +import Symbols.* +import Types.* +import SymDenotations.* +import Annotations.* +import Names.* +import NameOps.* +import NameKinds.* +import NamerOps.* +import ContextOps.* +import 
Flags.* +import Decorators.* +import ErrorReporting.* +import Checking.* +import Inferencing.* import Dynamic.isDynamicExpansion import EtaExpansion.etaExpand import TypeComparer.CompareResult import inlines.{Inlines, PrepareInlineable} -import util.Spans._ -import util.common._ +import util.Spans.* +import util.common.* import util.{Property, SimpleIdentityMap, SrcPos} import Applications.{tupleComponentTypes, wrapDefs, defaultArgument} import collection.mutable import annotation.tailrec -import Implicits._ +import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} import config.Feature import config.Feature.{sourceVersion, migrateTo3} -import config.SourceVersion._ +import config.SourceVersion.* import rewrites.Rewrites.patch import staging.StagingLevel -import transform.SymUtils._ -import transform.TypeUtils._ -import reporting._ -import Nullables._ -import NullOpsDecorator._ +import transform.SymUtils.* +import transform.TypeUtils.* +import reporting.* +import Nullables.* +import NullOpsDecorator.* import cc.CheckCaptures import config.Config @@ -130,7 +130,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer with QuotesAndSplices with Deriving { - import Typer._ + import Typer.* import tpd.{cpy => _, _} import untpd.cpy @@ -217,7 +217,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * or else `NoContext` if nothing was found yet. */ def findRefRecur(previous: Type, prevPrec: BindingPrec, prevCtx: Context)(using Context): Type = { - import BindingPrec._ + import BindingPrec.* /** Check that any previously found result from an inner context * does properly shadow the new one from an outer context. @@ -821,8 +821,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedNumber(tree: untpd.Number, pt: Type)(using Context): Tree = { - import scala.util.FromDigits._ - import untpd.NumberKind._ + import scala.util.FromDigits.* + import untpd.NumberKind.* record("typedNumber") val digits = tree.digits val target = pt.dealias @@ -904,7 +904,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedNew(tree: untpd.New, pt: Type)(using Context): Tree = tree.tpt match { case templ: untpd.Template => - import untpd._ + import untpd.* var templ1 = templ def isEligible(tp: Type) = tp.exists @@ -948,7 +948,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case id: untpd.Ident if (ctx.mode is Mode.Pattern) && untpd.isVarPattern(id) => if (id.name == nme.WILDCARD || id.name == nme.WILDCARD_STAR) ifPat else { - import untpd._ + import untpd.* typed(Bind(id.name, Typed(Ident(wildName), tree.tpt)).withSpan(tree.span), pt) } case _ => ifExpr diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala index 857ed1bad4d9..b79235f4f819 100644 --- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala @@ -2,16 +2,16 @@ package dotty.tools package dotc package typer -import core._ +import core.* import Run.SubPhase -import Phases._ -import Contexts._ -import Symbols._ +import Phases.* +import Contexts.* +import Symbols.* import ImportInfo.withRootImports import parsing.{Parser => ParserPhase} import config.Printers.typr import inlines.PrepareInlineable -import util.Stats._ +import util.Stats.* /** * diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index 7d129e128518..3699ca80d011 
100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -2,11 +2,11 @@ package dotty.tools.dotc package typer import dotty.tools.dotc.ast.{ Trees, tpd } -import core._ -import Types._, Contexts._, Flags._, Symbols._, Trees._ -import Decorators._ -import Variances._ -import NameKinds._ +import core.* +import Types.*, Contexts.*, Flags.*, Symbols.*, Trees.* +import Decorators.* +import Variances.* +import NameKinds.* import util.SrcPos import config.Printers.variances import config.Feature.migrateTo3 @@ -65,8 +65,8 @@ object VarianceChecker { } class VarianceChecker(using Context) { - import VarianceChecker._ - import tpd._ + import VarianceChecker.* + import tpd.* private object Validator extends TypeAccumulator[Option[VarianceError]] { private var base: Symbol = uninitialized diff --git a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala index b4af59c09310..7224e28fe477 100644 --- a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala +++ b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala @@ -17,7 +17,7 @@ import scala.collection.mutable * handled by scaladoc. */ object CommentParsing { - import Chars._ + import Chars.* /** Returns index of string `str` following `start` skipping longest * sequence of whitespace characters characters (but no newlines) diff --git a/compiler/src/dotty/tools/dotc/util/LRUCache.scala b/compiler/src/dotty/tools/dotc/util/LRUCache.scala index 99ee8a80227b..e124159575e7 100644 --- a/compiler/src/dotty/tools/dotc/util/LRUCache.scala +++ b/compiler/src/dotty/tools/dotc/util/LRUCache.scala @@ -18,7 +18,7 @@ import annotation.tailrec * at the `last` position. */ class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] { - import LRUCache._ + import LRUCache.* val keys: Array[Key] = new Array[Key](Retained) val values: Array[Value] = new Array(Retained) var next: SixteenNibbles = new SixteenNibbles(initialRing.bits) diff --git a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala index 61cf238fbc7f..f641ea90dcdd 100644 --- a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala +++ b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala @@ -4,7 +4,7 @@ package util import scala.language.unsafeNulls -import core.Names._ +import core.Names.* import scala.annotation.internal.sharable diff --git a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala index ac724f7e336f..7509ef66e533 100644 --- a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala +++ b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala @@ -3,9 +3,9 @@ package dotty.tools.dotc.util import scala.language.unsafeNulls import dotty.tools.dotc.core.Comments.{Comment, CommentsContext} -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Names.TermName -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.printing.SyntaxHighlighting import scala.Console.{BOLD, RESET} diff --git a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala index 4dd897dd082a..ec88b5880745 100644 --- a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala +++ b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.util 
import scala.collection.mutable.ArrayBuffer -import scala.util.chaining._ +import scala.util.chaining.* /** A wrapper for a list of cached instances of a type `T`. * The wrapper is recursion-reentrant: several instances are kept, so diff --git a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala index f85a57a8f812..8d5d0c27ab0e 100644 --- a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala +++ b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala @@ -8,10 +8,10 @@ import java.lang.Float.intBitsToFloat import java.lang.Double.longBitsToDouble import core.unpickleScala2.PickleBuffer -import core.Names._ +import core.Names.* object ShowPickled { - import core.unpickleScala2.PickleFormat._ + import core.unpickleScala2.PickleFormat.* case class PickleBufferEntry(num: Int, startIndex: Int, tag: Int, bytes: Array[Byte]) { def isName: Boolean = tag == TERMname || tag == TYPEname diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 5bb79642278d..c3779d3473cf 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -1,21 +1,21 @@ package dotty.tools.dotc package util -import ast.Trees._ +import ast.Trees.* import ast.tpd import core.Constants.Constant -import core.Contexts._ +import core.Contexts.* import core.Denotations.{SingleDenotation, Denotation} import core.Flags import core.NameOps.isUnapplyName -import core.Names._ +import core.Names.* import core.NameKinds -import core.Types._ +import core.Types.* import core.Symbols.NoSymbol import interactive.Interactive import transform.SymUtils.isLocalToBlock import util.Spans.Span -import reporting._ +import reporting.* object Signatures { diff --git a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala index 42286aef5d31..b51f6bdcac61 100644 --- a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala +++ b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala @@ -5,7 +5,7 @@ package dotty.tools.dotc.util * */ class SixteenNibbles(val bits: Long) extends AnyVal { - import SixteenNibbles._ + import SixteenNibbles.* def apply(idx: Int): Int = (bits >>> (idx * Width)).toInt & Mask diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala index d35509f591a3..9da4f58f2deb 100644 --- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala +++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala @@ -4,12 +4,12 @@ package util import scala.language.unsafeNulls -import dotty.tools.io._ -import Spans._ -import core.Contexts._ +import dotty.tools.io.* +import Spans.* +import core.Contexts.* import scala.io.Codec -import Chars._ +import Chars.* import scala.annotation.internal.sharable import scala.collection.mutable import scala.collection.mutable.ArrayBuffer @@ -62,7 +62,7 @@ object ScriptSourceFile { } class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends interfaces.SourceFile { - import SourceFile._ + import SourceFile.* private var myContent: Array[Char] | Null = null @@ -260,7 +260,7 @@ object SourceFile { // and use both slashes as separators, or on other OS and use forward slash // as separator, backslash as file name character. 
- import scala.jdk.CollectionConverters._ + import scala.jdk.CollectionConverters.* val path = refPath.relativize(sourcePath) path.iterator.asScala.mkString("/") else diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala index 29f9a34d2292..904704b2349c 100644 --- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala +++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala @@ -5,7 +5,7 @@ package util import scala.language.unsafeNulls import printing.{Showable, Printer} -import printing.Texts._ +import printing.Texts.* import core.Contexts.Context import Spans.{Span, NoSpan} import scala.annotation.internal.sharable diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index 5e136856b718..750a799a9f0a 100644 --- a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -4,7 +4,7 @@ package util import scala.annotation.internal.sharable -import core.Contexts._ +import core.Contexts.* import collection.mutable @sharable object Stats { diff --git a/compiler/src/dotty/tools/dotc/util/Store.scala b/compiler/src/dotty/tools/dotc/util/Store.scala index d8c9230b9272..8605b9021980 100644 --- a/compiler/src/dotty/tools/dotc/util/Store.scala +++ b/compiler/src/dotty/tools/dotc/util/Store.scala @@ -8,7 +8,7 @@ object Store { } class Store(private val elems: Array[AnyRef | Null]) extends AnyVal { - import Store._ + import Store.* def newLocation[T](): (Location[T], Store) = { val elems1 = new Array[AnyRef | Null](elems.length + 1) diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala index 975826a87a37..d93505f6f3c2 100644 --- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala @@ -6,7 +6,7 @@ import java.lang.ref.{ReferenceQueue, WeakReference} import scala.annotation.{ constructorOnly, tailrec } -import dotty.tools._ +import dotty.tools.* /** * A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other @@ -20,7 +20,7 @@ import dotty.tools._ */ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Double = 0.5) extends MutableSet[A] { - import WeakHashSet._ + import WeakHashSet.* type This = WeakHashSet[A] diff --git a/compiler/src/dotty/tools/dotc/util/kwords.sc b/compiler/src/dotty/tools/dotc/util/kwords.sc index 961be3b0aa23..377be9dbcb65 100644 --- a/compiler/src/dotty/tools/dotc/util/kwords.sc +++ b/compiler/src/dotty/tools/dotc/util/kwords.sc @@ -1,8 +1,8 @@ package dotty.tools.dotc.util -import dotty.tools.dotc.parsing._ -import Scanners._ -import Tokens._ +import dotty.tools.dotc.parsing.* +import Scanners.* +import Tokens.* object kwords { println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index 2a1b0fc283e9..8df4015a53c2 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -21,7 +21,7 @@ import dotc.classpath.{ PackageEntry, ClassPathEntries, PackageName } * A representation of the compiler's class- or sourcepath. 
*/ trait ClassPath { - import dotty.tools.dotc.classpath._ + import dotty.tools.dotc.classpath.* def asURLs: Seq[URL] final def hasPackage(pkg: String): Boolean = hasPackage(PackageName(pkg)) diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala index 27f2c077dd6a..efce60d3f86d 100644 --- a/compiler/src/dotty/tools/io/File.scala +++ b/compiler/src/dotty/tools/io/File.scala @@ -12,7 +12,7 @@ import scala.language.unsafeNulls import java.io.{File => JavaIoFile, _} import java.nio.file.{Files, Paths} -import java.nio.file.StandardOpenOption._ +import java.nio.file.StandardOpenOption.* import scala.io.Codec /** diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala index 667dbd5965f8..3e65d2f7635d 100644 --- a/compiler/src/dotty/tools/io/Jar.scala +++ b/compiler/src/dotty/tools/io/Jar.scala @@ -10,8 +10,8 @@ package io import scala.language.unsafeNulls import java.io.{ InputStream, OutputStream, DataOutputStream } -import java.util.jar._ -import scala.jdk.CollectionConverters._ +import java.util.jar.* +import scala.jdk.CollectionConverters.* import scala.collection.mutable import Attributes.Name import scala.annotation.tailrec @@ -42,7 +42,7 @@ class Jar(file: File) { protected def errorFn(msg: String): Unit = Console println msg - import Jar._ + import Jar.* lazy val jarFile: JarFile = new JarFile(file.jpath.toFile) lazy val manifest: Option[Manifest] = withJarInput(s => Option(s.getManifest)) @@ -142,7 +142,7 @@ object Jar { def underlying: JManifest = manifest def attrs: mutable.Map[Name, String] = manifest.getMainAttributes().asInstanceOf[AttributeMap].asScala withDefaultValue null def initialMainAttrs: Map[Attributes.Name, String] = { - import scala.util.Properties._ + import scala.util.Properties.* Map( Name.MANIFEST_VERSION -> "1.0", ScalaCompilerVersion -> versionNumberString diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index 8f3b5d8010e4..49b743e83074 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ b/compiler/src/dotty/tools/io/JarArchive.scala @@ -4,7 +4,7 @@ import scala.language.unsafeNulls import java.nio.file.{FileSystemAlreadyExistsException, FileSystems} -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* /** * This class implements an [[AbstractFile]] backed by a jar diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala index dddb870afc65..c8420c5e381d 100644 --- a/compiler/src/dotty/tools/io/Path.scala +++ b/compiler/src/dotty/tools/io/Path.scala @@ -8,11 +8,11 @@ package dotty.tools.io import scala.language.unsafeNulls import java.io.RandomAccessFile -import java.nio.file._ +import java.nio.file.* import java.net.{URI, URL} import java.nio.file.attribute.{BasicFileAttributes, FileTime} import java.io.IOException -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* import scala.util.Random.alphanumeric /** An abstraction for filesystem paths. The differences between @@ -63,7 +63,7 @@ object Path { private[io] def randomPrefix: String = alphanumeric take 6 mkString "" private[io] def fail(msg: String): Nothing = throw FileOperationException(msg) } -import Path._ +import Path.* /** The Path constructor is private so we can enforce some * semantics regarding how a Path might relate to the world. 
diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index e28b7de7983d..9e6d5fe4796b 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -13,7 +13,7 @@ import java.nio.file.Files import java.util.zip.{ ZipEntry, ZipFile } import java.util.jar.Manifest import scala.collection.mutable -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* /** An abstraction for zip files and streams. Everything is written the way * it is for performance: we come through here a lot on every run. Be careful @@ -52,7 +52,7 @@ object ZipArchive { else path.substring(idx + 1) } } -import ZipArchive._ +import ZipArchive.* /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) extends AbstractFile with Equals { self => diff --git a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala index 8a12ae22be37..860c4a9372f9 100644 --- a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala +++ b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala @@ -1,7 +1,7 @@ package dotty.tools.repl import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Phases.Phase import scala.compiletime.uninitialized @@ -12,7 +12,7 @@ import scala.compiletime.uninitialized * after Typer. */ class CollectTopLevelImports extends Phase { - import tpd._ + import tpd.* def phaseName: String = "collectTopLevelImports" diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala index 8e048d786ae1..294f0a331ec2 100644 --- a/compiler/src/dotty/tools/repl/JLineTerminal.scala +++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala @@ -2,15 +2,15 @@ package dotty.tools.repl import scala.language.unsafeNulls -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.parsing.Scanners.Scanner -import dotty.tools.dotc.parsing.Tokens._ +import dotty.tools.dotc.parsing.Tokens.* import dotty.tools.dotc.printing.SyntaxHighlighting import dotty.tools.dotc.reporting.Reporter import dotty.tools.dotc.util.SourceFile import org.jline.reader import org.jline.reader.Parser.ParseContext -import org.jline.reader._ +import org.jline.reader.* import org.jline.reader.impl.LineReaderImpl import org.jline.reader.impl.history.DefaultHistory import org.jline.terminal.TerminalBuilder @@ -50,8 +50,8 @@ class JLineTerminal extends java.io.Closeable { def readLine( completer: Completer // provide auto-completions )(using Context): String = { - import LineReader.Option._ - import LineReader._ + import LineReader.Option.* + import LineReader.* val userHome = System.getProperty("user.home") val lineReader = LineReaderBuilder .builder() diff --git a/compiler/src/dotty/tools/repl/ParseResult.scala b/compiler/src/dotty/tools/repl/ParseResult.scala index a67b247066f7..b9139343bca1 100644 --- a/compiler/src/dotty/tools/repl/ParseResult.scala +++ b/compiler/src/dotty/tools/repl/ParseResult.scala @@ -3,7 +3,7 @@ package repl import dotc.CompilationUnit import dotc.ast.untpd -import dotc.core.Contexts._ +import dotc.core.Contexts.* import dotc.core.StdNames.str import dotc.parsing.Parsers.Parser import dotc.parsing.Tokens diff --git 
a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index 972baa39877b..487b6ce3924f 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -23,7 +23,7 @@ import scala.util.control.NonFatal */ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): - import Rendering._ + import Rendering.* var myClassLoader: AbstractFileClassLoader = uninitialized diff --git a/compiler/src/dotty/tools/repl/ReplCommand.scala b/compiler/src/dotty/tools/repl/ReplCommand.scala index 3e46106acc2c..0b40a7cec0b3 100644 --- a/compiler/src/dotty/tools/repl/ReplCommand.scala +++ b/compiler/src/dotty/tools/repl/ReplCommand.scala @@ -1,6 +1,6 @@ package dotty.tools.repl -import dotty.tools.dotc.config.Properties._ +import dotty.tools.dotc.config.Properties.* import dotty.tools.dotc.config.CompilerCommand object ReplCommand extends CompilerCommand: diff --git a/compiler/src/dotty/tools/repl/ReplCompiler.scala b/compiler/src/dotty/tools/repl/ReplCompiler.scala index d3a5561b6080..af3fb32c3e86 100644 --- a/compiler/src/dotty/tools/repl/ReplCompiler.scala +++ b/compiler/src/dotty/tools/repl/ReplCompiler.scala @@ -1,23 +1,23 @@ package dotty.tools.repl -import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.{tpd, untpd} import dotty.tools.dotc.ast.tpd.TreeOps -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Names._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.Symbols._ +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.reporting.Diagnostic import dotty.tools.dotc.transform.PostTyper import dotty.tools.dotc.typer.ImportInfo.{withRootImports, RootRef} import dotty.tools.dotc.typer.TyperPhase -import dotty.tools.dotc.util.Spans._ +import dotty.tools.dotc.util.Spans.* import dotty.tools.dotc.util.{ParsedComment, Property, SourceFile} import dotty.tools.dotc.{CompilationUnit, Compiler, Run} -import dotty.tools.repl.results._ +import dotty.tools.repl.results.* import scala.collection.mutable import scala.util.chaining.given @@ -156,7 +156,7 @@ class ReplCompiler extends Compiler: def wrapped(expr: String, sourceFile: SourceFile, state: State)(using Context): Result[untpd.PackageDef] = { def wrap(trees: List[untpd.Tree]): untpd.PackageDef = { - import untpd._ + import untpd.* val valdef = ValDef("expr".toTermName, TypeTree(), Block(trees, unitLiteral).withSpan(Span(0, expr.length))) val tmpl = Template(emptyConstructor, Nil, Nil, EmptyValDef, List(valdef)) @@ -186,7 +186,7 @@ class ReplCompiler extends Compiler: sourceFile.atSpan(Span(0, sourceFile.content.length)))).errors def unwrappedTypeTree(tree: tpd.Tree, sourceFile0: SourceFile)(using Context): Result[tpd.ValDef] = { - import tpd._ + import tpd.* tree match { case PackageDef(_, List(TypeDef(_, tmpl: Template))) => tmpl.body @@ -198,7 +198,7 @@ class ReplCompiler extends Compiler: } def unwrappedUntypedTree(tree: untpd.Tree, sourceFile0: SourceFile)(using Context): Result[untpd.ValDef] = - import untpd._ + import untpd.* tree match { case PackageDef(_, List(TypeDef(_, tmpl: Template))) => tmpl.body diff --git 
a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 6bb3d21dd45f..5226ef0b4546 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -5,21 +5,21 @@ import scala.language.unsafeNulls import java.io.{File => JFile, PrintStream} import java.nio.charset.StandardCharsets -import dotty.tools.dotc.ast.Trees._ +import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.{tpd, untpd} import dotty.tools.dotc.config.CommandLineParser.tokenize import dotty.tools.dotc.config.Properties.{javaVersion, javaVmName, simpleVersionString} -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Phases.{unfusedPhases, typerPhase} import dotty.tools.dotc.core.Denotations.Denotation -import dotty.tools.dotc.core.Flags._ +import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Mode import dotty.tools.dotc.core.NameKinds.SimpleNameKind import dotty.tools.dotc.core.NameKinds.DefaultGetterName -import dotty.tools.dotc.core.NameOps._ +import dotty.tools.dotc.core.NameOps.* import dotty.tools.dotc.core.Names.Name -import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.Symbols.{Symbol, defn} import dotty.tools.dotc.interfaces import dotty.tools.dotc.interactive.Completion @@ -30,14 +30,14 @@ import dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.{SourceFile, SourcePosition} import dotty.tools.dotc.{CompilationUnit, Driver} import dotty.tools.dotc.config.CompilerCommand -import dotty.tools.io._ +import dotty.tools.io.* import dotty.tools.runner.ScalaClassLoader.* -import org.jline.reader._ +import org.jline.reader.* import scala.annotation.tailrec import scala.collection.mutable import scala.compiletime.uninitialized -import scala.jdk.CollectionConverters._ +import scala.jdk.CollectionConverters.* import scala.util.control.NonFatal import scala.util.Using diff --git a/compiler/src/dotty/tools/runner/ObjectRunner.scala b/compiler/src/dotty/tools/runner/ObjectRunner.scala index cb8f9d791dfa..62dbcc32f30d 100644 --- a/compiler/src/dotty/tools/runner/ObjectRunner.scala +++ b/compiler/src/dotty/tools/runner/ObjectRunner.scala @@ -19,7 +19,7 @@ trait CommonRunner { * @throws java.lang.reflect.InvocationTargetException */ def run(urls: Seq[URL], objectName: String, arguments: Seq[String]): Unit = { - import RichClassLoader._ + import RichClassLoader.* ScalaClassLoader.fromURLsParallelCapable(urls).run(objectName, arguments) } diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 0f64d6e23b8e..04472647b9fc 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -7,20 +7,20 @@ import dotty.tools.dotc import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Annotations -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Decorators._ +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.NameKinds -import dotty.tools.dotc.core.NameOps._ -import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.NameOps.* +import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.Types import dotty.tools.dotc.NoCompilationUnit 
import dotty.tools.dotc.quoted.MacroExpansion import dotty.tools.dotc.quoted.PickledQuotes import dotty.tools.dotc.quoted.QuotePatterns -import dotty.tools.dotc.quoted.reflect._ +import dotty.tools.dotc.quoted.reflect.* import scala.quoted.runtime.{QuoteUnpickler, QuoteMatching} -import scala.quoted.runtime.impl.printers._ +import scala.quoted.runtime.impl.printers.* import scala.reflect.TypeTest diff --git a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala index d65328bb5405..705efc5ffab1 100644 --- a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala +++ b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala @@ -1,7 +1,7 @@ package scala.quoted.runtime.impl import dotty.tools.dotc.ast.tpd.Tree -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* class ScopeException(msg: String) extends Exception(msg) diff --git a/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala b/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala index 797b38be2743..397ad49a309b 100644 --- a/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala +++ b/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala @@ -1,7 +1,7 @@ package scala.quoted package runtime.impl -import dotty.tools.dotc.core.Contexts._ +import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.util.Property import dotty.tools.dotc.util.SourcePosition diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index c229338ad228..eac85244d97b 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -1,7 +1,7 @@ package scala.quoted package runtime.impl.printers -import scala.quoted._ +import scala.quoted.* object Extractors { @@ -18,7 +18,7 @@ object Extractors { new ExtractorsPrinter[quotes.type]().visitSymbol(symbol).result() def showFlags(using Quotes)(flags: quotes.reflect.Flags): String = { - import quotes.reflect._ + import quotes.reflect.* val flagList = List.newBuilder[String] if (flags.is(Flags.Abstract)) flagList += "Flags.Abstract" if (flags.is(Flags.Artifact)) flagList += "Flags.Artifact" @@ -64,7 +64,7 @@ object Extractors { } private class ExtractorsPrinter[Q <: Quotes & Singleton](using val quotes: Q) { self => - import quotes.reflect._ + import quotes.reflect.* private val sb: StringBuilder = new StringBuilder diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index cd36e31716a7..4dfb61a59722 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -19,7 +19,7 @@ object SourceCode { symbol.fullName def showFlags(using Quotes)(flags: quotes.reflect.Flags)(syntaxHighlight: SyntaxHighlight): String = { - import quotes.reflect._ + import quotes.reflect.* val flagList = List.newBuilder[String] if (flags.is(Flags.Abstract)) flagList += "abstract" if (flags.is(Flags.Artifact)) flagList += "artifact" @@ -64,8 +64,8 @@ object SourceCode { } private class SourceCodePrinter[Q <: Quotes & Singleton](syntaxHighlight: SyntaxHighlight, fullNames: Boolean)(using val quotes: Q) { - import syntaxHighlight._ - import quotes.reflect._ + import syntaxHighlight.* + import quotes.reflect.* private[this] val sb: StringBuilder = new StringBuilder diff --git 
a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 2d9aaea2be08..6993b8202082 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -1,12 +1,12 @@ package scala import annotation.{experimental, showAsInfix} -import compiletime._ -import compiletime.ops.int._ +import compiletime.* +import compiletime.ops.int.* /** Tuple of arbitrary arity */ sealed trait Tuple extends Product { - import Tuple._ + import Tuple.* /** Create a copy of this tuple as an Array */ inline def toArray: Array[Object] = @@ -292,7 +292,7 @@ case object EmptyTuple extends Tuple { /** Tuple of arbitrary non-zero arity */ sealed trait NonEmptyTuple extends Tuple { - import Tuple._ + import Tuple.* /** Get the i-th element of this tuple. * Equivalent to productElement but with a precise return type. diff --git a/library/src/scala/annotation/MacroAnnotation.scala b/library/src/scala/annotation/MacroAnnotation.scala index 5c39ef45f417..999bc3095a69 100644 --- a/library/src/scala/annotation/MacroAnnotation.scala +++ b/library/src/scala/annotation/MacroAnnotation.scala @@ -2,7 +2,7 @@ package scala package annotation -import scala.quoted._ +import scala.quoted.* /** Base trait for macro annotation implementation. * Macro annotations can transform definitions and add new definitions. @@ -46,7 +46,7 @@ definition that is owned by the package or package object. * * class memoize extends MacroAnnotation: * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - * import quotes.reflect._ + * import quotes.reflect.* * tree match * case DefDef(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(rhsTree)) => * (param.tpt.tpe.asType, tpt.tpe.asType) match diff --git a/library/src/scala/annotation/constructorOnly.scala b/library/src/scala/annotation/constructorOnly.scala index c78c16534183..8fefc3b012dd 100644 --- a/library/src/scala/annotation/constructorOnly.scala +++ b/library/src/scala/annotation/constructorOnly.scala @@ -8,7 +8,7 @@ package scala.annotation -import scala.annotation.meta._ +import scala.annotation.meta.* /** An annotation that goes on parameters of classes or traits. It asserts * that the parameter is used only for initialization and is not kept in diff --git a/library/src/scala/annotation/newMain.scala b/library/src/scala/annotation/newMain.scala index 6864b5accd6c..552e4225a648 100644 --- a/library/src/scala/annotation/newMain.scala +++ b/library/src/scala/annotation/newMain.scala @@ -65,8 +65,8 @@ import scala.annotation.meta.param */ @experimental final class newMain extends MainAnnotation[FromString, Any]: - import newMain._ - import MainAnnotation._ + import newMain.* + import MainAnnotation.* private val longArgRegex = "--[a-zA-Z][a-zA-Z0-9]+".r private val shortArgRegex = "-[a-zA-Z]".r diff --git a/library/src/scala/annotation/static.scala b/library/src/scala/annotation/static.scala index b00072b18908..7cee5fc160d5 100644 --- a/library/src/scala/annotation/static.scala +++ b/library/src/scala/annotation/static.scala @@ -1,6 +1,6 @@ package scala.annotation -import scala.annotation.meta._ +import scala.annotation.meta.* /** https://github.com/scala/scala.github.com/pull/491 */ diff --git a/library/src/scala/compiletime/ops/any.scala b/library/src/scala/compiletime/ops/any.scala index f0998058e9f7..e3f030c33634 100644 --- a/library/src/scala/compiletime/ops/any.scala +++ b/library/src/scala/compiletime/ops/any.scala @@ -5,7 +5,7 @@ object any: /** Equality comparison of two singleton types. 
* ```scala * //{ - * import compiletime.ops.any._ + * import compiletime.ops.any.* * //} * val eq1: 1 == 1 = true * val eq2: 1 == "1" = false @@ -18,7 +18,7 @@ object any: /** Inequality comparison of two singleton types. * ```scala * //{ - * import compiletime.ops.any._ + * import compiletime.ops.any.* * //} * val eq1: 1 != 1 = false * val eq2: 1 != "1" = true @@ -31,7 +31,7 @@ object any: /** Tests if a type is a constant. * ```scala * //{ - * import compiletime.ops.any._ + * import compiletime.ops.any.* * //} * val c1: IsConst[1] = true * val c2: IsConst["hi"] = true @@ -42,7 +42,7 @@ object any: * will be evaluated only at its concrete type application. E.g.: * ```scala * //{ - * import compiletime.ops.any._ + * import compiletime.ops.any.* * //} * //def `isConst`` returns the type `IsConst[X]`, since `X` is not yet known. * def isConst[X] : IsConst[X] = ??? @@ -56,7 +56,7 @@ object any: /** String conversion of a constant singleton type. * ```scala * //{ - * import compiletime.ops.any._ + * import compiletime.ops.any.* * //} * val s1: ToString[1] = "1" * val sTrue: ToString[true] = "true" diff --git a/library/src/scala/compiletime/ops/boolean.scala b/library/src/scala/compiletime/ops/boolean.scala index 3645524607dd..f6a8c3d3b37e 100644 --- a/library/src/scala/compiletime/ops/boolean.scala +++ b/library/src/scala/compiletime/ops/boolean.scala @@ -6,7 +6,7 @@ object boolean: /** Negation of a `Boolean` singleton type. * ```scala * //{ - * import compiletime.ops.boolean._ + * import compiletime.ops.boolean.* * //} * val notFalse: ![false] = true * val notTrue: ![true] = false @@ -18,7 +18,7 @@ object boolean: /** Exclusive disjunction of two `Boolean` singleton types. * ```scala * //{ - * import compiletime.ops.boolean._ + * import compiletime.ops.boolean.* * //} * val a: true ^ true = false * val b: false ^ true = true @@ -30,7 +30,7 @@ object boolean: /** Conjunction of two `Boolean` singleton types. * ```scala * //{ - * import compiletime.ops.boolean._ + * import compiletime.ops.boolean.* * //} * val a: true && true = true * val b: false && true = false @@ -42,7 +42,7 @@ object boolean: /** Disjunction of two `Boolean` singleton types. * ```scala * //{ - * import compiletime.ops.boolean._ + * import compiletime.ops.boolean.* * //} * val a: true || false = true * val b: false || false = false diff --git a/library/src/scala/compiletime/ops/double.scala b/library/src/scala/compiletime/ops/double.scala index 65a2e1dde407..0e038904221e 100644 --- a/library/src/scala/compiletime/ops/double.scala +++ b/library/src/scala/compiletime/ops/double.scala @@ -5,7 +5,7 @@ object double: /** Addition of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val sum: 2.0 + 2.0 = 4.0 * ``` @@ -16,7 +16,7 @@ object double: /** Subtraction of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val sub: 4.0 - 2.0 = 2.0 * ``` @@ -27,7 +27,7 @@ object double: /** Multiplication of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val mul: 4.0 * 2.0 = 8.0 * ``` @@ -38,7 +38,7 @@ object double: /** Integer division of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val div: 5.0 / 2.0 = 2.5 * ``` @@ -49,7 +49,7 @@ object double: /** Remainder of the division of `X` by `Y`. 
* ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val mod: 5.0 % 2.0 = 1.0 * ``` @@ -60,7 +60,7 @@ object double: /** Less-than comparison of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val lt1: 4.0 < 2.0 = false * val lt2: 2.0 < 4.0 = true @@ -72,7 +72,7 @@ object double: /** Greater-than comparison of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val gt1: 4.0 > 2.0 = true * val gt2: 2.0 > 2.0 = false @@ -84,7 +84,7 @@ object double: /** Greater-or-equal comparison of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val ge1: 4.0 >= 2.0 = true * val ge2: 2.0 >= 3.0 = false @@ -96,7 +96,7 @@ object double: /** Less-or-equal comparison of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val lt1: 4.0 <= 2.0 = false * val lt2: 2.0 <= 2.0 = true @@ -108,7 +108,7 @@ object double: /** Absolute value of an `Double` singleton type. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val abs: Abs[-1.0] = 1.0 * ``` @@ -119,7 +119,7 @@ object double: /** Negation of an `Double` singleton type. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val neg1: Negate[-1.0] = 1.0 * val neg2: Negate[1.0] = -1.0 @@ -131,7 +131,7 @@ object double: /** Minimum of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val min: Min[-1.0, 1.0] = -1.0 * ``` @@ -142,7 +142,7 @@ object double: /** Maximum of two `Double` singleton types. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val max: Max[-1.0, 1.0] = 1.0 * ``` @@ -153,7 +153,7 @@ object double: /** Int conversion of a `Double` singleton type. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val x: ToInt[1.0] = 1 * ``` @@ -164,7 +164,7 @@ object double: /** Long conversion of a `Double` singleton type. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val x: ToLong[1.0] = 1L * ``` @@ -175,7 +175,7 @@ object double: /** Float conversion of a `Double` singleton type. * ```scala * //{ - * import compiletime.ops.double._ + * import compiletime.ops.double.* * //} * val x: ToFloat[1.0] = 1.0f * ``` diff --git a/library/src/scala/compiletime/ops/float.scala b/library/src/scala/compiletime/ops/float.scala index 0f2a8faa43e7..d7be87be3d9c 100644 --- a/library/src/scala/compiletime/ops/float.scala +++ b/library/src/scala/compiletime/ops/float.scala @@ -5,7 +5,7 @@ object float: /** Addition of two `Float` singleton types. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val sum: 2.0f + 2.0f = 4.0f * ``` @@ -16,7 +16,7 @@ object float: /** Subtraction of two `Float` singleton types. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val sub: 4.0f - 2.0f = 2.0f * ``` @@ -27,7 +27,7 @@ object float: /** Multiplication of two `Float` singleton types. 
* ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val mul: 4.0f * 2.0f = 8.0f * ``` @@ -38,7 +38,7 @@ object float: /** Integer division of two `Float` singleton types. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val div: 5.0f / 2.0f = 2.5f * ``` @@ -49,7 +49,7 @@ object float: /** Remainder of the division of `X` by `Y`. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val mod: 5.0f % 2.0f = 1.0f * ``` @@ -60,7 +60,7 @@ object float: /** Less-than comparison of two `Float` singleton types. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val lt1: 4.0f < 2.0f = false * val lt2: 2.0f < 4.0f = true @@ -72,7 +72,7 @@ object float: /** Greater-than comparison of two `Float` singleton types. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val gt1: 4.0f > 2.0f = true * val gt2: 2.0f > 2.0f = false @@ -84,7 +84,7 @@ object float: /** Greater-or-equal comparison of two `Float` singleton types. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val ge1: 4.0f >= 2.0f = true * val ge2: 2.0f >= 3.0f = false @@ -96,7 +96,7 @@ object float: /** Less-or-equal comparison of two `Float` singleton types. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val lt1: 4.0f <= 2.0f = false * val lt2: 2.0f <= 2.0f = true @@ -108,7 +108,7 @@ object float: /** Absolute value of an `Float` singleton type. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val abs: Abs[-1.0f] = 1.0f * ``` @@ -119,7 +119,7 @@ object float: /** Negation of an `Float` singleton type. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val neg1: Negate[-1.0f] = 1.0f * val neg2: Negate[1.0f] = -1.0f @@ -131,7 +131,7 @@ object float: /** Minimum of two `Float` singleton types. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val min: Min[-1.0f, 1.0f] = -1.0f * ``` @@ -142,7 +142,7 @@ object float: /** Maximum of two `Float` singleton types. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val max: Max[-1.0f, 1.0f] = 1.0f * ``` @@ -153,7 +153,7 @@ object float: /** Int conversion of a `Float` singleton type. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val x: ToInt[1.0f] = 1 * ``` @@ -164,7 +164,7 @@ object float: /** Long conversion of a `Float` singleton type. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val x: ToLong[1.0f] = 1L * ``` @@ -175,7 +175,7 @@ object float: /** Double conversion of a `Float` singleton type. * ```scala * //{ - * import compiletime.ops.float._ + * import compiletime.ops.float.* * //} * val x: ToDouble[1.0f] = 1.0 * ``` diff --git a/library/src/scala/compiletime/ops/int.scala b/library/src/scala/compiletime/ops/int.scala index 7efdc1851c4e..ed4a3c3c3261 100644 --- a/library/src/scala/compiletime/ops/int.scala +++ b/library/src/scala/compiletime/ops/int.scala @@ -6,7 +6,7 @@ object int: * * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * type S[N <: Int] <: Int = N match { * case 0 => 1 @@ -23,7 +23,7 @@ object int: /** Addition of two `Int` singleton types. 
* ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val sum: 2 + 2 = 4 * ``` @@ -34,7 +34,7 @@ object int: /** Subtraction of two `Int` singleton types. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val sub: 4 - 2 = 2 * ``` @@ -45,7 +45,7 @@ object int: /** Multiplication of two `Int` singleton types. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val mul: 4 * 2 = 8 * ``` @@ -56,7 +56,7 @@ object int: /** Integer division of two `Int` singleton types. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val div: 5 / 2 = 2 * ``` @@ -67,7 +67,7 @@ object int: /** Remainder of the division of `X` by `Y`. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val mod: 5 % 2 = 1 * ``` @@ -78,7 +78,7 @@ object int: /** Binary left shift of `X` by `Y`. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val lshift: 1 << 2 = 4 * ``` @@ -89,7 +89,7 @@ object int: /** Binary right shift of `X` by `Y`. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val rshift: 10 >> 1 = 5 * ``` @@ -100,7 +100,7 @@ object int: /** Binary right shift of `X` by `Y`, filling the left with zeros. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val rshiftzero: 10 >>> 1 = 5 * ``` @@ -111,7 +111,7 @@ object int: /** Bitwise xor of `X` and `Y`. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val xor: 10 ^ 30 = 20 * ``` @@ -122,7 +122,7 @@ object int: /** Less-than comparison of two `Int` singleton types. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val lt1: 4 < 2 = false * val lt2: 2 < 4 = true @@ -134,7 +134,7 @@ object int: /** Greater-than comparison of two `Int` singleton types. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val gt1: 4 > 2 = true * val gt2: 2 > 2 = false @@ -146,7 +146,7 @@ object int: /** Greater-or-equal comparison of two `Int` singleton types. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val ge1: 4 >= 2 = true * val ge2: 2 >= 3 = false @@ -158,7 +158,7 @@ object int: /** Less-or-equal comparison of two `Int` singleton types. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val lt1: 4 <= 2 = false * val lt2: 2 <= 2 = true @@ -170,7 +170,7 @@ object int: /** Bitwise and of `X` and `Y`. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val and1: BitwiseAnd[4, 4] = 4 * val and2: BitwiseAnd[10, 5] = 0 @@ -182,7 +182,7 @@ object int: /** Bitwise or of `X` and `Y`. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val or: BitwiseOr[10, 11] = 11 * ``` @@ -193,7 +193,7 @@ object int: /** Absolute value of an `Int` singleton type. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val abs: Abs[-1] = 1 * ``` @@ -204,7 +204,7 @@ object int: /** Negation of an `Int` singleton type. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val neg1: Negate[-1] = 1 * val neg2: Negate[1] = -1 @@ -216,7 +216,7 @@ object int: /** Minimum of two `Int` singleton types. 
* ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val min: Min[-1, 1] = -1 * ``` @@ -227,7 +227,7 @@ object int: /** Maximum of two `Int` singleton types. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val max: Max[-1, 1] = 1 * ``` @@ -238,7 +238,7 @@ object int: /** String conversion of an `Int` singleton type. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val abs: ToString[1] = "1" * ``` @@ -250,7 +250,7 @@ object int: /** Long conversion of an `Int` singleton type. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val x: ToLong[1] = 1L * ``` @@ -261,7 +261,7 @@ object int: /** Float conversion of an `Int` singleton type. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val x: ToFloat[1] = 1.0f * ``` @@ -272,7 +272,7 @@ object int: /** Double conversion of an `Int` singleton type. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val x: ToDouble[1] = 1.0 * ``` @@ -286,7 +286,7 @@ object int: * in other words if it is equal to zero. * ```scala * //{ - * import compiletime.ops.int._ + * import compiletime.ops.int.* * //} * val zero_lzc: NumberOfLeadingZeros[0] = 32 * val eight_lzc: NumberOfLeadingZeros[8] = 28 diff --git a/library/src/scala/compiletime/ops/long.scala b/library/src/scala/compiletime/ops/long.scala index 708fae3f07f3..25563ac70367 100644 --- a/library/src/scala/compiletime/ops/long.scala +++ b/library/src/scala/compiletime/ops/long.scala @@ -21,7 +21,7 @@ object long: /** Addition of two `Long` singleton types. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val sum: 2L + 2L = 4L * ``` @@ -32,7 +32,7 @@ object long: /** Subtraction of two `Long` singleton types. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val sub: 4L - 2L = 2L * ``` @@ -43,7 +43,7 @@ object long: /** Multiplication of two `Long` singleton types. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val mul: 4L * 2L = 8L * ``` @@ -54,7 +54,7 @@ object long: /** Integer division of two `Long` singleton types. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val div: 5L / 2L = 2L * ``` @@ -65,7 +65,7 @@ object long: /** Remainder of the division of `X` by `Y`. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val mod: 5L % 2L = 1L * ``` @@ -76,7 +76,7 @@ object long: /** Binary left shift of `X` by `Y`. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val lshift: 1L << 2L = 4L * ``` @@ -87,7 +87,7 @@ object long: /** Binary right shift of `X` by `Y`. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val rshift: 10L >> 1L = 5L * ``` @@ -98,7 +98,7 @@ object long: /** Binary right shift of `X` by `Y`, filling the left with zeros. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val rshiftzero: 10L >>> 1L = 5L * ``` @@ -109,7 +109,7 @@ object long: /** Bitwise xor of `X` and `Y`. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val xor: 10L ^ 30L = 20L * ``` @@ -120,7 +120,7 @@ object long: /** Less-than comparison of two `Long` singleton types. 
* ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val lt1: 4L < 2L = false * val lt2: 2L < 4L = true @@ -132,7 +132,7 @@ object long: /** Greater-than comparison of two `Long` singleton types. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val gt1: 4L > 2L = true * val gt2: 2L > 2L = false @@ -144,7 +144,7 @@ object long: /** Greater-or-equal comparison of two `Long` singleton types. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val ge1: 4L >= 2L = true * val ge2: 2L >= 3L = false @@ -156,7 +156,7 @@ object long: /** Less-or-equal comparison of two `Long` singleton types. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val lt1: 4L <= 2L = false * val lt2: 2L <= 2L = true @@ -168,7 +168,7 @@ object long: /** Bitwise and of `X` and `Y`. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val and1: BitwiseAnd[4L, 4L] = 4L * val and2: BitwiseAnd[10L, 5L] = 0L @@ -180,7 +180,7 @@ object long: /** Bitwise or of `X` and `Y`. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val or: BitwiseOr[10L, 11L] = 11L * ``` @@ -191,7 +191,7 @@ object long: /** Absolute value of an `Long` singleton type. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val abs: Abs[-1L] = 1L * ``` @@ -202,7 +202,7 @@ object long: /** Negation of an `Long` singleton type. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val neg1: Negate[-1L] = 1L * val neg2: Negate[1L] = -1L @@ -214,7 +214,7 @@ object long: /** Minimum of two `Long` singleton types. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val min: Min[-1L, 1L] = -1L * ``` @@ -225,7 +225,7 @@ object long: /** Maximum of two `Long` singleton types. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val max: Max[-1L, 1L] = 1L * ``` @@ -239,7 +239,7 @@ object long: * in other words if it is equal to zero. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val zero_lzc: NumberOfLeadingZeros[0L] = 64 * val eight_lzc: NumberOfLeadingZeros[8L] = 60 @@ -253,7 +253,7 @@ object long: /** Int conversion of a `Long` singleton type. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val x: ToInt[1L] = 1 * ``` @@ -264,7 +264,7 @@ object long: /** Float conversion of a `Long` singleton type. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val x: ToFloat[1L] = 1.0f * ``` @@ -275,7 +275,7 @@ object long: /** Double conversion of a `Long` singleton type. * ```scala * //{ - * import compiletime.ops.long._ + * import compiletime.ops.long.* * //} * val x: ToDouble[1L] = 1.0 * ``` diff --git a/library/src/scala/compiletime/ops/string.scala b/library/src/scala/compiletime/ops/string.scala index 4c220f992707..63caa9ae6371 100644 --- a/library/src/scala/compiletime/ops/string.scala +++ b/library/src/scala/compiletime/ops/string.scala @@ -5,7 +5,7 @@ object string: /** Concatenation of two `String` singleton types. * ```scala * //{ - * import compiletime.ops.string._ + * import compiletime.ops.string.* * //} * val hello: "hello " + "world" = "hello world" * ``` @@ -16,7 +16,7 @@ object string: /** Length of a `String` singleton type. 
* ```scala * //{ - * import compiletime.ops.string._ + * import compiletime.ops.string.* * //} * val helloSize: Length["hello"] = 5 * ``` @@ -30,7 +30,7 @@ object string: * Thus the length of the substring is IEnd-IBeg. * ```scala * //{ - * import compiletime.ops.string._ + * import compiletime.ops.string.* * //} * val x: Substring["hamburger", 4, 8] = "urge" * val y: Substring["smiles", 1, 5] = "mile" @@ -43,7 +43,7 @@ object string: * regular expression `String` singleton type. * ```scala * //{ - * import compiletime.ops.string._ + * import compiletime.ops.string.* * //} * val x: Matches["unhappy", "un.*"] = true * ``` @@ -56,7 +56,7 @@ object string: * the sequence is at index 0, the next at index 1, and so on. * ```scala * //{ - * import string._ + * import string.* * //} * val c: CharAt["hello", 0] = 'h' * ``` diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala index 03afedae8f49..525f647eaaac 100644 --- a/library/src/scala/quoted/Expr.scala +++ b/library/src/scala/quoted/Expr.scala @@ -18,7 +18,7 @@ object Expr { * Some bindings may be elided as an early optimization. */ def betaReduce[T](expr: Expr[T])(using Quotes): Expr[T] = - import quotes.reflect._ + import quotes.reflect.* Term.betaReduce(expr.asTerm) match case Some(expr1) => expr1.asExpr.asInstanceOf[Expr[T]] case _ => expr @@ -28,7 +28,7 @@ object Expr { * will be equivalent to `'{ $s1; $s2; ...; $e }`. */ def block[T](statements: List[Expr[Any]], expr: Expr[T])(using Quotes): Expr[T] = { - import quotes.reflect._ + import quotes.reflect.* Block(statements.map(asTerm), expr.asTerm).asExpr.asInstanceOf[Expr[T]] } @@ -240,7 +240,7 @@ object Expr { * @tparam T type of the implicit parameter */ def summon[T](using Type[T])(using Quotes): Option[Expr[T]] = { - import quotes.reflect._ + import quotes.reflect.* Implicits.search(TypeRepr.of[T]) match { case iss: ImplicitSearchSuccess => Some(iss.tree.asExpr.asInstanceOf[Expr[T]]) case isf: ImplicitSearchFailure => None diff --git a/library/src/scala/quoted/ExprMap.scala b/library/src/scala/quoted/ExprMap.scala index 70af00b16be5..fbe5dee2b342 100644 --- a/library/src/scala/quoted/ExprMap.scala +++ b/library/src/scala/quoted/ExprMap.scala @@ -7,7 +7,7 @@ trait ExprMap: /** Map sub-expressions an expression `e` with a type `T` */ def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = { - import quotes.reflect._ + import quotes.reflect.* final class MapChildren() { def transformStatement(tree: Statement)(owner: Symbol): Statement = { diff --git a/library/src/scala/quoted/FromExpr.scala b/library/src/scala/quoted/FromExpr.scala index aadea9bcb0b6..bcd10e4f09ed 100644 --- a/library/src/scala/quoted/FromExpr.scala +++ b/library/src/scala/quoted/FromExpr.scala @@ -82,7 +82,7 @@ object FromExpr { /** Lift a quoted primitive value `'{ x }` into `x` */ private class PrimitiveFromExpr[T <: Boolean | Byte | Short | Int | Long | Float | Double | Char | String] extends FromExpr[T] { def unapply(expr: Expr[T])(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* def rec(tree: Term): Option[T] = tree match { case Block(stats, e) => if stats.isEmpty then rec(e) else None case Inlined(_, bindings, e) => if bindings.isEmpty then rec(e) else None diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 055c560ad64a..884ae9efe35b 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -8,9 +8,9 @@ import scala.reflect.TypeTest * * Usage: * ```scala - * 
import scala.quoted._ + * import scala.quoted.* * def myExpr[T](using Quotes): Expr[T] = { - * import quotes.reflect._ + * import quotes.reflect.* * ??? * } * ``` @@ -24,7 +24,7 @@ transparent inline def quotes(using q: Quotes): q.type = q * This API does not have the static type guarantees that `Expr` and `Type` provide. * `Quotes` are generated from an enclosing `${ ... }` or `scala.staging.run`. For example: * ```scala sc:nocompile - * import scala.quoted._ + * import scala.quoted.* * inline def myMacro: Expr[T] = * ${ /* (quotes: Quotes) ?=> */ myExpr } * def myExpr(using Quotes): Expr[T] = @@ -105,9 +105,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * * Usage: * ```scala - * import scala.quoted._ + * import scala.quoted.* * def f(expr: Expr[Int])(using Quotes) = - * import quotes.reflect._ + * import quotes.reflect.* * val ast: Term = expr.asTerm * ??? * ``` @@ -2354,10 +2354,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * `ParamClause` encodes the following enumeration * ```scala * //{ - * import scala.quoted._ + * import scala.quoted.* * def inQuotes(using Quotes) = { * val q: Quotes = summon[Quotes] - * import q.reflect._ + * import q.reflect.* * //} * enum ParamClause: * case TypeParamClause(params: List[TypeDef]) @@ -2606,10 +2606,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * Usage: * ```scala * //{ - * import scala.quoted._ + * import scala.quoted.* * def f(using Quotes) = { * val q: Quotes = summon[Quotes] - * import q.reflect._ + * import q.reflect.* * val typeRepr: TypeRepr = ??? * //} * typeRepr.asType match @@ -3745,7 +3745,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * ```scala * //{ * given Quotes = ??? - * import quotes.reflect._ + * import quotes.reflect.* * //} * val moduleName: String = Symbol.freshName("MyModule") * val parents = List(TypeTree.of[Object]) @@ -3767,7 +3767,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * ```scala * //{ * given Quotes = ??? - * import quotes.reflect._ + * import quotes.reflect.* * //} * '{ * object MyModule$macro$1 extends Object: @@ -4122,10 +4122,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * Usages: * ```scala * def rhsExpr(using q: Quotes): Expr[Unit] = - * import q.reflect._ + * import q.reflect.* * '{ val y = ???; (y, y) } * def aValDef(using q: Quotes)(owner: q.reflect.Symbol) = - * import q.reflect._ + * import q.reflect.* * val sym = Symbol.newVal(owner, "x", TypeRepr.of[Unit], Flags.EmptyFlags, Symbol.noSymbol) * val rhs = rhsExpr(using sym.asQuotes).asTerm * ValDef(sym, Some(rhs)) @@ -4134,7 +4134,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * ```scala * //{ * def inQuotes(using q: Quotes) = { - * import q.reflect._ + * import q.reflect.* * //} * new TreeMap: * override def transformTerm(tree: Term)(owner: Symbol): Term = @@ -4749,7 +4749,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * ```scala * //{ * def inQuotes(using q: Quotes) = { - * import q.reflect._ + * import q.reflect.* * //} * class MyTreeAccumulator[X] extends TreeAccumulator[X] { * def foldTree(x: X, tree: Tree)(owner: Symbol): X = ??? 
@@ -4862,7 +4862,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * ```scala * //{ * def inQuotes(using q: Quotes) = { - * import q.reflect._ + * import q.reflect.* * //} * class MyTraverser extends TreeTraverser { * override def traverseTree(tree: Tree)(owner: Symbol): Unit = ??? @@ -4888,7 +4888,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * ```scala * //{ * def inQuotes(using q: Quotes) = { - * import q.reflect._ + * import q.reflect.* * //} * class MyTreeMap extends TreeMap { * override def transformTree(tree: Tree)(owner: Symbol): Tree = ??? diff --git a/library/src/scala/quoted/ToExpr.scala b/library/src/scala/quoted/ToExpr.scala index 883b55833af5..042c8ff37a52 100644 --- a/library/src/scala/quoted/ToExpr.scala +++ b/library/src/scala/quoted/ToExpr.scala @@ -20,70 +20,70 @@ object ToExpr { /** Default implementation of `ToExpr[Boolean]` */ given BooleanToExpr[T <: Boolean]: ToExpr[T] with { def apply(x: T)(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* Literal(BooleanConstant(x)).asExpr.asInstanceOf[Expr[T]] } /** Default implementation of `ToExpr[Byte]` */ given ByteToExpr[T <: Byte]: ToExpr[T] with { def apply(x: T)(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* Literal(ByteConstant(x)).asExpr.asInstanceOf[Expr[T]] } /** Default implementation of `ToExpr[Short]` */ given ShortToExpr[T <: Short]: ToExpr[T] with { def apply(x: T)(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* Literal(ShortConstant(x)).asExpr.asInstanceOf[Expr[T]] } /** Default implementation of `ToExpr[Int]` */ given IntToExpr[T <: Int]: ToExpr[T] with { def apply(x: T)(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* Literal(IntConstant(x)).asExpr.asInstanceOf[Expr[T]] } /** Default implementation of `ToExpr[Long]` */ given LongToExpr[T <: Long]: ToExpr[T] with { def apply(x: T)(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* Literal(LongConstant(x)).asExpr.asInstanceOf[Expr[T]] } /** Default implementation of `ToExpr[Float]` */ given FloatToExpr[T <: Float]: ToExpr[T] with { def apply(x: T)(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* Literal(FloatConstant(x)).asExpr.asInstanceOf[Expr[T]] } /** Default implementation of `ToExpr[Double]` */ given DoubleToExpr[T <: Double]: ToExpr[T] with { def apply(x: T)(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* Literal(DoubleConstant(x)).asExpr.asInstanceOf[Expr[T]] } /** Default implementation of `ToExpr[Char]` */ given CharToExpr[T <: Char]: ToExpr[T] with { def apply(x: T)(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* Literal(CharConstant(x)).asExpr.asInstanceOf[Expr[T]] } /** Default implementation of `ToExpr[String]` */ given StringToExpr[T <: String]: ToExpr[T] with { def apply(x: T)(using Quotes) = - import quotes.reflect._ + import quotes.reflect.* Literal(StringConstant(x)).asExpr.asInstanceOf[Expr[T]] } /** Default implementation of `ToExpr[Class[T]]` */ given ClassToExpr[T <: Class[?]]: ToExpr[T] with { def apply(x: T)(using Quotes) = { - import quotes.reflect._ + import quotes.reflect.* Ref(defn.Predef_classOf).appliedToType(TypeRepr.typeConstructorOf(x)).asExpr.asInstanceOf[Expr[T]] } } diff --git a/library/src/scala/quoted/Varargs.scala b/library/src/scala/quoted/Varargs.scala index e2e74c3879c6..61937fc8fecb 100644 --- a/library/src/scala/quoted/Varargs.scala +++ b/library/src/scala/quoted/Varargs.scala @@ -28,7 +28,7 @@ object Varargs { 
* ``` */ def apply[T](xs: Seq[Expr[T]])(using Type[T])(using Quotes): Expr[Seq[T]] = { - import quotes.reflect._ + import quotes.reflect.* Repeated(xs.map(_.asTerm).toList, TypeTree.of[T]).asExpr.asInstanceOf[Expr[Seq[T]]] } @@ -43,7 +43,7 @@ object Varargs { * */ def unapply[T](expr: Expr[Seq[T]])(using Quotes): Option[Seq[Expr[T]]] = { - import quotes.reflect._ + import quotes.reflect.* def rec(tree: Term): Option[Seq[Expr[T]]] = tree match { case Repeated(elems, _) => Some(elems.map(x => x.asExpr.asInstanceOf[Expr[T]])) case Typed(e, _) => rec(e) diff --git a/library/src/scala/util/FromDigits.scala b/library/src/scala/util/FromDigits.scala index 1577f4103e03..cb73782829ff 100644 --- a/library/src/scala/util/FromDigits.scala +++ b/library/src/scala/util/FromDigits.scala @@ -1,6 +1,6 @@ package scala.util import scala.math.{BigInt} -import quoted._ +import quoted.* import annotation.internal.sharable From 257a10cf8a82cbc5c0fc4aec45de83486054e424 Mon Sep 17 00:00:00 2001 From: odersky Date: Sun, 5 Nov 2023 19:54:26 +0100 Subject: [PATCH 131/216] Admit parametric aliases of classes in parent typing When typing parent types as constructors with implicitly added `()` arguments, also admit alias types that become classes after eta-collapsing. Fixes #18623 --- .../tools/dotc/core/TypeApplications.scala | 15 +++++++++----- .../dotty/tools/dotc/core/TypeComparer.scala | 2 +- .../core/unpickleScala2/Scala2Unpickler.scala | 2 +- .../src/dotty/tools/dotc/typer/Deriving.scala | 2 +- .../src/dotty/tools/dotc/typer/Namer.scala | 4 ++-- .../dotty/tools/dotc/typer/RefChecks.scala | 2 +- .../src/dotty/tools/dotc/typer/Typer.scala | 2 +- tests/neg/i4557.scala | 4 ++-- tests/pos/i18623.scala | 15 ++++++++++++++ tests/pos/i18623a.scala | 20 +++++++++++++++++++ 10 files changed, 54 insertions(+), 14 deletions(-) create mode 100644 tests/pos/i18623.scala create mode 100644 tests/pos/i18623a.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 1cd1a3ad4d39..1dc81946d723 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -36,7 +36,7 @@ object TypeApplications { def apply(tycon: Type)(using Context): Type = assert(tycon.typeParams.nonEmpty, tycon) - tycon.EtaExpand(tycon.typeParamSymbols) + tycon.etaExpand(tycon.typeParamSymbols) /** Test that the parameter bounds in a hk type lambda `[X1,...,Xn] => C[X1, ..., Xn]` * contain the bounds of the type parameters of `C`. This is necessary to be able to @@ -71,7 +71,7 @@ object TypeApplications { */ def EtaExpandIfHK(tparams: List[TypeParamInfo], args: List[Type])(using Context): List[Type] = if (tparams.isEmpty) args - else args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.paramInfoOrCompleter)) + else args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(tparam.paramInfoOrCompleter)) /** A type map that tries to reduce (part of) the result type of the type lambda `tycon` * with the given `args`(some of which are wildcard arguments represented by type bounds). 
@@ -245,7 +245,7 @@ class TypeApplications(val self: Type) extends AnyVal { def topType(using Context): Type = if self.hasSimpleKind then defn.AnyType - else EtaExpand(self.typeParams) match + else etaExpand(self.typeParams) match case tp: HKTypeLambda => tp.derivedLambdaType(resType = tp.resultType.topType) case _ => @@ -302,7 +302,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** Convert a type constructor `TC` which has type parameters `X1, ..., Xn` * to `[X1, ..., Xn] -> TC[X1, ..., Xn]`. */ - def EtaExpand(tparams: List[TypeParamInfo])(using Context): Type = + def etaExpand(tparams: List[TypeParamInfo])(using Context): Type = HKTypeLambda.fromParams(tparams, self.appliedTo(tparams.map(_.paramRef))) //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}") @@ -311,7 +311,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (isLambdaSub) self else EtaExpansion(self) /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */ - def EtaExpandIfHK(bound: Type)(using Context): Type = { + def etaExpandIfHK(bound: Type)(using Context): Type = { val hkParams = bound.hkTypeParams if (hkParams.isEmpty) self else self match { @@ -321,6 +321,11 @@ class TypeApplications(val self: Type) extends AnyVal { } } + /** Maps [Ts] => C[Ts] to C */ + def etaCollapse(using Context): Type = self match + case EtaExpansion(classType) => classType + case _ => self + /** The type representing * * T[U1, ..., Un] diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index d5b97dca6164..9a3b6e3215c6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -746,7 +746,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => val tparams1 = tp1.typeParams if (tparams1.nonEmpty) - return recur(tp1.EtaExpand(tparams1), tp2) || fourthTry + return recur(tp1.etaExpand(tparams1), tp2) || fourthTry tp2 match { case EtaExpansion(tycon2: TypeRef) if tycon2.symbol.isClass && tycon2.symbol.is(JavaDefined) => recur(tp1, tycon2) || fourthTry diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 3e211e75b73b..f982cb5ee9bc 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -832,7 +832,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas } else if args.nonEmpty then tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args.map(translateTempPoly))) - else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams) + else if (sym.typeParams.nonEmpty) tycon.etaExpand(sym.typeParams) else tycon case TYPEBOUNDStpe => val lo = readTypeRef() diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index 8fdc468780ba..abfab1e3b981 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -165,7 +165,7 @@ trait Deriving { // case (a) ... 
see description above val derivedParams = clsParams.dropRight(instanceArity) val instanceType = - if (instanceArity == clsArity) clsType.EtaExpand(clsParams) + if (instanceArity == clsArity) clsType.etaExpand(clsParams) else { val derivedParamTypes = derivedParams.map(_.typeRef) diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 5361f37c2a76..7bbf59e0fd6a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1197,7 +1197,7 @@ class Namer { typer: Typer => val forwarderName = checkNoConflict(alias.toTypeName, isPrivate = false, span) var target = pathType.select(sym) if target.typeParams.nonEmpty then - target = target.EtaExpand(target.typeParams) + target = target.etaExpand(target.typeParams) newSymbol( cls, forwarderName, Exported | Final, @@ -1518,7 +1518,7 @@ class Namer { typer: Typer => def typedParentType(tree: untpd.Tree): tpd.Tree = val parentTpt = typer.typedType(parent, AnyTypeConstructorProto) - val ptpe = parentTpt.tpe + val ptpe = parentTpt.tpe.dealias.etaCollapse if ptpe.typeParams.nonEmpty && ptpe.underlyingClassRef(refinementOK = false).exists then diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index af279844f370..06fb1fbc3b90 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -372,7 +372,7 @@ object RefChecks { */ def checkOverride(checkSubType: (Type, Type) => Context ?=> Boolean, member: Symbol, other: Symbol): Unit = def memberTp(self: Type) = - if (member.isClass) TypeAlias(member.typeRef.EtaExpand(member.typeParams)) + if (member.isClass) TypeAlias(member.typeRef.etaExpand(member.typeParams)) else self.memberInfo(member) def otherTp(self: Type) = self.memberInfo(other) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 66c79658b6ab..11674e6b8522 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -4306,7 +4306,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer AppliedType(tree.tpe, tp.typeParams.map(Function.const(TypeBounds.empty))) else // Eta-expand higher-kinded type - tree.tpe.EtaExpand(tp.typeParamSymbols) + tree.tpe.etaExpand(tp.typeParamSymbols) tree.withType(tp1) } if (ctx.mode.is(Mode.Pattern) || ctx.mode.isQuotedPattern || tree1.tpe <:< pt) tree1 diff --git a/tests/neg/i4557.scala b/tests/neg/i4557.scala index ffdf3b5be97e..74f9099c6d08 100644 --- a/tests/neg/i4557.scala +++ b/tests/neg/i4557.scala @@ -9,11 +9,11 @@ object O { type S0[X, Y] = C1[X, Y] type S1 = C1[Int] // error - class D0 extends T0 // error + class D0 extends T0 // was error, now ok class D1 extends T0[Int] class D2 extends T0[String, Int] // error - class E0 extends S0 // error + class E0 extends S0 // was error, now ok class E1 extends S0[Int] // error class E2 extends S0[String, Int] } diff --git a/tests/pos/i18623.scala b/tests/pos/i18623.scala new file mode 100644 index 000000000000..e34575c6e697 --- /dev/null +++ b/tests/pos/i18623.scala @@ -0,0 +1,15 @@ +final abstract class ForcedRecompilationToken[T] +object ForcedRecompilationToken { + implicit def default: ForcedRecompilationToken["abc"] = null +} + +class GoodNoParens[T](implicit ev: ForcedRecompilationToken[T]) +type BadNoParens[T] = GoodNoParens[T] + +// error +object A extends BadNoParens + +// ok 
+object B extends BadNoParens() +object C extends GoodNoParens + diff --git a/tests/pos/i18623a.scala b/tests/pos/i18623a.scala new file mode 100644 index 000000000000..043bac046896 --- /dev/null +++ b/tests/pos/i18623a.scala @@ -0,0 +1,20 @@ +final abstract class ForcedRecompilationToken[T] +object ForcedRecompilationToken { + implicit def default: ForcedRecompilationToken["abc"] = null +} + +object x { +class GoodNoParens[T](implicit ev: ForcedRecompilationToken[T]) +} +export x.GoodNoParens as BadNoParens + +// error +object A extends BadNoParens + +// ok +object B extends BadNoParens() +object C extends x.GoodNoParens + +object App extends App { + println("compiled") +} \ No newline at end of file From 9d5768017bcbfcead6ee902567d96c6da313a7fc Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Mon, 30 Oct 2023 11:22:58 +0100 Subject: [PATCH 132/216] Add documentation on Repeated and RepeatedParamClass Close #18784 --- library/src/scala/quoted/Quotes.scala | 30 ++++++++++++++++++++++++++- tests/pos/i18784/Macro_1.scala | 20 ++++++++++++++++++ tests/pos/i18784/Test_2.scala | 2 ++ 3 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i18784/Macro_1.scala create mode 100644 tests/pos/i18784/Test_2.scala diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 055c560ad64a..bc0f6d69adf8 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -1622,7 +1622,30 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => end extension end ReturnMethods - /** Tree representing a variable argument list in the source code */ + /** Tree representing a variable argument list in the source code. + * + * This tree is used to encode varargs terms. The Repeated encapsulates + * the sequence of the elements but needs to be wrapped in a + * `scala.<repeated>[T]` (see `defn.RepeatedParamClass`). For example the + * arguments `1, 2` of `List.apply(1, 2)` can be represented as follows: + * + * + * ```scala + * //{ + * import scala.quoted._ + * def inQuotes(using Quotes) = { + * val q: Quotes = summon[Quotes] + * import q.reflect._ + * //} + * val intArgs = List(Literal(Constant(1)), Literal(Constant(2))) + * Typed( + * Repeated(intArgs, TypeTree.of[Int]), + * Inferred(defn.RepeatedParamClass.typeRef.appliedTo(TypeRepr.of[Int]))) + * //{ + * } + * //} + * ``` + */ type Repeated <: Term /** `TypeTest` that allows testing at runtime in a pattern match if a `Tree` is a `Repeated` */ @@ -1633,8 +1656,11 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Methods of the module object `val Repeated` */ trait RepeatedModule { this: Repeated.type => + /** Create a literal sequence of elements */ def apply(elems: List[Term], tpt: TypeTree): Repeated + /** Copy a literal sequence of elements */ def copy(original: Tree)(elems: List[Term], tpt: TypeTree): Repeated + /** Matches a literal sequence of elements */ def unapply(x: Repeated): (List[Term], TypeTree) } @@ -4314,6 +4340,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** A dummy class symbol that is used to indicate repeated parameters * compiled by the Scala compiler.
+   *
+   *  @see Repeated
    */
   def RepeatedParamClass: Symbol

diff --git a/tests/pos/i18784/Macro_1.scala b/tests/pos/i18784/Macro_1.scala
new file mode 100644
index 000000000000..2eb93205d5ac
--- /dev/null
+++ b/tests/pos/i18784/Macro_1.scala
@@ -0,0 +1,20 @@
+import scala.quoted.*
+
+object Macro {
+  inline def repeated = ${Macro.repeatedImpl}
+  def repeatedImpl(using Quotes): Expr[List[Int]] = {
+    import quotes.reflect.*
+    val args = List(Expr(1), Expr(2))
+    val listObjectTerm = '{ List }.asTerm
+    Apply(
+      TypeApply(
+        Select.unique(listObjectTerm, "apply"),
+        List(TypeTree.of[Int])
+      ),
+      List(
+        Typed(
+          Repeated(args.map(_.asTerm), TypeTree.of[Int]),
+          Inferred(defn.RepeatedParamClass.typeRef.appliedTo(TypeRepr.of[Int]))))
+    ).asExprOf[List[Int]]
+  }
+}
\ No newline at end of file
diff --git a/tests/pos/i18784/Test_2.scala b/tests/pos/i18784/Test_2.scala
new file mode 100644
index 000000000000..a5fb441b4137
--- /dev/null
+++ b/tests/pos/i18784/Test_2.scala
@@ -0,0 +1,2 @@
+def Test: Unit =
+  Macro.repeated

From f214f057e0c7202e5857b5ef0459b29120700ed2 Mon Sep 17 00:00:00 2001
From: Sébastien Doeraene
Date: Fri, 3 Nov 2023 17:08:19 +0100
Subject: [PATCH 133/216] Fix #18816: Transfer the span of rewired `This` nodes
 in `fullyParameterizedDef`.

---
 .../dotc/transform/FullParameterization.scala |  2 +-
 .../backend/jvm/DottyBytecodeTests.scala      | 32 +++++++++++++++++++
 2 files changed, 33 insertions(+), 1 deletion(-)

diff --git a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala
index 8ca600577244..498fc8e28ff9 100644
--- a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala
+++ b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala
@@ -207,7 +207,7 @@ trait FullParameterization {
           .subst(origLeadingTypeParamSyms ++ origOtherParamSyms, (trefs ++ argRefs).tpes)
           .substThisUnlessStatic(origClass, thisRef.tpe),
         treeMap = {
-          case tree: This if tree.symbol == origClass => thisRef
+          case tree: This if tree.symbol == origClass => thisRef.withSpan(tree.span)
           case tree => rewireTree(tree, Nil) orElse tree
         },
         oldOwners = origMeth :: Nil,
diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
index 33b18318d59f..84c7b916fa74 100644
--- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
+++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
@@ -1701,6 +1701,38 @@ class DottyBytecodeTests extends DottyBytecodeTest {
     }
   }
+
+  @Test def i18816 = {
+    // The primary goal of this test is to check that `LineNumber` entries have correct numbers
+    val source =
+      """trait Context
+        |
+        |class A(x: Context) extends AnyVal:
+        |  given [T]: Context = x
+        |
+        |  def m1 =
+        |    println(m3)
+        |    def m2 =
+        |      m3 // line 9
+        |    println(m2)
+        |
+        |  def m3(using Context): String = ""
+      """.stripMargin
+
+    checkBCode(source) { dir =>
+      val clsIn = dir.lookupName("A$.class", directory = false).input
+      val clsNode = loadClassNode(clsIn, skipDebugInfo = false)
+      val method = getMethod(clsNode, "m2$1")
+      val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber])
+
+      // There used to be references to line 7 here
+      val expected = List(
+        LineNumber(9, Label(0)),
+      )
+
+      assertSameCode(instructions, expected)
+    }
+  }
 }

 object invocationReceiversTestCode {

From 121e28fe6300c033379926f6a68f87a2ee24e87a Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Mon, 6 Nov 2023 11:06:46 +0100
Subject: [PATCH 134/216] Improve `with` in type migration warning --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 5 +++-- tests/neg/with-type-operator-future-migration.check | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index c845ea8f74c7..bbe701e4f3d6 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1756,12 +1756,13 @@ object Parsers { if in.token == LBRACE || in.token == INDENT then t else + val withSpan = Span(withOffset, withOffset + 4) report.errorOrMigrationWarning( DeprecatedWithOperator(rewriteNotice(`future-migration`)), - in.sourcePos(withOffset), + source.atSpan(withSpan), from = future) if sourceVersion == `future-migration` then - patch(source, Span(withOffset, withOffset + 4), "&") + patch(source, withSpan, "&") atSpan(startOffset(t)) { makeAndType(t, withType()) } else t diff --git a/tests/neg/with-type-operator-future-migration.check b/tests/neg/with-type-operator-future-migration.check index 29ea1d5aad4c..845601349c83 100644 --- a/tests/neg/with-type-operator-future-migration.check +++ b/tests/neg/with-type-operator-future-migration.check @@ -1,6 +1,6 @@ -- [E003] Syntax Error: tests/neg/with-type-operator-future-migration.scala:5:13 --------------------------------------- 5 |def foo: Int with String = ??? // error - | ^ + | ^^^^ | with as a type operator has been deprecated; use & instead | This construct can be rewritten automatically under -rewrite -source future-migration. | From 3a6af8c007e8c60182e3dba0f1abb67401a54478 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Mon, 6 Nov 2023 11:15:23 +0100 Subject: [PATCH 135/216] Update `with` type to `&` in compiler --- compiler/src/dotty/tools/dotc/ast/untpd.scala | 2 +- compiler/src/dotty/tools/dotc/config/CliCommand.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 41ba452fa80a..153100aa77ff 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -42,7 +42,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** mods object name impl */ case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) extends MemberDef { - type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef + type ThisTree[+T <: Untyped] <: Trees.NameTree[T] & Trees.MemberDef[T] & ModuleDef def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) } diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index 914df040fbf7..64f1d63c61ca 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -12,7 +12,7 @@ import scala.PartialFunction.cond trait CliCommand: - type ConcreteSettings <: CommonScalaSettings with Settings.SettingGroup + type ConcreteSettings <: CommonScalaSettings & Settings.SettingGroup def versionMsg: String From 3aaa98ccda2d746568ddce82521c20e551daa812 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 3 Nov 2023 19:23:36 +0100 Subject: [PATCH 136/216] fix bootstrapped tests --- tests/neg/irrefutable.check | 12 ++++++ tests/neg/irrefutable.scala | 42 +++++++++++++++++++ 
 .../refutable-pattern-binding-messages.check  | 32 +++++++-------
 tests/pos/irrefutable.scala                   | 22 ----------
 tests/pos/t6205.scala                         |  2 +-
 tests/run/ReplacementMatching.scala           |  4 +-
 tests/run/irrefutable.check                   |  5 +++
 tests/run/irrefutable.scala                   | 36 ++++++++++++++++
 tests/run/patmat-bind-typed.scala             |  2 +-
 tests/run/quoted-sematics-1.scala             |  8 ++--
 tests/run/t6406-regextract.scala              |  4 +-
 tests/run/t6646.scala                         |  6 +--
 tests/run/t6968.scala                         |  2 +-
 13 files changed, 125 insertions(+), 52 deletions(-)
 create mode 100644 tests/neg/irrefutable.check
 create mode 100644 tests/neg/irrefutable.scala
 delete mode 100644 tests/pos/irrefutable.scala
 create mode 100644 tests/run/irrefutable.check
 create mode 100644 tests/run/irrefutable.scala

diff --git a/tests/neg/irrefutable.check b/tests/neg/irrefutable.check
new file mode 100644
index 000000000000..01baff685cbc
--- /dev/null
+++ b/tests/neg/irrefutable.check
@@ -0,0 +1,12 @@
+-- [E008] Not Found Error: tests/neg/irrefutable.scala:27:29 -----------------------------------------------------------
+27 | for (case Foo(x: Int) <- xs) yield x // error
+   | ^^
+   | value withFilter is not a member of Lst[Foo[Any]]
+-- Error: tests/neg/irrefutable.scala:30:16 ----------------------------------------------------------------------------
+30 | for (Foo(x: Int) <- xs) yield x // error
+   | ^^^
+   | pattern's type Int is more specialized than the right hand side expression's type Any
+   |
+   | If the narrowing is intentional, this can be communicated by adding the `case` keyword before the full pattern,
+   | which will result in a filtering for expression (using `withFilter`).
+   | This patch can be rewritten automatically under -rewrite -source 3.2-migration.
diff --git a/tests/neg/irrefutable.scala b/tests/neg/irrefutable.scala
new file mode 100644
index 000000000000..b4f2736998e6
--- /dev/null
+++ b/tests/neg/irrefutable.scala
@@ -0,0 +1,42 @@
+// This tests that the pattern in A.f1 is recognized as irrefutable, while the one in A.f2_nocase is not;
+// A.f2 fixes the latter by adding `case` to the pattern, which causes a withFilter call to be inserted.
+// see also: tests/run/irrefutable.scala for an example that exercises the insertion of withFilter.
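+//
+// A rough sketch of the desugaring at play (editorial illustration, assuming the standard
+// for-comprehension rules; not part of the original test): with `case`, the generator
+// filters before it maps, roughly
+//   xs.withFilter { case Foo(x: Int) => true; case _ => false }
+//     .map { case Foo(x: Int) => x }
+// which is why hiding `withFilter` on Lst below makes A.f2 fail to compile (the E008 error above).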
+ +class Lst[+T](val id: String, val underlying: List[T]) { + def map[U](f: T => U): Lst[U] = new Lst(id, underlying.map(f)) + + // hide the withFilter so that there is a compile error + // def withFilter(f: T => Boolean): Lst.WithFilter[T] = new Lst.WithFilter(this, f) +} + +// object Lst: +// class WithFilter[+T](lst: Lst[T], filter: T => Boolean): +// def forwardingFilter[T1](filter: T1 => Boolean): T1 => Boolean = t => +// println(s"filtering $t in ${lst.id}") +// filter(t) + +// def map[U](f: T => U): Lst[U] = Lst(lst.id, lst.underlying.withFilter(forwardingFilter(filter)).map(f)) + +case class Foo[T](x: T) + +object A { + def f1(xs: Lst[Foo[Int]]): Lst[Int] = { + for (Foo(x: Int) <- xs) yield x + } + def f2(xs: Lst[Foo[Any]]): Lst[Int] = { + for (case Foo(x: Int) <- xs) yield x // error + } + def f2_nocase(xs: Lst[Foo[Any]]): Lst[Int] = { + for (Foo(x: Int) <- xs) yield x // error + } +} + +@main def Test = + val xs = new Lst("xs", List(Foo(1), Foo(2), Foo(3))) + println("=== mapping xs with A.f1 ===") + val xs1 = A.f1(xs) + assert(xs1.underlying == List(1, 2, 3)) + val ys = new Lst("ys", List(Foo(1: Any), Foo(2: Any), Foo(3: Any))) + println("=== mapping ys with A.f2 ===") + val ys1 = A.f2(ys) + assert(ys1.underlying == List(1, 2, 3)) diff --git a/tests/neg/refutable-pattern-binding-messages.check b/tests/neg/refutable-pattern-binding-messages.check index b1b8866e174f..5a9d85fd4447 100644 --- a/tests/neg/refutable-pattern-binding-messages.check +++ b/tests/neg/refutable-pattern-binding-messages.check @@ -1,11 +1,3 @@ --- Error: tests/neg/refutable-pattern-binding-messages.scala:5:14 ------------------------------------------------------ -5 | val Positive(p) = 5 // error: refutable extractor - | ^^^^^^^^^^^^^^^ - | pattern binding uses refutable extractor `Test.Positive` - | - | If this usage is intentional, this can be communicated by adding `: @unchecked` after the expression, - | which may result in a MatchError at runtime. - | This patch can be rewritten automatically under -rewrite -source 3.2-migration. -- Error: tests/neg/refutable-pattern-binding-messages.scala:6:14 ------------------------------------------------------ 6 | for Positive(i) <- List(1, 2, 3) do () // error: refutable extractor | ^^^^^^^^^^^ @@ -14,14 +6,6 @@ | If this usage is intentional, this can be communicated by adding the `case` keyword before the full pattern, | which will result in a filtering for expression (using `withFilter`). | This patch can be rewritten automatically under -rewrite -source 3.2-migration. --- Error: tests/neg/refutable-pattern-binding-messages.scala:10:20 ----------------------------------------------------- -10 | val i :: is = List(1, 2, 3) // error: pattern type more specialized - | ^^^^^^^^^^^^^ - | pattern's type ::[Int] is more specialized than the right hand side expression's type List[Int] - | - | If the narrowing is intentional, this can be communicated by adding `: @unchecked` after the expression, - | which may result in a MatchError at runtime. - | This patch can be rewritten automatically under -rewrite -source 3.2-migration. -- Error: tests/neg/refutable-pattern-binding-messages.scala:11:11 ----------------------------------------------------- 11 | for ((x: String) <- xs) do () // error: pattern type more specialized | ^^^^^^ @@ -38,6 +22,22 @@ | If the narrowing is intentional, this can be communicated by adding the `case` keyword before the full pattern, | which will result in a filtering for expression (using `withFilter`). 
| This patch can be rewritten automatically under -rewrite -source 3.2-migration. +-- Error: tests/neg/refutable-pattern-binding-messages.scala:5:14 ------------------------------------------------------ +5 | val Positive(p) = 5 // error: refutable extractor + | ^^^^^^^^^^^^^^^ + | pattern binding uses refutable extractor `Test.Positive` + | + | If this usage is intentional, this can be communicated by adding `: @unchecked` after the expression, + | which may result in a MatchError at runtime. + | This patch can be rewritten automatically under -rewrite -source 3.2-migration. +-- Error: tests/neg/refutable-pattern-binding-messages.scala:10:20 ----------------------------------------------------- +10 | val i :: is = List(1, 2, 3) // error: pattern type more specialized + | ^^^^^^^^^^^^^ + | pattern's type ::[Int] is more specialized than the right hand side expression's type List[Int] + | + | If the narrowing is intentional, this can be communicated by adding `: @unchecked` after the expression, + | which may result in a MatchError at runtime. + | This patch can be rewritten automatically under -rewrite -source 3.2-migration. -- Error: tests/neg/refutable-pattern-binding-messages.scala:16:10 ----------------------------------------------------- 16 | val 1 = 2 // error: pattern type does not match | ^ diff --git a/tests/pos/irrefutable.scala b/tests/pos/irrefutable.scala deleted file mode 100644 index 0a792b644a09..000000000000 --- a/tests/pos/irrefutable.scala +++ /dev/null @@ -1,22 +0,0 @@ -// The test which this should perform but does not -// is that f1 is recognized as irrefutable and f2 is not -// This can be recognized via the generated classes: -// -// A$$anonfun$f1$1.class -// A$$anonfun$f2$1.class -// A$$anonfun$f2$2.class -// -// The extra one in $f2$ is the filter. -// -// !!! Marking with exclamation points so maybe someday -// this test will be finished. -class A { - case class Foo[T](x: T) - - def f1(xs: List[Foo[Int]]) = { - for (Foo(x: Int) <- xs) yield x - } - def f2(xs: List[Foo[Any]]) = { - for (Foo(x: Int) <- xs) yield x - } -} diff --git a/tests/pos/t6205.scala b/tests/pos/t6205.scala index 52078bd5f46f..a50350d20376 100644 --- a/tests/pos/t6205.scala +++ b/tests/pos/t6205.scala @@ -2,7 +2,7 @@ class A[T] class Test1 { def x(backing: Map[A[_], Any]) = - for( (k: A[kt], v) <- backing) + for(case (k: A[kt], v) <- backing) yield (k: A[kt]) } diff --git a/tests/run/ReplacementMatching.scala b/tests/run/ReplacementMatching.scala index 846f1c0a0966..b233709a7cae 100644 --- a/tests/run/ReplacementMatching.scala +++ b/tests/run/ReplacementMatching.scala @@ -32,12 +32,12 @@ object Test { def groupsMatching: Unit = { val Date = """(\d+)/(\d+)/(\d+)""".r - for (Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.") { + for (case Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.") { assert(a == "1") assert(b == "1") assert(c == "2001") } - for (Regex.Groups(a, b, c) <- (Date findAllIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.").matchData) { + for (case Regex.Groups(a, b, c) <- (Date findAllIn "1/1/2001 marks the start of the millennium. 
31/12/2000 doesn't.").matchData) { assert(a == "1" || a == "31") assert(b == "1" || b == "12") assert(c == "2001" || c == "2000") diff --git a/tests/run/irrefutable.check b/tests/run/irrefutable.check new file mode 100644 index 000000000000..f0ac130d17af --- /dev/null +++ b/tests/run/irrefutable.check @@ -0,0 +1,5 @@ +=== mapping xs with A.f1 === +=== mapping ys with A.f2 === +filtering Foo(1) in ys +filtering Foo(2) in ys +filtering Foo(3) in ys diff --git a/tests/run/irrefutable.scala b/tests/run/irrefutable.scala new file mode 100644 index 000000000000..c62dcebaf8fb --- /dev/null +++ b/tests/run/irrefutable.scala @@ -0,0 +1,36 @@ +// This tests that A.f1 does not filter its inputs, whereas A.f2 does. +// see also: tests/neg/irrefutable.scala for an example that exercises the requirement to insert case. + +class Lst[+T](val id: String, val underlying: List[T]) { + def map[U](f: T => U): Lst[U] = new Lst(id, underlying.map(f)) + def withFilter(f: T => Boolean): Lst.WithFilter[T] = new Lst.WithFilter(this, f) +} + +object Lst: + class WithFilter[+T](lst: Lst[T], filter: T => Boolean): + def forwardingFilter[T1](filter: T1 => Boolean): T1 => Boolean = t => + println(s"filtering $t in ${lst.id}") + filter(t) + + def map[U](f: T => U): Lst[U] = Lst(lst.id, lst.underlying.withFilter(forwardingFilter(filter)).map(f)) + +case class Foo[T](x: T) + +object A { + def f1(xs: Lst[Foo[Int]]): Lst[Int] = { + for (Foo(x: Int) <- xs) yield x + } + def f2(xs: Lst[Foo[Any]]): Lst[Int] = { + for (case Foo(x: Int) <- xs) yield x + } +} + +@main def Test = + val xs = new Lst("xs", List(Foo(1), Foo(2), Foo(3))) + println("=== mapping xs with A.f1 ===") + val xs1 = A.f1(xs) + assert(xs1.underlying == List(1, 2, 3)) + val ys = new Lst("ys", List(Foo(1: Any), Foo(2: Any), Foo(3: Any))) + println("=== mapping ys with A.f2 ===") + val ys1 = A.f2(ys) + assert(ys1.underlying == List(1, 2, 3)) diff --git a/tests/run/patmat-bind-typed.scala b/tests/run/patmat-bind-typed.scala index 10de921c5190..065babc8216c 100644 --- a/tests/run/patmat-bind-typed.scala +++ b/tests/run/patmat-bind-typed.scala @@ -1,5 +1,5 @@ object Test { - def f(xs: List[Any]) = for (key @ (dummy: String) <- xs) yield key + def f(xs: List[Any]) = for (case key @ (dummy: String) <- xs) yield key def main(args: Array[String]): Unit = { f("abc" :: Nil) foreach println diff --git a/tests/run/quoted-sematics-1.scala b/tests/run/quoted-sematics-1.scala index 84bf754dbc36..4f94c8f3c32c 100644 --- a/tests/run/quoted-sematics-1.scala +++ b/tests/run/quoted-sematics-1.scala @@ -82,7 +82,7 @@ def typeChecks(g: Gamma)(level: 0 | 1)(term: Term): Option[Type] = yield LambdaType(t, res) case App(fun, arg) => // T-App for - LambdaType(t1, t2) <- typeChecks(g)(level)(fun) + case LambdaType(t1, t2) <- typeChecks(g)(level)(fun) `t1` <- typeChecks(g)(level)(arg) yield t2 case Box(body) if level == 0 => // T-Box @@ -90,16 +90,16 @@ def typeChecks(g: Gamma)(level: 0 | 1)(term: Term): Option[Type] = case Lift(body) if level == 0 => // T-Lift for NatType <- typeChecks(g)(0)(body) yield BoxType(NatType) case Splice(body) if level == 1 => // T-Unbox - for BoxType(t) <- typeChecks(g)(0)(body) yield t + for case BoxType(t) <- typeChecks(g)(0)(body) yield t case Match(scrutinee, pat, thenp, elsep) => // T-Pat for - BoxType(t1) <- typeChecks(g)(0)(scrutinee) + case BoxType(t1) <- typeChecks(g)(0)(scrutinee) delta <- typePatChecks(g, t1)(pat) t <- typeChecks(g ++ delta)(0)(thenp) `t` <- typeChecks(g)(0)(elsep) yield t case Fix(t) if level == 0 => - for LambdaType(t1, t2) <- 
typeChecks(g)(0)(t) yield t2 // T-Fix + for case LambdaType(t1, t2) <- typeChecks(g)(0)(t) yield t2 // T-Fix case _ => None if res.isEmpty then println(s"Failed to type $term at level $level with environment $g") diff --git a/tests/run/t6406-regextract.scala b/tests/run/t6406-regextract.scala index 18cf28865aba..4d10d3f8775c 100644 --- a/tests/run/t6406-regextract.scala +++ b/tests/run/t6406-regextract.scala @@ -20,10 +20,10 @@ object Test extends App { val t = "Last modified 2011-07-15" val p1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r val y1: Option[String] = for { - p1(year, month, day) <- p1 findFirstIn t + case p1(year, month, day) <- p1 findFirstIn t } yield year val y2: Option[String] = for { - p1(year, month, day) <- p1 findFirstMatchIn t + case p1(year, month, day) <- p1 findFirstMatchIn t } yield year println(s"$y1 $y2") diff --git a/tests/run/t6646.scala b/tests/run/t6646.scala index b96851077bf9..d1c84455c216 100644 --- a/tests/run/t6646.scala +++ b/tests/run/t6646.scala @@ -8,9 +8,9 @@ object Test { val l = List(PrimaryKey, NoNull, lower) // withFilter must be generated in these - for (option @ NoNull <- l) println("Found " + option) - for (option @ `lower` <- l) println("Found " + option) - for ((`lower`, i) <- l.zipWithIndex) println("Found " + i) + for (case option @ NoNull <- l) println("Found " + option) + for (case option @ `lower` <- l) println("Found " + option) + for (case (`lower`, i) <- l.zipWithIndex) println("Found " + i) // no withFilter for (X <- List("A single ident is always a pattern")) println(X) diff --git a/tests/run/t6968.scala b/tests/run/t6968.scala index c4e47ba0eda8..84a0969c0872 100644 --- a/tests/run/t6968.scala +++ b/tests/run/t6968.scala @@ -1,7 +1,7 @@ object Test { def main(args: Array[String]): Unit = { val mixedList = List(1,(1,2),4,(3,1),(5,4),6) - val as = for((a,b) <- mixedList) yield a + val as = for(case (a,b) <- mixedList) yield a println(as.mkString(", ")) } } From fd079c8ac358acb9ccd2d9c1d9de7cbaa35901ae Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 3 Nov 2023 20:46:20 +0100 Subject: [PATCH 137/216] ignore problematic scala.js sources --- project/Build.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/project/Build.scala b/project/Build.scala index 13ebe9c028ae..fe9720464786 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1490,6 +1490,7 @@ object Build { ( (dir / "shared/src/test/scala" ** (("*.scala": FileFilter) -- "ReflectiveCallTest.scala" // uses many forms of structural calls that are not allowed in Scala 3 anymore + -- "UTF16Test.scala" // refutable pattern match )).get ++ (dir / "shared/src/test/require-sam" ** "*.scala").get From 763ce729734c8383951341e9076bb615cdd790e5 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 11 Jan 2023 16:20:10 +0100 Subject: [PATCH 138/216] patch scala-xml for refutable patterns --- community-build/community-projects/scala-xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community-build/community-projects/scala-xml b/community-build/community-projects/scala-xml index ba33a89bdeee..e5175a666bd9 160000 --- a/community-build/community-projects/scala-xml +++ b/community-build/community-projects/scala-xml @@ -1 +1 @@ -Subproject commit ba33a89bdeee67089ff486c66ead93ab35f9250a +Subproject commit e5175a666bd9e63fcd8a61266d136773aa5b700a From 440ff3250337e4e11dd07c873025d1df90e312b3 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 11 Jan 2023 17:25:36 +0100 Subject: [PATCH 139/216] patch scalatest for refutable patterns --- 
community-build/community-projects/scalatest | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community-build/community-projects/scalatest b/community-build/community-projects/scalatest index 39370e391342..d430625d9621 160000 --- a/community-build/community-projects/scalatest +++ b/community-build/community-projects/scalatest @@ -1 +1 @@ -Subproject commit 39370e391342eb3d3ecfa847be16734f2fb1f3a2 +Subproject commit d430625d96218c9031b1434cc0c2110f3740fa1c From 050af10859dd2e14090990adc8f6c833145b108f Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 28 Sep 2023 17:15:04 +0200 Subject: [PATCH 140/216] patch scala-xml again --- community-build/community-projects/scala-xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community-build/community-projects/scala-xml b/community-build/community-projects/scala-xml index e5175a666bd9..105c3dac8835 160000 --- a/community-build/community-projects/scala-xml +++ b/community-build/community-projects/scala-xml @@ -1 +1 @@ -Subproject commit e5175a666bd9e63fcd8a61266d136773aa5b700a +Subproject commit 105c3dac883549eca1182b04fc5a18fe4f5ad51a From 0bfd343794850a450d5df1b4f62c2d6a9b911f43 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 3 Nov 2023 20:59:21 +0100 Subject: [PATCH 141/216] patch scalaz for refutable for --- community-build/community-projects/scalaz | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/community-build/community-projects/scalaz b/community-build/community-projects/scalaz index 6e7f3d9caf64..97cccf3b3fcb 160000 --- a/community-build/community-projects/scalaz +++ b/community-build/community-projects/scalaz @@ -1 +1 @@ -Subproject commit 6e7f3d9caf64d8ad1c82804cf418882345f41930 +Subproject commit 97cccf3b3fcb71885a32b2e567171c0f70b06104 From 5275ab531fd716d3dd9732a1fc78599e10e4d846 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 Nov 2023 15:59:30 +0100 Subject: [PATCH 142/216] Revert "More additions to the standard library (#18799)" This reverts commit ef97ee2a980e794e7877ead088949dc48afcefbc, reversing changes made to 5454110496c35799743a060bbe9e5e18e9e32398. 
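
For context: with capture checking enabled, the compiler rejects a mutable variable whose
type mentions a type variable that is neither sealed nor bound by an enclosing method.
A minimal sketch of both sides of that rule, adapted from the comment in the code removed
below (hypothetical example, assuming `-language:experimental.captureChecking`):

```scala
import language.experimental.captureChecking
import scala.compiletime.uninitialized

class C[T]:
  def f(init: T) = { var x: T = init; x } // ok: x sits in a method within T's scope

class D[T]:
  object o:
    var x: T = uninitialized // rejected: T is not sealed and o is not a method
```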
--- .../dotty/tools/dotc/ast/TreeTypeMap.scala | 3 +- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 12 +- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 1 - .../dotty/tools/dotc/cc/CheckCaptures.scala | 187 +- compiler/src/dotty/tools/dotc/cc/Setup.scala | 5 +- .../dotty/tools/dotc/core/Definitions.scala | 2 +- .../dotty/tools/dotc/core/Substituters.scala | 2 +- .../reporting/UniqueMessagePositions.scala | 10 +- .../tools/dotc/transform/CapturedVars.scala | 55 +- .../dotty/tools/dotc/transform/Recheck.scala | 6 +- .../src/dotty/tools/dotc/typer/Namer.scala | 9 +- .../dotty/tools/dotc/typer/RefChecks.scala | 5 +- .../dotty/tools/dotc/CompilationTests.scala | 2 +- .../unchecked/uncheckedCapabilityLeaks.scala | 12 + .../unchecked/uncheckedCaptures.scala | 12 - tests/neg-custom-args/captures/buffers.check | 26 - tests/neg-custom-args/captures/buffers.scala | 30 - tests/neg-custom-args/captures/cc-this.check | 2 +- tests/neg-custom-args/captures/cc-this2.check | 14 +- .../captures/cc-this2/D_2.scala | 2 +- .../captures/exception-definitions.check | 17 +- .../captures/exception-definitions.scala | 4 +- tests/neg-custom-args/captures/filevar.scala | 2 +- .../captures/leaked-curried.check | 11 +- .../captures/leaked-curried.scala | 4 +- tests/neg-custom-args/captures/levels.check | 4 +- .../neg-custom-args/captures/localcaps.check | 12 - .../neg-custom-args/captures/localcaps.scala | 2 +- tests/neg-custom-args/captures/pairs.check | 8 - tests/neg-custom-args/captures/pairs.scala | 4 +- .../recursive-leaking-local-cap.scala | 22 - .../captures/sealed-classes.scala | 21 - .../captures/sealed-leaks.check | 50 - .../captures/sealed-leaks.scala | 32 +- .../captures/sealed-refs.scala | 42 - tests/neg/class-mods.scala | 2 +- .../captures/sealed-lowerbound.scala | 12 - .../captures/sealed-value-class.scala | 3 - tests/pos-custom-args/captures/steppers.scala | 27 - .../stdlib/collection/ArrayOps.scala | 1664 ----------- .../stdlib/collection/BitSet.scala | 348 --- .../stdlib/collection/BufferedIterator.scala | 32 - .../stdlib/collection/BuildFrom.scala | 128 - .../stdlib/collection/DefaultMap.scala | 21 - .../stdlib/collection/Factory.scala | 798 ------ .../stdlib/collection/Hashing.scala | 63 - .../stdlib/collection/IndexedSeq.scala | 6 +- .../stdlib/collection/IndexedSeqView.scala | 187 -- .../stdlib/collection/Iterable.scala | 8 +- .../stdlib/collection/IterableOnce.scala | 23 +- .../stdlib/collection/Iterator.scala | 20 +- .../stdlib/collection/JavaConverters.scala | 336 --- .../stdlib/collection/LazyZipOps.scala | 423 --- tests/pos-special/stdlib/collection/Map.scala | 21 +- .../stdlib/collection/MapView.scala | 196 -- .../stdlib/collection/Searching.scala | 58 - tests/pos-special/stdlib/collection/Seq.scala | 16 +- .../stdlib/collection/SeqMap.scala | 41 - .../stdlib/collection/SeqView.scala | 232 -- tests/pos-special/stdlib/collection/Set.scala | 271 -- .../stdlib/collection/SortedMap.scala | 222 -- .../stdlib/collection/SortedOps.scala | 91 - .../stdlib/collection/SortedSet.scala | 190 -- .../stdlib/collection/Stepper.scala | 378 --- .../stdlib/collection/StepperShape.scala | 115 - .../collection/StrictOptimizedMapOps.scala | 50 - .../collection/StrictOptimizedSeqOps.scala | 9 +- .../collection/StrictOptimizedSetOps.scala | 30 - .../StrictOptimizedSortedMapOps.scala | 47 - .../StrictOptimizedSortedSetOps.scala | 42 - .../stdlib/collection/StringOps.scala | 2 +- .../stdlib/collection/StringParsers.scala | 320 --- .../pos-special/stdlib/collection/View.scala | 2 +- 
.../stdlib/collection/WithFilter.scala | 72 - .../stdlib/collection/concurrent/Map.scala | 193 -- .../collection/generic/BitOperations.scala | 51 - .../generic/DefaultSerializationProxy.scala | 90 - .../collection/generic/IsIterable.scala | 165 -- .../collection/generic/IsIterableOnce.scala | 72 - .../stdlib/collection/generic/IsMap.scala | 115 - .../stdlib/collection/generic/IsSeq.scala | 123 - .../collection/generic/Subtractable.scala | 63 - .../stdlib/collection/generic/package.scala | 35 - .../collection/immutable/ArraySeq.scala | 692 ----- .../stdlib/collection/immutable/BitSet.scala | 376 --- .../collection/immutable/ChampCommon.scala | 253 -- .../stdlib/collection/immutable/HashMap.scala | 2425 ---------------- .../stdlib/collection/immutable/HashSet.scala | 2125 -------------- .../stdlib/collection/immutable/IntMap.scala | 504 ---- .../collection/immutable/Iterable.scala | 2 +- .../immutable/LazyListIterable.scala | 1376 --------- .../stdlib/collection/immutable/ListMap.scala | 373 --- .../stdlib/collection/immutable/ListSet.scala | 140 - .../stdlib/collection/immutable/LongMap.scala | 492 ---- .../stdlib/collection/immutable/Map.scala | 694 ----- .../collection/immutable/NumericRange.scala | 509 ---- .../stdlib/collection/immutable/Queue.scala | 218 -- .../stdlib/collection/immutable/Range.scala | 673 ----- .../collection/immutable/RedBlackTree.scala | 1234 -------- .../stdlib/collection/immutable/Seq.scala | 2 +- .../stdlib/collection/immutable/SeqMap.scala | 278 -- .../stdlib/collection/immutable/Set.scala | 400 --- .../collection/immutable/SortedMap.scala | 178 -- .../collection/immutable/SortedSet.scala | 58 - .../immutable/StrictOptimizedSeqOps.scala | 82 - .../stdlib/collection/immutable/TreeMap.scala | 372 --- .../collection/immutable/TreeSeqMap.scala | 651 ----- .../stdlib/collection/immutable/TreeSet.scala | 297 -- .../stdlib/collection/immutable/Vector.scala | 2476 ----------------- .../collection/immutable/VectorMap.scala | 277 -- .../collection/immutable/WrappedString.scala | 142 - .../stdlib/collection/immutable/package.scala | 29 - .../stdlib/collection/mutable/AnyRefMap.scala | 603 ---- .../collection/mutable/ArrayBuffer.scala | 406 --- .../collection/mutable/ArrayBuilder.scala | 523 ---- .../collection/mutable/ArrayDeque.scala | 646 ----- .../stdlib/collection/mutable/ArraySeq.scala | 351 --- .../stdlib/collection/mutable/BitSet.scala | 393 --- .../stdlib/collection/mutable/Buffer.scala | 5 +- .../mutable/CheckedIndexedSeqView.scala | 120 - .../stdlib/collection/mutable/Cloneable.scala | 22 - .../mutable/CollisionProofHashMap.scala | 889 ------ .../collection/mutable/GrowableBuilder.scala | 37 - .../stdlib/collection/mutable/HashMap.scala | 655 ----- .../stdlib/collection/mutable/HashSet.scala | 457 --- .../stdlib/collection/mutable/HashTable.scala | 418 --- .../collection/mutable/ImmutableBuilder.scala | 32 - .../collection/mutable/IndexedSeq.scala | 84 - .../collection/mutable/LinkedHashMap.scala | 510 ---- .../collection/mutable/LinkedHashSet.scala | 349 --- .../stdlib/collection/mutable/ListMap.scala | 83 - .../stdlib/collection/mutable/LongMap.scala | 674 ----- .../stdlib/collection/mutable/Map.scala | 271 -- .../stdlib/collection/mutable/MultiMap.scala | 116 - .../collection/mutable/OpenHashMap.scala | 307 -- .../collection/mutable/PriorityQueue.scala | 403 --- .../stdlib/collection/mutable/Queue.scala | 139 - .../collection/mutable/RedBlackTree.scala | 653 ----- .../collection/mutable/ReusableBuilder.scala | 56 - .../stdlib/collection/mutable/Set.scala | 123 
- .../stdlib/collection/mutable/SortedMap.scala | 104 - .../stdlib/collection/mutable/SortedSet.scala | 49 - .../stdlib/collection/mutable/Stack.scala | 144 - .../collection/mutable/StringBuilder.scala | 2 +- .../stdlib/collection/mutable/TreeMap.scala | 258 -- .../stdlib/collection/mutable/TreeSet.scala | 219 -- .../collection/mutable/UnrolledBuffer.scala | 443 --- .../collection/mutable/WeakHashMap.scala | 56 - .../stdlib/collection/mutable/package.scala | 42 - .../stdlib/collection/package.scala | 81 - 150 files changed, 189 insertions(+), 35907 deletions(-) delete mode 100644 library/src/scala/annotation/unchecked/uncheckedCaptures.scala delete mode 100644 tests/neg-custom-args/captures/buffers.check delete mode 100644 tests/neg-custom-args/captures/buffers.scala delete mode 100644 tests/neg-custom-args/captures/localcaps.check delete mode 100644 tests/neg-custom-args/captures/recursive-leaking-local-cap.scala delete mode 100644 tests/neg-custom-args/captures/sealed-classes.scala delete mode 100644 tests/neg-custom-args/captures/sealed-leaks.check delete mode 100644 tests/neg-custom-args/captures/sealed-refs.scala delete mode 100644 tests/pos-custom-args/captures/sealed-lowerbound.scala delete mode 100644 tests/pos-custom-args/captures/sealed-value-class.scala delete mode 100644 tests/pos-custom-args/captures/steppers.scala delete mode 100644 tests/pos-special/stdlib/collection/ArrayOps.scala delete mode 100644 tests/pos-special/stdlib/collection/BitSet.scala delete mode 100644 tests/pos-special/stdlib/collection/BufferedIterator.scala delete mode 100644 tests/pos-special/stdlib/collection/BuildFrom.scala delete mode 100644 tests/pos-special/stdlib/collection/DefaultMap.scala delete mode 100644 tests/pos-special/stdlib/collection/Factory.scala delete mode 100644 tests/pos-special/stdlib/collection/Hashing.scala delete mode 100644 tests/pos-special/stdlib/collection/IndexedSeqView.scala delete mode 100644 tests/pos-special/stdlib/collection/JavaConverters.scala delete mode 100644 tests/pos-special/stdlib/collection/LazyZipOps.scala delete mode 100644 tests/pos-special/stdlib/collection/MapView.scala delete mode 100644 tests/pos-special/stdlib/collection/Searching.scala delete mode 100644 tests/pos-special/stdlib/collection/SeqMap.scala delete mode 100644 tests/pos-special/stdlib/collection/SeqView.scala delete mode 100644 tests/pos-special/stdlib/collection/Set.scala delete mode 100644 tests/pos-special/stdlib/collection/SortedMap.scala delete mode 100644 tests/pos-special/stdlib/collection/SortedOps.scala delete mode 100644 tests/pos-special/stdlib/collection/SortedSet.scala delete mode 100644 tests/pos-special/stdlib/collection/Stepper.scala delete mode 100644 tests/pos-special/stdlib/collection/StepperShape.scala delete mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala delete mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala delete mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala delete mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala delete mode 100644 tests/pos-special/stdlib/collection/StringParsers.scala delete mode 100644 tests/pos-special/stdlib/collection/WithFilter.scala delete mode 100644 tests/pos-special/stdlib/collection/concurrent/Map.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/BitOperations.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala delete mode 100644 
tests/pos-special/stdlib/collection/generic/IsIterable.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/IsMap.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/IsSeq.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/Subtractable.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/package.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/ArraySeq.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/BitSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/ChampCommon.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/HashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/HashSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/IntMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/ListMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/ListSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/LongMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Map.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/NumericRange.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Queue.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Range.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/SeqMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Set.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/SortedMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/SortedSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/TreeMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Vector.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/VectorMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/WrappedString.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/package.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ArraySeq.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/BitSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Cloneable.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/HashMap.scala delete mode 100644 
tests/pos-special/stdlib/collection/mutable/HashSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/HashTable.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ListMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/LongMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Map.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/MultiMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Queue.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Set.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/SortedMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/SortedSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Stack.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/TreeMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/TreeSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/package.scala delete mode 100644 tests/pos-special/stdlib/collection/package.scala diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index d2e18729836b..955892b2ae22 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -105,8 +105,7 @@ class TreeTypeMap( tree1.withType(mapType(tree1.tpe)) match { case id: Ident => if needsSelect(id.tpe) then - try ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) - catch case ex: TypeError => super.transform(id) + ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) else super.transform(id) case sel: Select => diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 40e94ebde5dd..dccf07ba199e 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -206,12 +206,6 @@ extension (tp: Type) case _: TypeRef | _: AppliedType => tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot) case _ => false - def isSealed(using Context): Boolean = tp match - case tp: TypeParamRef => tp.underlying.isSealed - case tp: TypeBounds => tp.hi.hasAnnotation(defn.Caps_SealedAnnot) - case tp: TypeRef => tp.symbol.is(Sealed) || tp.info.isSealed // TODO: drop symbol flag? 
- case _ => false - /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: @@ -231,11 +225,7 @@ extension (cls: ClassSymbol) && bc.givenSelfType.dealiasKeepAnnots.match case CapturingType(_, refs) => refs.isAlwaysEmpty case RetainingType(_, refs) => refs.isEmpty - case selfType => - isCaptureChecking // At Setup we have not processed self types yet, so - // unless a self type is explicitly given, we can't tell - // and err on the side of impure. - && selfType.exists && selfType.captureSet.isAlwaysEmpty + case selfType => selfType.exists && selfType.captureSet.isAlwaysEmpty extension (sym: Symbol) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 7261c760aa01..2586d449dfd4 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -872,7 +872,6 @@ object CaptureSet: upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) if variance > 0 || isExact then upper else if variance < 0 then CaptureSet.empty - else if ctx.mode.is(Mode.Printing) then upper else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") /** Apply `f` to each element in `xs`, and join result sets with `++` */ diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index a49bd9f79351..fab0689b4df2 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -14,14 +14,14 @@ import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPa import typer.Checking.{checkBounds, checkAppliedTypesIn} import typer.ErrorReporting.{Addenda, err} import typer.ProtoTypes.{AnySelectionProto, LhsProto} -import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} +import util.{SimpleIdentitySet, EqHashMap, SrcPos, Property} import transform.SymUtils.* -import transform.{Recheck, PreRecheck, CapturedVars} +import transform.{Recheck, PreRecheck} import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} import StdNames.nme -import NameKinds.{DefaultGetterName, WildcardParamName} +import NameKinds.DefaultGetterName import reporting.trace /** The capture checker */ @@ -147,49 +147,33 @@ object CheckCaptures: private def disallowRootCapabilitiesIn(tp: Type, carrier: Symbol, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = val check = new TypeTraverser: - private val seen = new EqHashSet[TypeRef] - - /** Check that there is at least one method containing carrier and defined - * in the scope of tparam. E.g. this is OK: - * def f[T] = { ... var x: T ... 
} - * So is this: - * class C[T] { def f() = { class D { var x: T }}} - * But this is not OK: - * class C[T] { object o { var x: T }} - */ extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = - carrier.exists && { - val encl = carrier.owner.enclosingMethodOrClass - if encl.isClass then tparam.isParametricIn(encl) - else - def recur(encl: Symbol): Boolean = - if tparam.owner == encl then true - else if encl.isStatic || !encl.exists then false - else recur(encl.owner.enclosingMethodOrClass) - recur(encl) - } + val encl = carrier.owner.enclosingMethodOrClass + if encl.isClass then tparam.isParametricIn(encl) + else + def recur(encl: Symbol): Boolean = + if tparam.owner == encl then true + else if encl.isStatic || !encl.exists then false + else recur(encl.owner.enclosingMethodOrClass) + recur(encl) def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => - if !seen.contains(t) then - capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") - seen += t - t.info match - case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => - if hi.isAny then - val detailStr = - if t eq tp then "variable" - else i"refers to the type variable $t, which" - report.error( - em"""$what cannot $have $tp since - |that type $detailStr is not sealed. - |$addendum""", - pos) - else - traverse(hi) - case _ => - traverseChildren(t) + capt.println(i"disallow $t, $tp, $what, ${t.symbol.is(Sealed)}") + t.info match + case TypeBounds(_, hi) + if !t.symbol.is(Sealed) && !t.symbol.isParametricIn(carrier) => + if hi.isAny then + report.error( + em"""$what cannot $have $tp since + |that type refers to the type variable $t, which is not sealed. + |$addendum""", + pos) + else + traverse(hi) + case _ => + traverseChildren(t) case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot => () case t => @@ -276,12 +260,11 @@ class CheckCaptures extends Recheck, SymTransformer: pos, provenance) /** Check subcapturing `cs1 <: cs2`, report error on failure */ - def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, - provenance: => String = "", cs1description: String = "")(using Context) = + def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context) = checkOK( cs1.subCaptures(cs2, frozen = false), - if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head}$cs1description is not" - else i"references $cs1$cs1description are not all", + if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head} is not" + else i"references $cs1 are not all", pos, provenance) /** The current environment */ @@ -559,10 +542,10 @@ class CheckCaptures extends Recheck, SymTransformer: val TypeApply(fn, args) = tree val polyType = atPhase(thisPhase.prev): fn.tpe.widen.asInstanceOf[TypeLambda] - for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do - if formal.isSealed then + for case (arg: TypeTree, pinfo, pname) <- args.lazyZip(polyType.paramInfos).lazyZip((polyType.paramNames)) do + if pinfo.bounds.hi.hasAnnotation(defn.Caps_SealedAnnot) then def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" - disallowRootCapabilitiesIn(arg.knownType, NoSymbol, + disallowRootCapabilitiesIn(arg.knownType, fn.symbol, i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) @@ -603,58 +586,13 @@ class CheckCaptures extends Recheck, SymTransformer: openClosures = 
openClosures.tail end recheckClosureBlock - /** Maps mutable variables to the symbols that capture them (in the - * CheckCaptures sense, i.e. symbol is referred to from a different method - * than the one it is defined in). - */ - private val capturedBy = util.HashMap[Symbol, Symbol]() - - /** Maps anonymous functions appearing as function arguments to - * the function that is called. - */ - private val anonFunCallee = util.HashMap[Symbol, Symbol]() - - /** Populates `capturedBy` and `anonFunCallee`. Called by `checkUnit`. - */ - private def collectCapturedMutVars(using Context) = new TreeTraverser: - def traverse(tree: Tree)(using Context) = tree match - case id: Ident => - val sym = id.symbol - if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then - val enclMeth = ctx.owner.enclosingMethod - if sym.enclosingMethod != enclMeth then - capturedBy(sym) = enclMeth - case Apply(fn, args) => - for case closureDef(mdef) <- args do - anonFunCallee(mdef.symbol) = fn.symbol - traverseChildren(tree) - case Inlined(_, bindings, expansion) => - traverse(bindings) - traverse(expansion) - case mdef: DefDef => - if !mdef.symbol.isInlineMethod then traverseChildren(tree) - case _ => - traverseChildren(tree) - override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Type = try if sym.is(Module) then sym.info // Modules are checked by checking the module class else if sym.is(Mutable) && !sym.hasAnnotation(defn.UncheckedCapturesAnnot) then - val (carrier, addendum) = capturedBy.get(sym) match - case Some(encl) => - val enclStr = - if encl.isAnonymousFunction then - val location = anonFunCallee.get(encl) match - case Some(meth) if meth.exists => i" argument in a call to $meth" - case _ => "" - s"an anonymous function$location" - else encl.show - (NoSymbol, i"\nNote that $sym does not count as local since it is captured by $enclStr") - case _ => - (sym, "") - disallowRootCapabilitiesIn( - tree.tpt.knownType, carrier, i"Mutable $sym", "have type", addendum, sym.srcPos) + disallowRootCapabilitiesIn(tree.tpt.knownType, sym, + i"mutable $sym", "have type", "", sym.srcPos) checkInferredResult(super.recheckValDef(tree, sym), tree) finally if !sym.is(Param) then @@ -742,15 +680,9 @@ class CheckCaptures extends Recheck, SymTransformer: if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) for pureBase <- cls.pureBaseClass do // (4) - def selfType = impl.body - .collect: - case TypeDef(tpnme.SELF, rhs) => rhs - .headOption - .getOrElse(tree) - .orElse(tree) checkSubset(thisSet, CaptureSet.empty.withDescription(i"of pure base class $pureBase"), - selfType.srcPos, cs1description = " captured by this self type") + tree.srcPos) super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -1190,8 +1122,6 @@ class CheckCaptures extends Recheck, SymTransformer: override def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = !setup.isPreCC(overriding) && !setup.isPreCC(overridden) - - override def checkInheritedTraitParameters: Boolean = false end OverridingPairsCheckerCC def traverse(t: Tree)(using Context) = @@ -1228,12 +1158,11 @@ class CheckCaptures extends Recheck, SymTransformer: private val setup: SetupAPI = thisPhase.prev.asInstanceOf[Setup] override def checkUnit(unit: CompilationUnit)(using Context): Unit = - setup.setupUnit(unit.tpdTree, completeDef) - collectCapturedMutVars.traverse(unit.tpdTree) + setup.setupUnit(ctx.compilationUnit.tpdTree, completeDef) if ctx.settings.YccPrintSetup.value then 
val echoHeader = "[[syntax tree at end of cc setup]]" - val treeString = show(unit.tpdTree) + val treeString = show(ctx.compilationUnit.tpdTree) report.echo(s"$echoHeader\n$treeString\n") withCaptureSetsExplained: @@ -1369,39 +1298,6 @@ class CheckCaptures extends Recheck, SymTransformer: checker.traverse(tree.knownType) end healTypeParam - def checkNoLocalRootIn(sym: Symbol, info: Type, pos: SrcPos)(using Context): Unit = - val check = new TypeTraverser: - def traverse(tp: Type) = tp match - case tp: TermRef if tp.isLocalRootCapability => - if tp.localRootOwner == sym then - report.error(i"local root $tp cannot appear in type of $sym", pos) - case tp: ClassInfo => - traverseChildren(tp) - for mbr <- tp.decls do - if !mbr.is(Private) then checkNoLocalRootIn(sym, mbr.info, mbr.srcPos) - case _ => - traverseChildren(tp) - check.traverse(info) - - def checkArraysAreSealedIn(tp: Type, pos: SrcPos)(using Context): Unit = - val check = new TypeTraverser: - def traverse(t: Type): Unit = - t match - case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass => - if !(pos.span.isSynthetic && ctx.reporter.errorsReported) - && !arg.typeSymbol.name.is(WildcardParamName) - then - CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol, - "Array", "have element type", - "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.", - pos) - traverseChildren(t) - case defn.RefinedFunctionOf(rinfo: MethodType) => - traverse(rinfo) - case _ => - traverseChildren(t) - check.traverse(tp) - /** Perform the following kinds of checks * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. @@ -1413,11 +1309,10 @@ class CheckCaptures extends Recheck, SymTransformer: val lctx = tree match case _: DefTree | _: TypeDef if tree.symbol.exists => ctx.withOwner(tree.symbol) case _ => ctx - trace(i"post check $tree"): - traverseChildren(tree)(using lctx) - check(tree) + traverseChildren(tree)(using lctx) + check(tree) def check(tree: Tree)(using Context) = tree match - case TypeApply(fun, args) => + case t @ TypeApply(fun, args) => fun.knownType.widen match case tl: PolyType => val normArgs = args.lazyZip(tl.paramInfos).map: (arg, bounds) => @@ -1426,10 +1321,6 @@ class CheckCaptures extends Recheck, SymTransformer: checkBounds(normArgs, tl) args.lazyZip(tl.paramNames).foreach(healTypeParam(_, _, fun.symbol)) case _ => - case _: ValOrDefDef | _: TypeDef => - checkNoLocalRootIn(tree.symbol, tree.symbol.info, tree.symbol.srcPos) - case tree: TypeTree => - checkArraysAreSealedIn(tree.tpe, tree.srcPos) case _ => end check end checker diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 8ba53693870c..68fd79048f41 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -522,9 +522,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: tree.symbol match case cls: ClassSymbol => val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo - if ((selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic) - && !cls.isPureClass - then + if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then // add capture set to self type of nested classes if no self type is given explicitly. 
        val newSelfType = CapturingType(cinfo.selfType, CaptureSet.Var(cls))
         val ps1 = inContext(ctx.withOwner(cls)):
@@ -707,5 +705,4 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI:

   def postCheck()(using Context): Unit =
     for chk <- todoAtPostCheck do chk(ctx)
-    todoAtPostCheck.clear()
 end Setup
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index 40370973ebf0..205d43cd07ca 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -1443,7 +1443,7 @@ class Definitions {
   /** Base classes that are assumed to be pure for the purposes of capture checking.
    *  Every class inheriting from a pure baseclass is pure.
    */
-  @tu lazy val pureBaseClasses = Set(ThrowableClass, PureClass)
+  @tu lazy val pureBaseClasses = Set(defn.ThrowableClass)

   /** Non-inheritable classes that are assumed to be pure for the purposes of capture checking.
    */
diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala
index bd30177adcb4..5a641416b3e1 100644
--- a/compiler/src/dotty/tools/dotc/core/Substituters.scala
+++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala
@@ -189,7 +189,7 @@ object Substituters:
     def apply(tp: Type): Type = substThis(tp, from, to, this)(using mapCtx)
   }

-  final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap {
+  final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap {
     def apply(tp: Type): Type = substRecThis(tp, from, to, this)(using mapCtx)
   }

diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
index 71b2636ab8ed..98fd7da3032a 100644
--- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
@@ -25,14 +25,14 @@ trait UniqueMessagePositions extends Reporter {
       || dia.pos.exists
          && !ctx.settings.YshowSuppressedErrors.value
-         && (dia.pos.start to dia.pos.end).exists: offset =>
-              positions.get((ctx.source, offset)).exists(_.hides(dia))
+         && (dia.pos.start to dia.pos.end).exists(pos =>
+              positions.get((ctx.source, pos)).exists(_.hides(dia)))

   override def markReported(dia: Diagnostic)(using Context): Unit =
     if dia.pos.exists then
-      for offset <- dia.pos.start to dia.pos.end do
-        positions.get((ctx.source, offset)) match
+      for (pos <- dia.pos.start to dia.pos.end)
+        positions.get(ctx.source, pos) match
           case Some(dia1) if dia1.hides(dia) =>
-          case _ => positions((ctx.source, offset)) = dia
+          case _ => positions((ctx.source, pos)) = dia
     super.markReported(dia)
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
index 202e3d72fa25..a018bbd1a3ac 100644
--- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
@@ -13,20 +13,25 @@ import core.NameKinds.TempResultName
 import core.Constants._
 import util.Store
 import dotty.tools.uncheckedNN
-import ast.tpd.*
-import compiletime.uninitialized
+
+import scala.compiletime.uninitialized

 /** This phase translates variables that are captured in closures to
  *  heap-allocated refs.
 */
class CapturedVars extends MiniPhase with IdentityDenotTransformer:
  thisPhase =>
+  import ast.tpd._

   override def phaseName: String = CapturedVars.name

   override def description: String = CapturedVars.description

-  private val captured = util.HashSet[Symbol]()
+  private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = uninitialized
+  private def captured(using Context) = ctx.store(Captured)
+
+  override def initContext(ctx: FreshContext): Unit =
+    Captured = ctx.addLocation(util.ReadOnlySet.empty)

   private class RefInfo(using Context) {
     /** The classes for which a Ref type exists. */
@@ -52,10 +57,33 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer:
     myRefInfo.uncheckedNN
   }

-  override def prepareForUnit(tree: Tree)(using Context): Context =
-    captured.clear()
-    atPhase(thisPhase)(CapturedVars.collect(captured)).traverse(tree)
-    ctx
+  private class CollectCaptured extends TreeTraverser {
+    private val captured = util.HashSet[Symbol]()
+    def traverse(tree: Tree)(using Context) = tree match {
+      case id: Ident =>
+        val sym = id.symbol
+        if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm) {
+          val enclMeth = ctx.owner.enclosingMethod
+          if (sym.enclosingMethod != enclMeth) {
+            report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth")
+            captured += sym
+          }
+        }
+      case _ =>
+        traverseChildren(tree)
+    }
+    def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = {
+      traverse(tree)
+      captured
+    }
+  }
+
+  override def prepareForUnit(tree: Tree)(using Context): Context = {
+    val captured = atPhase(thisPhase) {
+      CollectCaptured().runOver(ctx.compilationUnit.tpdTree)
+    }
+    ctx.fresh.updateStore(Captured, captured)
+  }

   /** The {Volatile|}{Int|Double|...|Object}Ref class corresponding to the class `cls`,
    *  depending on whether the reference should be @volatile
@@ -115,16 +143,3 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer:
 object CapturedVars:
   val name: String = "capturedVars"
   val description: String = "represent vars captured by closures as heap objects"
-
-  def collect(captured: util.HashSet[Symbol]): TreeTraverser = new:
-    def traverse(tree: Tree)(using Context) = tree match
-      case id: Ident =>
-        val sym = id.symbol
-        if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then
-          val enclMeth = ctx.owner.enclosingMethod
-          if sym.enclosingMethod != enclMeth then
-            report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth")
-            captured += sym
-      case _ =>
-        traverseChildren(tree)
-end CapturedVars
diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala
index b15a58b98b6f..9833b3cf177f 100644
--- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala
@@ -596,9 +596,9 @@ abstract class Recheck extends Phase, SymTransformer:

   /** Show tree with rechecked types instead of the types stored in the `.tpe` field */
   override def show(tree: untpd.Tree)(using Context): String =
-    atPhase(thisPhase):
-      withMode(Mode.Printing):
-        super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree]))
+    atPhase(thisPhase) {
+      super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree]))
+    }
 end Recheck

 /** A class that can be used to test basic rechecking without any customization */
diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala
index 5361f37c2a76..7ef552e3661c 100644
--- 
a/compiler/src/dotty/tools/dotc/typer/Namer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala
@@ -1042,14 +1042,7 @@ class Namer { typer: Typer =>
           tp

     val rhs1 = typedAheadType(rhs)
-    val rhsBodyType: TypeBounds =
-      val bounds = addVariances(rhs1.tpe).toBounds
-      if sym.is(Sealed) then
-        sym.resetFlag(Sealed)
-        bounds.derivedTypeBounds(bounds.lo,
-          AnnotatedType(bounds.hi, Annotation(defn.Caps_SealedAnnot, rhs1.span)))
-      else bounds
-
+    val rhsBodyType: TypeBounds = addVariances(rhs1.tpe).toBounds
     val unsafeInfo = if (isDerived) rhsBodyType else abstracted(rhsBodyType)

     def opaqueToBounds(info: Type): Type =
diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
index af279844f370..eef88e76971e 100644
--- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -267,9 +267,6 @@ object RefChecks {
             if !other.is(Deferred) then
               checkOverride(subtypeChecker, dcl, other)
     end checkAll
-
-    // Disabled for capture checking since traits can get different parameter refinements
-    def checkInheritedTraitParameters: Boolean = true
   end OverridingPairsChecker

   /** 1. Check all members of class `clazz` for overriding conditions.
@@ -854,7 +851,7 @@ object RefChecks {
         checkCaseClassInheritanceInvariant()
       }

-      if (!clazz.is(Trait) && checker.checkInheritedTraitParameters) {
+      if (!clazz.is(Trait)) {
         // check that parameterized base classes and traits are typed in the same way as from the superclass
         // I.e. say we have
         //
diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala
index fa89c82fc7e7..798e998ef241 100644
--- a/compiler/test/dotty/tools/dotc/CompilationTests.scala
+++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala
@@ -44,7 +44,7 @@ class CompilationTests {
       // Run tests for legacy lazy vals
       compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)),
       compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")),
-      compileDir("tests/pos-special/stdlib", allowDeepSubtypes),
+      compileDir("tests/pos-special/stdlib", defaultOptions),
     )

     if scala.util.Properties.isJavaAtLeast("16") then
diff --git a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
index e69de29bb2d1..477ac6d742f7 100644
--- a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
+++ b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
@@ -0,0 +1,12 @@
+package scala.annotation
+package unchecked
+
+/** An annotation for mutable variables that are allowed to capture
+ *  the root capability `cap`. Allowing this is not capture safe since
+ *  it can cause leakage of capabilities from local scopes by assigning
+ *  values retaining such capabilities to the annotated variable in
+ *  an outer scope.
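+ *
+ *  Illustrative use (a sketch modeled on tests/neg-custom-args/captures/sealed-refs.scala
+ *  further below; `Ref` is a one-element cell class with a `sealed` type parameter):
+ *  {{{
+ *  import annotation.unchecked.uncheckedCaptures
+ *
+ *  def h[B](x: B, next: B -> B) =
+ *    val r = Ref[B @uncheckedCaptures](x) // ok: the sealed check is suppressed
+ *    r.set(next(x))
+ *    r.get
+ *  }}}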
+ */ +class uncheckedCaptures extends StaticAnnotation + + diff --git a/library/src/scala/annotation/unchecked/uncheckedCaptures.scala b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala deleted file mode 100644 index 477ac6d742f7..000000000000 --- a/library/src/scala/annotation/unchecked/uncheckedCaptures.scala +++ /dev/null @@ -1,12 +0,0 @@ -package scala.annotation -package unchecked - -/** An annotation for mutable variables that are allowed to capture - * the root capability `cap`. Allowing this is not capture safe since - * it can cause leakage of capabilities from local scopes by assigning - * values retaining such capabilties to the annotated variable in - * an outer scope. - */ -class uncheckedCaptures extends StaticAnnotation - - diff --git a/tests/neg-custom-args/captures/buffers.check b/tests/neg-custom-args/captures/buffers.check deleted file mode 100644 index 07acea3c48e3..000000000000 --- a/tests/neg-custom-args/captures/buffers.check +++ /dev/null @@ -1,26 +0,0 @@ --- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------ -11 | var elems: Array[A] = new Array[A](10) // error // error - | ^ - | Mutable variable elems cannot have type Array[A] since - | that type refers to the type variable A, which is not sealed. --- Error: tests/neg-custom-args/captures/buffers.scala:16:38 ----------------------------------------------------------- -16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error - | ^^^^^^^^^^^ - | Sealed type variable A cannot be instantiated to box A^? since - | that type refers to the type variable A, which is not sealed. - | This is often caused by a local capability in an argument of constructor ArrayBuffer - | leaking as part of its result. --- Error: tests/neg-custom-args/captures/buffers.scala:11:13 ----------------------------------------------------------- -11 | var elems: Array[A] = new Array[A](10) // error // error - | ^^^^^^^^ - | Array cannot have element type A since - | that type variable is not sealed. - | Since arrays are mutable, they have to be treated like variables, - | so their element type must be sealed. --- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------ -22 | val x: Array[A] = new Array[A](10) // error - | ^^^^^^^^ - | Array cannot have element type A since - | that type variable is not sealed. - | Since arrays are mutable, they have to be treated like variables, - | so their element type must be sealed. diff --git a/tests/neg-custom-args/captures/buffers.scala b/tests/neg-custom-args/captures/buffers.scala deleted file mode 100644 index 760ddab96ae5..000000000000 --- a/tests/neg-custom-args/captures/buffers.scala +++ /dev/null @@ -1,30 +0,0 @@ -import reflect.ClassTag - -class Buffer[A] - -class ArrayBuffer[sealed A: ClassTag] extends Buffer[A]: - var elems: Array[A] = new Array[A](10) - def add(x: A): this.type = ??? - def at(i: Int): A = ??? - -class ArrayBufferBAD[A: ClassTag] extends Buffer[A]: - var elems: Array[A] = new Array[A](10) // error // error - def add(x: A): this.type = ??? - def at(i: Int): A = ??? 
- -object ArrayBuffer: - def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error - elems = xs.toArray - def apply[sealed A: ClassTag](xs: A*) = new ArrayBuffer: - elems = xs.toArray // ok - -class EncapsArray[A: ClassTag]: - val x: Array[A] = new Array[A](10) // error - - - - - - - - diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check index 070e815d6d45..335302c5c259 100644 --- a/tests/neg-custom-args/captures/cc-this.check +++ b/tests/neg-custom-args/captures/cc-this.check @@ -12,4 +12,4 @@ -- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------ 17 | class C4(val f: () => Int) extends C3 // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - |reference (C4.this.f : () => Int) captured by this self type is not included in the allowed capture set {} of pure base class class C3 + | reference (C4.this.f : () => Int) is not included in the allowed capture set {} of pure base class class C3 diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index bd9a1085d262..5e43a45b67f5 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -1,12 +1,6 @@ --- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:3:8 -------------------------------------------------------- -3 | this: D^ => // error - | ^^ - |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class C --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -------------------------------------------------------- 2 |class D extends C: // error - | ^ - | illegal inheritance: self type D^ of class D does not conform to self type C - | of parent class C - | - | longer explanation available when compiling with `-explain` + |^ + |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class C +3 | this: D^ => diff --git a/tests/neg-custom-args/captures/cc-this2/D_2.scala b/tests/neg-custom-args/captures/cc-this2/D_2.scala index de1a722f73a9..b22e5e456092 100644 --- a/tests/neg-custom-args/captures/cc-this2/D_2.scala +++ b/tests/neg-custom-args/captures/cc-this2/D_2.scala @@ -1,3 +1,3 @@ class D extends C: // error - this: D^ => // error + this: D^ => diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 72b88f252e59..16d623e64f7c 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -1,12 +1,13 @@ --- Error: tests/neg-custom-args/captures/exception-definitions.scala:3:8 ----------------------------------------------- -3 | self: Err^ => // error - | ^^^^ - |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 ----------------------------------------------- +2 |class Err extends Exception: // error + |^ + |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class Throwable +3 | self: Err^ => -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | 
val x = c // error | ^ - |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Err2 --- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:13 ---------------------------------------------- + |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 ----------------------------------------------- 8 | class Err3(c: Any^) extends Exception // error - | ^ - | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of the self type of class Err3 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of pure base class class Throwable diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala index fbc9f3fd1d33..a19b751825b8 100644 --- a/tests/neg-custom-args/captures/exception-definitions.scala +++ b/tests/neg-custom-args/captures/exception-definitions.scala @@ -1,6 +1,6 @@ -class Err extends Exception: - self: Err^ => // error +class Err extends Exception: // error + self: Err^ => def test(c: Any^) = class Err2 extends Exception: diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index 34588617c0b8..c8280e2ff3b7 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -5,7 +5,7 @@ class File: def write(x: String): Unit = ??? class Service: - var file: File^{cap[Service]} = uninitialized // error + var file: File^{cap[Service]} = uninitialized def log = file.write("log") def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T = diff --git a/tests/neg-custom-args/captures/leaked-curried.check b/tests/neg-custom-args/captures/leaked-curried.check index 3f0a9800a4ec..c23d1516acf5 100644 --- a/tests/neg-custom-args/captures/leaked-curried.check +++ b/tests/neg-custom-args/captures/leaked-curried.check @@ -2,7 +2,10 @@ 14 | () => () => io // error | ^^ |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Fuzz --- Error: tests/neg-custom-args/captures/leaked-curried.scala:17:20 ---------------------------------------------------- -17 | () => () => io // error - | ^^ - |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Foo +-- [E058] Type Mismatch Error: tests/neg-custom-args/captures/leaked-curried.scala:15:10 ------------------------------- +15 | class Foo extends Box, Pure: // error + | ^ + | illegal inheritance: self type Foo^{io} of class Foo does not conform to self type Pure + | of parent trait Pure + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/leaked-curried.scala b/tests/neg-custom-args/captures/leaked-curried.scala index f9238259e065..a7c48219b450 100644 --- a/tests/neg-custom-args/captures/leaked-curried.scala +++ b/tests/neg-custom-args/captures/leaked-curried.scala @@ -12,8 +12,8 @@ def main(): Unit = self => val get: () ->{} () ->{io} Cap^ = () => () => io // error - class Foo extends Box, Pure: + class Foo extends Box, Pure: // error val get: () ->{} () ->{io} Cap^ = - () => () => io // error + () => () => io new Foo val bad = leaked.get()().use() // using a leaked capability diff --git 
a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check index c0cc7f0a759c..f91f90fb652f 100644 --- a/tests/neg-custom-args/captures/levels.check +++ b/tests/neg-custom-args/captures/levels.check @@ -1,8 +1,8 @@ -- Error: tests/neg-custom-args/captures/levels.scala:6:16 ------------------------------------------------------------- 6 | private var v: T = init // error | ^ - | Mutable variable v cannot have type T since - | that type variable is not sealed. + | mutable variable v cannot have type T since + | that type refers to the type variable T, which is not sealed. -- Error: tests/neg-custom-args/captures/levels.scala:17:13 ------------------------------------------------------------ 17 | val _ = Ref[String => String]((x: String) => x) // error | ^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-custom-args/captures/localcaps.check b/tests/neg-custom-args/captures/localcaps.check deleted file mode 100644 index b09702749d10..000000000000 --- a/tests/neg-custom-args/captures/localcaps.check +++ /dev/null @@ -1,12 +0,0 @@ --- Error: tests/neg-custom-args/captures/localcaps.scala:4:12 ---------------------------------------------------------- -4 | def x: C^{cap[d]} = ??? // error - | ^^^^^^ - | `d` does not name an outer definition that represents a capture level --- Error: tests/neg-custom-args/captures/localcaps.scala:9:47 ---------------------------------------------------------- -9 | private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error - | ^^^^^^^ - | `z2` does not name an outer definition that represents a capture level --- Error: tests/neg-custom-args/captures/localcaps.scala:6:6 ----------------------------------------------------------- -6 | def y: C^{cap[C]} = ??? // error - | ^ - | local root (cap[C] : caps.Cap) cannot appear in type of class C diff --git a/tests/neg-custom-args/captures/localcaps.scala b/tests/neg-custom-args/captures/localcaps.scala index 049a1ee0d775..f5227bfef96b 100644 --- a/tests/neg-custom-args/captures/localcaps.scala +++ b/tests/neg-custom-args/captures/localcaps.scala @@ -3,7 +3,7 @@ class C: def x: C^{cap[d]} = ??? // error - def y: C^{cap[C]} = ??? // error + def y: C^{cap[C]} = ??? 
// ok private val z = (c0: caps.Cap) => (x: Int) => (c: C^{cap[C]}) => x // ok private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error diff --git a/tests/neg-custom-args/captures/pairs.check b/tests/neg-custom-args/captures/pairs.check index 9d1b3a76e164..38712469879f 100644 --- a/tests/neg-custom-args/captures/pairs.check +++ b/tests/neg-custom-args/captures/pairs.check @@ -12,11 +12,3 @@ | Required: Cap^ ->{d} Unit | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/pairs.scala:6:8 --------------------------------------------------------------- -6 | def fst: Cap^{cap[Pair]} ->{x} Unit = x // error - | ^ - | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair --- Error: tests/neg-custom-args/captures/pairs.scala:7:8 --------------------------------------------------------------- -7 | def snd: Cap^{cap[Pair]} ->{y} Unit = y // error - | ^ - | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair diff --git a/tests/neg-custom-args/captures/pairs.scala b/tests/neg-custom-args/captures/pairs.scala index 99b27639f729..4fc495d60f95 100644 --- a/tests/neg-custom-args/captures/pairs.scala +++ b/tests/neg-custom-args/captures/pairs.scala @@ -3,8 +3,8 @@ object Monomorphic2: class Pair(x: Cap => Unit, y: Cap => Unit): - def fst: Cap^{cap[Pair]} ->{x} Unit = x // error - def snd: Cap^{cap[Pair]} ->{y} Unit = y // error + def fst: Cap^{cap[Pair]} ->{x} Unit = x + def snd: Cap^{cap[Pair]} ->{y} Unit = y def test(c: Cap, d: Cap) = def f(x: Cap): Unit = if c == x then () diff --git a/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala deleted file mode 100644 index 0daecafbf9d0..000000000000 --- a/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala +++ /dev/null @@ -1,22 +0,0 @@ -import language.experimental.captureChecking -trait Cap: - def use: Int = 42 - -def usingCap[sealed T](op: Cap^ => T): T = ??? - -def badTest(): Unit = - def bad(b: Boolean)(c: Cap^): Cap^{cap[bad]} = // error - if b then c - else - val leaked = usingCap[Cap^{cap[bad]}](bad(true)) - leaked.use // boom - c - - usingCap[Unit]: c0 => - bad(false)(c0) - -class Bad: - def foo: Cap^{cap[Bad]} = ??? // error - private def bar: Cap^{cap[Bad]} = ??? // ok - - diff --git a/tests/neg-custom-args/captures/sealed-classes.scala b/tests/neg-custom-args/captures/sealed-classes.scala deleted file mode 100644 index b8cb0acbf5c5..000000000000 --- a/tests/neg-custom-args/captures/sealed-classes.scala +++ /dev/null @@ -1,21 +0,0 @@ -abstract class C1[A1]: - def set(x: A1): Unit - def get: A1 - -trait Co[+A]: - def get: A - -class C2[sealed A2] extends C1[A2], Co[A2]: // ok - private var x: A2 = ??? 
- def set(x: A2): Unit = - this.x = x - def get: A2 = x - -class C3[A3] extends C2[A3] // error - -abstract class C4[sealed A4] extends Co[A4] // ok - -abstract class C5[sealed +A5] extends Co[A5] // ok - -abstract class C6[A6] extends C5[A6] // error - diff --git a/tests/neg-custom-args/captures/sealed-leaks.check b/tests/neg-custom-args/captures/sealed-leaks.check deleted file mode 100644 index f7098eba32b6..000000000000 --- a/tests/neg-custom-args/captures/sealed-leaks.check +++ /dev/null @@ -1,50 +0,0 @@ --- [E129] Potential Issue Warning: tests/neg-custom-args/captures/sealed-leaks.scala:31:6 ------------------------------ -31 | () - | ^^ - | A pure expression does nothing in statement position - | - | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:12:27 ------------------------------------------------------ -12 | val later2 = usingLogFile[(() => Unit) | Null] { f => () => f.write(0) } // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Sealed type variable T cannot be instantiated to (() => Unit) | Null since - | that type captures the root capability `cap`. - | This is often caused by a local capability in an argument of method usingLogFile - | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/sealed-leaks.scala:19:26 --------------------------------- -19 | usingLogFile { f => x = f } // error - | ^ - | Found: (f : java.io.FileOutputStream^) - | Required: (java.io.FileOutputStream | Null)^{cap[Test2]} - | - | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:30:10 ------------------------------------------------------ -30 | var x: T = y // error - | ^ - | Mutable variable x cannot have type T since - | that type variable is not sealed. --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:39:8 ------------------------------------------------------- -39 | var x: T = y // error - | ^ - | Mutable variable x cannot have type T since - | that type variable is not sealed. - | - | Note that variable x does not count as local since it is captured by an anonymous function --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:43:8 ------------------------------------------------------- -43 | var x: T = y // error - | ^ - |Mutable variable x cannot have type T since - |that type variable is not sealed. - | - |Note that variable x does not count as local since it is captured by an anonymous function argument in a call to method identity --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:47:8 ------------------------------------------------------- -47 | var x: T = y // error - | ^ - | Mutable variable x cannot have type T since - | that type variable is not sealed. 
- | - | Note that variable x does not count as local since it is captured by method foo --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:11:14 ------------------------------------------------------ -11 | val later = usingLogFile { f => () => f.write(0) } // error - | ^^^^^^^^^^^^ - | local reference f leaks into outer capture set of type parameter T of method usingLogFile diff --git a/tests/neg-custom-args/captures/sealed-leaks.scala b/tests/neg-custom-args/captures/sealed-leaks.scala index 2555ba8a3e07..a7acf77b5678 100644 --- a/tests/neg-custom-args/captures/sealed-leaks.scala +++ b/tests/neg-custom-args/captures/sealed-leaks.scala @@ -18,34 +18,4 @@ def Test2 = usingLogFile { f => x = f } // error - later() - -def Test3 = - def f[T](y: T) = - var x: T = y - () - - class C[T](y: T): - object o: - var x: T = y // error - () - - class C2[T](y: T): - def f = - var x: T = y // ok - () - - def g1[T](y: T): T => Unit = - var x: T = y // error - y => x = y - - def g2[T](y: T): T => Unit = - var x: T = y // error - identity(y => x = y) - - def g3[T](y: T): Unit = - var x: T = y // error - def foo = - x = y - () - + later() \ No newline at end of file diff --git a/tests/neg-custom-args/captures/sealed-refs.scala b/tests/neg-custom-args/captures/sealed-refs.scala deleted file mode 100644 index 05fa483acf28..000000000000 --- a/tests/neg-custom-args/captures/sealed-refs.scala +++ /dev/null @@ -1,42 +0,0 @@ -class Ref[sealed A](init: A): - this: Ref[A]^ => - private var x: A = init - def get: A = x - def set(x: A): Unit = this.x = x - -class It[X]: - this: It[X]^ => - -def f1[B1](x: B1, next: B1 -> B1) = - var r = x // ok - r = next(x) - r - -def f2[B2](x: B2, next: B2 -> B2) = - val r = Ref[B2](x) // error - r.set(next(x)) - r.get - -def g[sealed B](x: B, next: B -> B) = - val r = Ref[B](x) // ok - r.set(next(x)) - r.get - -import annotation.unchecked.uncheckedCaptures - -def h[B](x: B, next: B -> B) = - val r = Ref[B @uncheckedCaptures](x) // ok - r.set(next(x)) - r.get - -def f3[B](x: B, next: B -> B) = - val r: Ref[B^{cap[f3]}] = Ref[B^{cap[f3]}](x) // error - r.set(next(x)) - val y = r.get - () - -def f4[B](x: B, next: B -> B) = - val r: Ref[B]^{cap[f4]} = Ref[B](x) // error - r.set(next(x)) - val y = r.get - () \ No newline at end of file diff --git a/tests/neg/class-mods.scala b/tests/neg/class-mods.scala index cf4348ad42d7..60e9fb279364 100644 --- a/tests/neg/class-mods.scala +++ b/tests/neg/class-mods.scala @@ -2,7 +2,7 @@ open final class Foo1 // error sealed open class Foo2 // error open type T1 // error -type T2 // ok +sealed type T2 // error abstract type T3 // error abstract open type T4 // error diff --git a/tests/pos-custom-args/captures/sealed-lowerbound.scala b/tests/pos-custom-args/captures/sealed-lowerbound.scala deleted file mode 100644 index e848f784cddc..000000000000 --- a/tests/pos-custom-args/captures/sealed-lowerbound.scala +++ /dev/null @@ -1,12 +0,0 @@ -def foo[sealed B](x: B): B = x - -def bar[B, sealed A >: B](x: A): A = foo[A](x) - -class C[sealed A] - -class CV[sealed A](x: Int): - def this() = this: - val x = new C[A]: - println("foo") - 0 - diff --git a/tests/pos-custom-args/captures/sealed-value-class.scala b/tests/pos-custom-args/captures/sealed-value-class.scala deleted file mode 100644 index b5f25bf2d203..000000000000 --- a/tests/pos-custom-args/captures/sealed-value-class.scala +++ /dev/null @@ -1,3 +0,0 @@ -class Ops[sealed A](xs: Array[A]) extends AnyVal: - - def f(p: A => Boolean): Array[A] = xs diff --git 
a/tests/pos-custom-args/captures/steppers.scala b/tests/pos-custom-args/captures/steppers.scala deleted file mode 100644 index 815ac938b492..000000000000 --- a/tests/pos-custom-args/captures/steppers.scala +++ /dev/null @@ -1,27 +0,0 @@ - -trait Stepper[+A]: - this: Stepper[A]^ => - -object Stepper: - trait EfficientSplit - -sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure - -trait IterableOnce[+A] extends Any: - this: IterableOnce[A]^ => - def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = ??? - -sealed abstract class ArraySeq[sealed T] extends IterableOnce[T], Pure: - def array: Array[_] - - def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = - val arr = array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]]).asInstanceOf[Array[T]] - ArraySeq.make(arr).asInstanceOf[ArraySeq[T]] - -object ArraySeq: - - def make[sealed T](x: Array[T]): ArraySeq[T] = ??? - - final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T], Pure: - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S & Stepper.EfficientSplit = ??? - diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala deleted file mode 100644 index a52fd0dbd162..000000000000 --- a/tests/pos-special/stdlib/collection/ArrayOps.scala +++ /dev/null @@ -1,1664 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import java.lang.Math.{max, min} -import java.util.Arrays -import language.experimental.captureChecking - -import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally - genericArrayOps => _, - booleanArrayOps => _, - byteArrayOps => _, - charArrayOps => _, - doubleArrayOps => _, - floatArrayOps => _, - intArrayOps => _, - longArrayOps => _, - refArrayOps => _, - shortArrayOps => _, - unitArrayOps => _, - genericWrapArray => _, - wrapRefArray => _, - wrapIntArray => _, - wrapDoubleArray => _, - wrapLongArray => _, - wrapFloatArray => _, - wrapCharArray => _, - wrapByteArray => _, - wrapShortArray => _, - wrapBooleanArray => _, - wrapUnitArray => _, - wrapString => _, - copyArrayToImmutableIndexedSeq => _, - _ -} -import scala.collection.Stepper.EfficientSplit -import scala.collection.immutable.Range -import scala.collection.mutable.ArrayBuilder -import scala.math.Ordering -import scala.reflect.ClassTag -import scala.util.Sorting - -object ArrayOps { - - @SerialVersionUID(3L) - private class ArrayView[sealed A](xs: Array[A]) extends AbstractIndexedSeqView[A] { - def length = xs.length - def apply(n: Int) = xs(n) - override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") - } - - /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ - class WithFilter[sealed A](p: A => Boolean, xs: Array[A]) { - - /** Apply `f` to each element for its side effects. - * Note: [U] parameter needed to help scalac's type inference. - */ - def foreach[U](f: A => U): Unit = { - val len = xs.length - var i = 0 - while(i < len) { - val x = xs(i) - if(p(x)) f(x) - i += 1 - } - } - - /** Builds a new array by applying a function to all elements of this array. 
- * - * @param f the function to apply to each element. - * @tparam B the element type of the returned array. - * @return a new array resulting from applying the given function - * `f` to each element of this array and collecting the results. - */ - def map[sealed B: ClassTag](f: A => B): Array[B] = { - val b = ArrayBuilder.make[B] - var i = 0 - while (i < xs.length) { - val x = xs(i) - if(p(x)) b += f(x) - i = i + 1 - } - b.result() - } - - /** Builds a new array by applying a function to all elements of this array - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned array. - * @return a new array resulting from applying the given collection-valued function - * `f` to each element of this array and concatenating the results. - */ - def flatMap[sealed B: ClassTag](f: A => IterableOnce[B]): Array[B] = { - val b = ArrayBuilder.make[B] - var i = 0 - while(i < xs.length) { - val x = xs(i) - if(p(x)) b ++= f(xs(i)) - i += 1 - } - b.result() - } - - def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = - flatMap[B](x => asIterable(f(x))) - - /** Creates a new non-strict filter which combines this filter with the given predicate. */ - def withFilter(q: A => Boolean): WithFilter[A]^{this, q} = new WithFilter[A](a => p(a) && q(a), xs) - } - - @SerialVersionUID(3L) - private[collection] final class ArrayIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { - private[this] var pos = 0 - private[this] val len = xs.length - override def knownSize: Int = len - pos - def hasNext: Boolean = pos < len - def next(): A = { - if (pos >= xs.length) Iterator.empty.next() - val r = xs(pos) - pos += 1 - r - } - override def drop(n: Int): Iterator[A] = { - if (n > 0) { - val newPos = pos + n - pos = - if (newPos < 0 /* overflow */) len - else Math.min(len, newPos) - } - this - } - } - - @SerialVersionUID(3L) - private final class ReverseIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { - private[this] var pos = xs.length-1 - def hasNext: Boolean = pos >= 0 - def next(): A = { - if (pos < 0) Iterator.empty.next() - val r = xs(pos) - pos -= 1 - r - } - - override def drop(n: Int): Iterator[A] = { - if (n > 0) pos = Math.max( -1, pos - n) - this - } - } - - @SerialVersionUID(3L) - private final class GroupedIterator[sealed A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { - private[this] var pos = 0 - def hasNext: Boolean = pos < xs.length - def next(): Array[A] = { - if(pos >= xs.length) throw new NoSuchElementException - val r = new ArrayOps(xs).slice(pos, pos+groupSize) - pos += groupSize - r - } - } - - /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to - * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. - */ - private final val MaxStableSortLength = 300 - - /** Avoid an allocation in [[collect]]. */ - private val fallback: Any => Any = _ => fallback -} - -/** This class serves as a wrapper for `Array`s with many of the operations found in - * indexed sequences. Where needed, instances of arrays are implicitly converted - * into this class. There is generally no reason to create an instance explicitly or use - * an `ArrayOps` type. 
It is better to work with plain `Array` types instead and rely on - * the implicit conversion to `ArrayOps` when calling a method (which does not actually - * allocate an instance of `ArrayOps` because it is a value class). - * - * Neither `Array` nor `ArrayOps` are proper collection types - * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and - * `immutable.ArraySeq` serve this purpose. - * - * The difference between this class and `ArraySeq`s is that calling transformer methods such as - * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. - * - * @tparam A type of the elements contained in this array. - */ -final class ArrayOps[sealed A](private val xs: Array[A]) extends AnyVal { - - @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - - /** The size of this array. - * - * @return the number of elements in this array. - */ - @`inline` def size: Int = xs.length - - /** The size of this array. - * - * @return the number of elements in this array. - */ - @`inline` def knownSize: Int = xs.length - - /** Tests whether the array is empty. - * - * @return `true` if the array contains no elements, `false` otherwise. - */ - @`inline` def isEmpty: Boolean = xs.length == 0 - - /** Tests whether the array is not empty. - * - * @return `true` if the array contains at least one element, `false` otherwise. - */ - @`inline` def nonEmpty: Boolean = xs.length != 0 - - /** Selects the first element of this array. - * - * @return the first element of this array. - * @throws NoSuchElementException if the array is empty. - */ - def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array") - - /** Selects the last element. - * - * @return The last element of this array. - * @throws NoSuchElementException If the array is empty. - */ - def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array") - - /** Optionally selects the first element. - * - * @return the first element of this array if it is nonempty, - * `None` if it is empty. - */ - def headOption: Option[A] = if(isEmpty) None else Some(head) - - /** Optionally selects the last element. - * - * @return the last element of this array$ if it is nonempty, - * `None` if it is empty. - */ - def lastOption: Option[A] = if(isEmpty) None else Some(last) - - /** Compares the size of this array to a test value. - * - * @param otherSize the test value that gets compared with the size. - * @return A value `x` where - * {{{ - * x < 0 if this.size < otherSize - * x == 0 if this.size == otherSize - * x > 0 if this.size > otherSize - * }}} - */ - def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize) - - /** Compares the length of this array to a test value. - * - * @param len the test value that gets compared with the length. - * @return A value `x` where - * {{{ - * x < 0 if this.length < len - * x == 0 if this.length == len - * x > 0 if this.length > len - * }}} - */ - def lengthCompare(len: Int): Int = Integer.compare(xs.length, len) - - /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int` - * because `size` is known and comparison is constant-time. 
- * - * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and - * allow the following more readable usages: - * - * {{{ - * this.sizeIs < size // this.sizeCompare(size) < 0 - * this.sizeIs <= size // this.sizeCompare(size) <= 0 - * this.sizeIs == size // this.sizeCompare(size) == 0 - * this.sizeIs != size // this.sizeCompare(size) != 0 - * this.sizeIs >= size // this.sizeCompare(size) >= 0 - * this.sizeIs > size // this.sizeCompare(size) > 0 - * }}} - */ - def sizeIs: Int = xs.length - - /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int` - * because `length` is known and comparison is constant-time. - * - * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and - * allow the following more readable usages: - * - * {{{ - * this.lengthIs < len // this.lengthCompare(len) < 0 - * this.lengthIs <= len // this.lengthCompare(len) <= 0 - * this.lengthIs == len // this.lengthCompare(len) == 0 - * this.lengthIs != len // this.lengthCompare(len) != 0 - * this.lengthIs >= len // this.lengthCompare(len) >= 0 - * this.lengthIs > len // this.lengthCompare(len) > 0 - * }}} - */ - def lengthIs: Int = xs.length - - /** Selects an interval of elements. The returned array is made up - * of all elements `x` which satisfy the invariant: - * {{{ - * from <= indexOf(x) < until - * }}} - * - * @param from the lowest index to include from this array. - * @param until the lowest index to EXCLUDE from this array. - * @return an array containing the elements greater than or equal to - * index `from` extending up to (but not including) index `until` - * of this array. - */ - def slice(from: Int, until: Int): Array[A] = { - import java.util.Arrays.copyOfRange - val lo = max(from, 0) - val hi = min(until, xs.length) - if (hi > lo) { - (((xs: Array[_]): @unchecked) match { - case x: Array[AnyRef] => copyOfRange(x, lo, hi) - case x: Array[Int] => copyOfRange(x, lo, hi) - case x: Array[Double] => copyOfRange(x, lo, hi) - case x: Array[Long] => copyOfRange(x, lo, hi) - case x: Array[Float] => copyOfRange(x, lo, hi) - case x: Array[Char] => copyOfRange(x, lo, hi) - case x: Array[Byte] => copyOfRange(x, lo, hi) - case x: Array[Short] => copyOfRange(x, lo, hi) - case x: Array[Boolean] => copyOfRange(x, lo, hi) - }).asInstanceOf[Array[A]] - } else new Array[A](0) - } - - /** The rest of the array without its first element. */ - def tail: Array[A] = - if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) - - /** The initial part of the array without its last element. */ - def init: Array[A] = - if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) - - /** Iterates over the tails of this array. The first value will be this - * array and the final one will be an empty array, with the intervening - * values the results of successive applications of `tail`. - * - * @return an iterator over all the tails of this array - */ - def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) - - /** Iterates over the inits of this array. The first value will be this - * array and the final one will be an empty array, with the intervening - * values the results of successive applications of `init`. - * - * @return an iterator over all the inits of this array - */ - def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) - - // A helper for tails and inits. 
- private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]]^{f} = - Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) - - /** An array containing the first `n` elements of this array. */ - def take(n: Int): Array[A] = slice(0, n) - - /** The rest of the array without its `n` first elements. */ - def drop(n: Int): Array[A] = slice(n, xs.length) - - /** An array containing the last `n` elements of this array. */ - def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) - - /** The rest of the array without its `n` last elements. */ - def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) - - /** Takes longest prefix of elements that satisfy a predicate. - * - * @param p The predicate used to test elements. - * @return the longest prefix of this array whose elements all satisfy - * the predicate `p`. - */ - def takeWhile(p: A => Boolean): Array[A] = { - val i = indexWhere(x => !p(x)) - val hi = if(i < 0) xs.length else i - slice(0, hi) - } - - /** Drops longest prefix of elements that satisfy a predicate. - * - * @param p The predicate used to test elements. - * @return the longest suffix of this array whose first element - * does not satisfy the predicate `p`. - */ - def dropWhile(p: A => Boolean): Array[A] = { - val i = indexWhere(x => !p(x)) - val lo = if(i < 0) xs.length else i - slice(lo, xs.length) - } - - def iterator: Iterator[A] = - ((xs: Any @unchecked) match { - case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Int] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Double] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Long] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Float] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Char] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Short] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs) - case null => throw new NullPointerException - }).asInstanceOf[Iterator[A]] - - def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import convert.impl._ - val s = (shape.shape: @unchecked) match { - case StepperShape.ReferenceShape => (xs: Any) match { - case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length) - case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length) - } - case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length) - case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length) - case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length) - case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length) - case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length) - case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length) - case StepperShape.FloatShape => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float ]], 0, xs.length) - } - s.asInstanceOf[S with EfficientSplit] - } - - /** Partitions elements in fixed size arrays. 
- * @see [[scala.collection.Iterator]], method `grouped` - * - * @param size the number of elements per group - * @return An iterator producing arrays of size `size`, except the - * last will be less than size `size` if the elements don't divide evenly. - */ - def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size) - - /** Splits this array into a prefix/suffix pair according to a predicate. - * - * Note: `c span p` is equivalent to (but more efficient than) - * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the - * predicate `p` does not cause any side-effects. - * - * @param p the test predicate - * @return a pair consisting of the longest prefix of this array whose - * elements all satisfy `p`, and the rest of this array. - */ - def span(p: A => Boolean): (Array[A], Array[A]) = { - val i = indexWhere(x => !p(x)) - val idx = if(i < 0) xs.length else i - (slice(0, idx), slice(idx, xs.length)) - } - - /** Splits this array into two at a given position. - * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. - * - * @param n the position at which to split. - * @return a pair of arrays consisting of the first `n` - * elements of this array, and the other elements. - */ - def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n)) - - /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. */ - def partition(p: A => Boolean): (Array[A], Array[A]) = { - val res1, res2 = ArrayBuilder.make[A] - var i = 0 - while(i < xs.length) { - val x = xs(i) - (if(p(x)) res1 else res2) += x - i += 1 - } - (res1.result(), res2.result()) - } - - /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one - * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second - * one made of those wrapped in [[scala.util.Right]]. - * - * Example: - * {{{ - * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap { - * case i: Int => Left(i) - * case s: String => Right(s) - * } - * // xs == (Array(1, 2, 3), - * // Array(one, two, three)) - * }}} - * - * @tparam A1 the element type of the first resulting collection - * @tparam A2 the element type of the second resulting collection - * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]] - * - * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], - * and the second one made of those wrapped in [[scala.util.Right]]. */ - def partitionMap[sealed A1: ClassTag, sealed A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { - val res1 = ArrayBuilder.make[A1] - val res2 = ArrayBuilder.make[A2] - var i = 0 - while(i < xs.length) { - f(xs(i)) match { - case Left(x) => res1 += x - case Right(x) => res2 += x - } - i += 1 - } - (res1.result(), res2.result()) - } - - /** Returns a new array with the elements in reversed order. */ - @inline def reverse: Array[A] = { - val len = xs.length - val res = new Array[A](len) - var i = 0 - while(i < len) { - res(len-i-1) = xs(i) - i += 1 - } - res - } - - /** An iterator yielding elements in reversed order. - * - * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. 
- * - * @return an iterator yielding the elements of this array in reversed order - */ - def reverseIterator: Iterator[A] = - ((xs: Any @unchecked) match { - case xs: Array[AnyRef] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Int] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Double] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Long] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Float] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Char] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Byte] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Short] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Unit] => new ArrayOps.ReverseIterator(xs) - case null => throw new NullPointerException - }).asInstanceOf[Iterator[A]] - - /** Selects all elements of this array which satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return a new array consisting of all elements of this array that satisfy the given predicate `p`. - */ - def filter(p: A => Boolean): Array[A] = { - val res = ArrayBuilder.make[A] - var i = 0 - while(i < xs.length) { - val x = xs(i) - if(p(x)) res += x - i += 1 - } - res.result() - } - - /** Selects all elements of this array which do not satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`. - */ - def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x)) - - /** Sorts this array according to an Ordering. - * - * The sort is stable. That is, elements that are equal (as determined by - * `lt`) appear in the same order in the sorted sequence as in the original. - * - * @see [[scala.math.Ordering]] - * - * @param ord the ordering to be used to compare elements. - * @return an array consisting of the elements of this array - * sorted according to the ordering `ord`. - */ - def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = { - val len = xs.length - def boxed = if(len < ArrayOps.MaxStableSortLength) { - val a = xs.clone() - Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]]) - a - } else { - val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) - Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) - Array.copyAs[A](a, len) - } - if(len <= 1) xs.clone() - else ((xs: Array[_]) match { - case xs: Array[AnyRef] => - val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a - case xs: Array[Int] => - if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Long] => - if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Char] => - if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Byte] => - if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Short] => - if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Boolean] => - if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a } - else boxed - case xs => boxed - }).asInstanceOf[Array[A]] - } - - /** Sorts this array according to a comparison function. - * - * The sort is stable. That is, elements that are equal (as determined by - * `lt`) appear in the same order in the sorted sequence as in the original. 
- * - * @param lt the comparison function which tests whether - * its first argument precedes its second argument in - * the desired ordering. - * @return an array consisting of the elements of this array - * sorted according to the comparison function `lt`. - */ - def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt)) - - /** Sorts this array according to the Ordering which results from transforming - * an implicitly given Ordering with a transformation function. - * - * @see [[scala.math.Ordering]] - * @param f the transformation function mapping elements - * to some other domain `B`. - * @param ord the ordering assumed on domain `B`. - * @tparam B the target type of the transformation `f`, and the type where - * the ordering `ord` is defined. - * @return an array consisting of the elements of this array - * sorted according to the ordering where `x < y` if - * `ord.lt(f(x), f(y))`. - */ - def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f) - - /** Creates a non-strict filter of this array. - * - * Note: the difference between `c filter p` and `c withFilter p` is that - * the former creates a new array, whereas the latter only - * restricts the domain of subsequent `map`, `flatMap`, `foreach`, - * and `withFilter` operations. - * - * @param p the predicate used to test elements. - * @return an object of class `ArrayOps.WithFilter`, which supports - * `map`, `flatMap`, `foreach`, and `withFilter` operations. - * All these operations apply to those elements of this array - * which satisfy the predicate `p`. - */ - def withFilter(p: A => Boolean): ArrayOps.WithFilter[A]^{p} = new ArrayOps.WithFilter[A](p, xs) - - /** Finds index of first occurrence of some value in this array after or at some start index. - * - * @param elem the element value to search for. - * @param from the start index - * @return the index `>= from` of the first element of this array that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - */ - def indexOf(elem: A, from: Int = 0): Int = { - var i = from - while(i < xs.length) { - if(elem == xs(i)) return i - i += 1 - } - -1 - } - - /** Finds index of the first element satisfying some predicate after or at some start index. - * - * @param p the predicate used to test elements. - * @param from the start index - * @return the index `>= from` of the first element of this array that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = { - var i = from - while(i < xs.length) { - if(p(xs(i))) return i - i += 1 - } - -1 - } - - /** Finds index of last occurrence of some value in this array before or at a given end index. - * - * @param elem the element value to search for. - * @param end the end index. - * @return the index `<= end` of the last element of this array that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - */ - def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = { - var i = min(end, xs.length-1) - while(i >= 0) { - if(elem == xs(i)) return i - i -= 1 - } - -1 - } - - /** Finds index of last element satisfying some predicate before or at given end index. - * - * @param p the predicate used to test elements. - * @return the index `<= end` of the last element of this array that satisfies the predicate `p`, - * or `-1`, if none exists. 
- */ - def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = { - var i = min(end, xs.length-1) - while(i >= 0) { - if(p(xs(i))) return i - i -= 1 - } - -1 - } - - /** Finds the first element of the array satisfying a predicate, if any. - * - * @param p the predicate used to test elements. - * @return an option value containing the first element in the array - * that satisfies `p`, or `None` if none exists. - */ - def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = { - val idx = indexWhere(p) - if(idx == -1) None else Some(xs(idx)) - } - - /** Tests whether a predicate holds for at least one element of this array. - * - * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false` - */ - def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0 - - /** Tests whether a predicate holds for all elements of this array. - * - * @param p the predicate used to test elements. - * @return `true` if this array is empty or the given predicate `p` - * holds for all elements of this array, otherwise `false`. - */ - def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = { - var i = 0 - while(i < xs.length) { - if(!p(xs(i))) return false - i += 1 - } - true - } - - /** Applies a binary operator to a start value and all elements of this array, - * going left to right. - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this array, - * going left to right with the start value `z` on the left: - * {{{ - * op(...op(z, x_1), x_2, ..., x_n) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this array. - * Returns `z` if this array is empty. - */ - def foldLeft[B](z: B)(op: (B, A) => B): B = { - def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { - val length = xs.length - var v: Any = z - var i = 0 - while(i < length) { - v = op(v, xs(i)) - i += 1 - } - v - } - ((xs: Any @unchecked) match { - case null => throw new NullPointerException // null-check first helps static analysis of instanceOf - case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - }).asInstanceOf[B] - } - - /** Produces an array containing cumulative results of applying the binary - * operator going left to right. - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return array with intermediate values. 
- * - * Example: - * {{{ - * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10) - * }}} - * - */ - def scanLeft[sealed B : ClassTag](z: B)(op: (B, A) => B): Array[B] = { - var v = z - var i = 0 - val res = new Array[B](xs.length + 1) - while(i < xs.length) { - res(i) = v - v = op(v, xs(i)) - i += 1 - } - res(i) = v - res - } - - /** Computes a prefix scan of the elements of the array. - * - * Note: The neutral element `z` may be applied more than once. - * - * @tparam B element type of the resulting array - * @param z neutral element for the operator `op` - * @param op the associative operator for the scan - * - * @return a new array containing the prefix scan of the elements in this array - */ - def scan[sealed B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) - - /** Produces an array containing cumulative results of applying the binary - * operator going right to left. - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return array with intermediate values. - * - * Example: - * {{{ - * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0) - * }}} - * - */ - def scanRight[sealed B : ClassTag](z: B)(op: (A, B) => B): Array[B] = { - var v = z - var i = xs.length - 1 - val res = new Array[B](xs.length + 1) - res(xs.length) = z - while(i >= 0) { - v = op(xs(i), v) - res(i) = v - i -= 1 - } - res - } - - /** Applies a binary operator to all elements of this array and a start value, - * going right to left. - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this array, - * going right to left with the start value `z` on the right: - * {{{ - * op(x_1, op(x_2, ... op(x_n, z)...)) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this array. - * Returns `z` if this array is empty. - */ - def foldRight[B](z: B)(op: (A, B) => B): B = { - def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { - var v = z - var i = xs.length - 1 - while(i >= 0) { - v = op(xs(i), v) - i -= 1 - } - v - } - ((xs: Any @unchecked) match { - case null => throw new NullPointerException - case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - }).asInstanceOf[B] - - } - - /** Folds the elements of this array using the specified associative binary operator. - * - * @tparam A1 a type parameter for the binary operator, a supertype of `A`. - * @param z a neutral element for the fold operation; may be added to the result - * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, - * 0 for addition, or 1 for multiplication). - * @param op a binary operator that must be associative. 
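- * For example `_ + _` with `z = 0`: `Array(1, 2, 3).fold(0)(_ + _)` is `6`.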
- * @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty. - */ - def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) - - /** Builds a new array by applying a function to all elements of this array. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned array. - * @return a new array resulting from applying the given function - * `f` to each element of this array and collecting the results. - */ - def map[sealed B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { - val len = xs.length - val ys = new Array[B](len) - if(len > 0) { - var i = 0 - (xs: Any @unchecked) match { - case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - } - } - ys - } - - def mapInPlace(f: A => A): Array[A] = { - var i = 0 - while (i < xs.length) { - xs.update(i, f(xs(i))) - i = i + 1 - } - xs - } - - /** Builds a new array by applying a function to all elements of this array - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned array. - * @return a new array resulting from applying the given collection-valued function - * `f` to each element of this array and concatenating the results. - */ - def flatMap[sealed B : ClassTag](f: A => IterableOnce[B]): Array[B] = { - val b = ArrayBuilder.make[B] - var i = 0 - while(i < xs.length) { - b ++= f(xs(i)) - i += 1 - } - b.result() - } - - def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = - flatMap[B](x => asIterable(f(x))) - - /** Flattens a two-dimensional array by concatenating all its rows - * into a single array. - * - * @tparam B Type of row elements. - * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. - * @return An array obtained by concatenating rows of this array. - */ - def flatten[sealed B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { - val b = ArrayBuilder.make[B] - val len = xs.length - var size = 0 - var i = 0 - while(i < len) { - xs(i) match { - case it: IterableOnce[_] => - val k = it.knownSize - if(k > 0) size += k - case a: Array[_] => size += a.length - case _ => - } - i += 1 - } - if(size > 0) b.sizeHint(size) - i = 0 - while(i < len) { - b ++= asIterable(xs(i)) - i += 1 - } - b.result() - } - - /** Builds a new array by applying a partial function to all elements of this array - * on which the function is defined. - * - * @param pf the partial function which filters and maps the array. - * @tparam B the element type of the returned array. 
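- * (for instance `Array[Any](1, "a", 2).collect { case i: Int => i * 2 }`
- * yields `Array(2, 4)`)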
- * @return a new array resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. - */ - def collect[sealed B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { - val fallback: Any => Any = ArrayOps.fallback - val b = ArrayBuilder.make[B] - var i = 0 - while (i < xs.length) { - val v = pf.applyOrElse(xs(i), fallback) - if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) - i += 1 - } - b.result() - } - - /** Finds the first element of the array for which the given partial function is defined, and applies the - * partial function to it. */ - def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = { - val fallback: Any => Any = ArrayOps.fallback - var i = 0 - while (i < xs.length) { - val v = pf.applyOrElse(xs(i), fallback) - if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B]) - i += 1 - } - None - } - - /** Returns an array formed from this array and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is longer than the other, its remaining elements are ignored. - * - * @param that The iterable providing the second half of each result pair - * @tparam B the type of the second half of the returned pairs - * @return a new array containing pairs consisting of corresponding elements of this array and `that`. - * The length of the returned array is the minimum of the lengths of this array and `that`. - */ - def zip[sealed B](that: IterableOnce[B]): Array[(A, B)] = { - val b = new ArrayBuilder.ofRef[(A, B)]() - val k = that.knownSize - b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) - var i = 0 - val it = that.iterator - while(i < xs.length && it.hasNext) { - b += ((xs(i), it.next())) - i += 1 - } - b.result() - } - - /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is - * invoked on the returned `LazyZip2` decorator. - * - * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of - * constructing and deconstructing intermediary tuples. - * - * {{{ - * val xs = List(1, 2, 3) - * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) - * // res == List(4, 8, 12) - * }}} - * - * @param that the iterable providing the second element of each eventual pair - * @tparam B the type of the second element in each eventual pair - * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs - * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. - */ - def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) - - /** Returns an array formed from this array and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is shorter than the other, - * placeholder elements are used to extend the shorter collection to the length of the longer. - * - * @param that the iterable providing the second half of each result pair - * @param thisElem the element to be used to fill up the result if this array is shorter than `that`. - * @param thatElem the element to be used to fill up the result if `that` is shorter than this array. - * @return a new array containing pairs consisting of corresponding elements of this array and `that`. 
- * The length of the returned array is the maximum of the lengths of this array and `that`. - * If this array is shorter than `that`, `thisElem` values are used to pad the result. - * If `that` is shorter than this array, `thatElem` values are used to pad the result. - */ - def zipAll[sealed A1 >: A, sealed B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { - val b = new ArrayBuilder.ofRef[(A1, B)]() - val k = that.knownSize - b.sizeHint(max(k, xs.length)) - var i = 0 - val it = that.iterator - while(i < xs.length && it.hasNext) { - b += ((xs(i), it.next())) - i += 1 - } - while(it.hasNext) { - b += ((thisElem, it.next())) - i += 1 - } - while(i < xs.length) { - b += ((xs(i), thatElem)) - i += 1 - } - b.result() - } - - /** Zips this array with its indices. - * - * @return A new array containing pairs consisting of all elements of this array paired with their index. - * Indices start at `0`. - */ - def zipWithIndex: Array[(A, Int)] = { - val b = new Array[(A, Int)](xs.length) - var i = 0 - while(i < xs.length) { - b(i) = ((xs(i), i)) - i += 1 - } - b - } - - /** A copy of this array with an element appended. */ - def appended[sealed B >: A : ClassTag](x: B): Array[B] = { - val dest = Array.copyAs[B](xs, xs.length+1) - dest(xs.length) = x - dest - } - - @`inline` final def :+ [sealed B >: A : ClassTag](x: B): Array[B] = appended(x) - - /** A copy of this array with an element prepended. */ - def prepended[sealed B >: A : ClassTag](x: B): Array[B] = { - val dest = new Array[B](xs.length + 1) - dest(0) = x - Array.copy(xs, 0, dest, 1, xs.length) - dest - } - - @`inline` final def +: [sealed B >: A : ClassTag](x: B): Array[B] = prepended(x) - - /** A copy of this array with all elements of a collection prepended. */ - def prependedAll[sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { - val b = ArrayBuilder.make[B] - val k = prefix.knownSize - if(k >= 0) b.sizeHint(k + xs.length) - b.addAll(prefix) - if(k < 0) b.sizeHint(b.length + xs.length) - b.addAll(xs) - b.result() - } - - /** A copy of this array with all elements of an array prepended. */ - def prependedAll[sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { - val dest = Array.copyAs[B](prefix, prefix.length+xs.length) - Array.copy(xs, 0, dest, prefix.length, xs.length) - dest - } - - @`inline` final def ++: [sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) - - @`inline` final def ++: [sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) - - /** A copy of this array with all elements of a collection appended. */ - def appendedAll[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { - val b = ArrayBuilder.make[B] - val k = suffix.knownSize - if(k >= 0) b.sizeHint(k + xs.length) - b.addAll(xs) - b.addAll(suffix) - b.result() - } - - /** A copy of this array with all elements of an array appended. 
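- * For example, `Array(1, 2) :++ Array(3, 4)` is `Array(1, 2, 3, 4)`.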
*/ - def appendedAll[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { - val dest = Array.copyAs[B](xs, xs.length+suffix.length) - Array.copy(suffix, 0, dest, xs.length, suffix.length) - dest - } - - @`inline` final def :++ [sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - - @`inline` final def :++ [sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - - @`inline` final def concat[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - - @`inline` final def concat[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - - @`inline` final def ++[sealed B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) - - @`inline` final def ++[sealed B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) - - /** Tests whether this array contains a given value as an element. - * - * @param elem the element to test. - * @return `true` if this array has an element that is equal (as - * determined by `==`) to `elem`, `false` otherwise. - */ - def contains(elem: A): Boolean = exists (_ == elem) - - /** Returns a copy of this array with patched values. - * Patching at negative indices is the same as patching starting at 0. - * Patching at indices at or larger than the length of the original array appends the patch to the end. - * If more values are replaced than actually exist, the excess is ignored. - * - * @param from The start index from which to patch - * @param other The patch values - * @param replaced The number of values in the original array that are replaced by the patch. - */ - def patch[sealed B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { - val b = ArrayBuilder.make[B] - val k = other.knownSize - val r = if(replaced < 0) 0 else replaced - if(k >= 0) b.sizeHint(xs.length + k - r) - val chunk1 = if(from > 0) min(from, xs.length) else 0 - if(chunk1 > 0) b.addAll(xs, 0, chunk1) - b ++= other - val remaining = xs.length - chunk1 - r - if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining) - b.result() - } - - /** Converts an array of pairs into an array of first elements and an array of second elements. - * - * @tparam A1 the type of the first half of the element pairs - * @tparam A2 the type of the second half of the element pairs - * @param asPair an implicit conversion which asserts that the element type - * of this Array is a pair. - * @param ct1 a class tag for `A1` type parameter that is required to create an instance - * of `Array[A1]` - * @param ct2 a class tag for `A2` type parameter that is required to create an instance - * of `Array[A2]` - * @return a pair of Arrays, containing, respectively, the first and second half - * of each element pair of this Array. - */ - def unzip[sealed A1, sealed A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { - val a1 = new Array[A1](xs.length) - val a2 = new Array[A2](xs.length) - var i = 0 - while (i < xs.length) { - val e = asPair(xs(i)) - a1(i) = e._1 - a2(i) = e._2 - i += 1 - } - (a1, a2) - } - - /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. 
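- *
- * For example (an illustrative array of triples):
- * {{{
- * Array((1, "a", true), (2, "b", false)).unzip3
- * // (Array(1, 2), Array("a", "b"), Array(true, false))
- * }}}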
- * - * @tparam A1 the type of the first of three elements in the triple - * @tparam A2 the type of the second of three elements in the triple - * @tparam A3 the type of the third of three elements in the triple - * @param asTriple an implicit conversion which asserts that the element type - * of this Array is a triple. - * @param ct1 a class tag for T1 type parameter that is required to create an instance - * of Array[T1] - * @param ct2 a class tag for T2 type parameter that is required to create an instance - * of Array[T2] - * @param ct3 a class tag for T3 type parameter that is required to create an instance - * of Array[T3] - * @return a triple of Arrays, containing, respectively, the first, second, and third - * elements from each element triple of this Array. - */ - def unzip3[sealed A1, sealed A2, sealed A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], - ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { - val a1 = new Array[A1](xs.length) - val a2 = new Array[A2](xs.length) - val a3 = new Array[A3](xs.length) - var i = 0 - while (i < xs.length) { - val e = asTriple(xs(i)) - a1(i) = e._1 - a2(i) = e._2 - a3(i) = e._3 - i += 1 - } - (a1, a2, a3) - } - - /** Transposes a two dimensional array. - * - * @tparam B Type of row elements. - * @param asArray A function that converts elements of this array to rows - arrays of type `B`. - * @return An array obtained by replacing elements of this arrays with rows the represent. - */ - def transpose[sealed B](implicit asArray: A => Array[B]): Array[Array[B]] = { - val aClass = xs.getClass.getComponentType - val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) - if (xs.length == 0) bb.result() - else { - def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) - val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) - for (xs <- this) { - var i = 0 - for (x <- new ArrayOps(asArray(xs))) { - bs(i) += x - i += 1 - } - } - for (b <- new ArrayOps(bs)) bb += b.result() - bb.result() - } - } - - /** Apply `f` to each element for its side effects. - * Note: [U] parameter needed to help scalac's type inference. - */ - def foreach[U](f: A => U): Unit = { - val len = xs.length - var i = 0 - (xs: Any @unchecked) match { - case xs: Array[AnyRef] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Int] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Double] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Long] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Float] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Char] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Byte] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Short] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - } - } - - /** Selects all the elements of this array ignoring the duplicates. - * - * @return a new array consisting of all the elements of this array without duplicates. - */ - def distinct: Array[A] = distinctBy(identity) - - /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying - * the transforming function `f`. 
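- * For example, `Array("a", "bb", "cc").distinctBy(_.length)` keeps
- * `Array("a", "bb")`, the first element seen for each distinct length.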
- * - * @param f The transforming function whose result is used to determine the uniqueness of each element - * @tparam B the type of the elements after being transformed by `f` - * @return a new array consisting of all the elements of this array without duplicates. - */ - def distinctBy[B](f: A -> B): Array[A] = - ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() - - /** A copy of this array with an element value appended until a given target length is reached. - * - * @param len the target length - * @param elem the padding value - * @tparam B the element type of the returned array. - * @return a new array consisting of - * all elements of this array followed by the minimal number of occurrences of `elem` so - * that the resulting collection has a length of at least `len`. - */ - def padTo[sealed B >: A : ClassTag](len: Int, elem: B): Array[B] = { - var i = xs.length - val newlen = max(i, len) - val dest = Array.copyAs[B](xs, newlen) - while(i < newlen) { - dest(i) = elem - i += 1 - } - dest - } - - /** Produces the range of all indices of this sequence. - * - * @return a `Range` value from `0` to one less than the length of this array. - */ - def indices: Range = Range(0, xs.length) - - /** Partitions this array into a map of arrays according to some discriminator function. - * - * @param f the discriminator function. - * @tparam K the type of keys returned by the discriminator function. - * @return A map from keys to arrays such that the following invariant holds: - * {{{ - * (xs groupBy f)(k) = xs filter (x => f(x) == k) - * }}} - * That is, every key `k` is bound to an array of those elements `x` - * for which `f(x)` equals `k`. - */ - def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = { - val m = mutable.Map.empty[K, ArrayBuilder[A]] - val len = xs.length - var i = 0 - while(i < len) { - val elem = xs(i) - val key = f(elem) - val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A]) - bldr += elem - i += 1 - } - m.view.mapValues(_.result()).toMap - } - - /** - * Partitions this array into a map of arrays according to a discriminator function `key`. - * Each element in a group is transformed into a value of type `B` using the `value` function. - * - * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. - * - * {{{ - * case class User(name: String, age: Int) - * - * def namesByAge(users: Array[User]): Map[Int, Array[String]] = - * users.groupMap(_.age)(_.name) - * }}} - * - * @param key the discriminator function - * @param f the element transformation function - * @tparam K the type of keys returned by the discriminator function - * @tparam B the type of values returned by the transformation function - */ - def groupMap[K, sealed B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { - val m = mutable.Map.empty[K, ArrayBuilder[B]] - val len = xs.length - var i = 0 - while(i < len) { - val elem = xs(i) - val k = key(elem) - val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B]) - bldr += f(elem) - i += 1 - } - m.view.mapValues(_.result()).toMap - } - - @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq - - def toIndexedSeq: immutable.IndexedSeq[A] = - immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) - - /** Copy elements of this array to another array. - * Fills the given array `xs` starting at index 0. - * Copying will stop once either all the elements of this array have been copied, - * or the end of the array is reached. - * - * @param xs the array to fill. 
- * @tparam B the type of the elements of the array. - */ - def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0) - - /** Copy elements of this array to another array. - * Fills the given array `xs` starting at index `start`. - * Copying will stop once either all the elements of this array have been copied, - * or the end of the array is reached. - * - * @param xs the array to fill. - * @param start the starting index within the destination array. - * @tparam B the type of the elements of the array. - */ - def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) - - /** Copy elements of this array to another array. - * Fills the given array `xs` starting at index `start` with at most `len` values. - * Copying will stop once either all the elements of this array have been copied, - * or the end of the array is reached, or `len` elements have been copied. - * - * @param xs the array to fill. - * @param start the starting index within the destination array. - * @param len the maximal number of elements to copy. - * @tparam B the type of the elements of the array. - */ - def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) - if (copied > 0) { - Array.copy(this.xs, 0, xs, start, copied) - } - copied - } - - /** Create a copy of this array with the specified element type. */ - def toArray[sealed B >: A: ClassTag]: Array[B] = { - val destination = new Array[B](xs.length) - copyToArray(destination, 0) - destination - } - - /** Counts the number of elements in this array which satisfy a predicate */ - def count(p: A => Boolean): Int = { - var i, res = 0 - val len = xs.length - while(i < len) { - if(p(xs(i))) res += 1 - i += 1 - } - res - } - - // can't use a default arg because we already have another overload with a default arg - /** Tests whether this array starts with the given array. */ - @`inline` def startsWith[sealed B >: A](that: Array[B]): Boolean = startsWith(that, 0) - - /** Tests whether this array contains the given array at a given index. - * - * @param that the array to test - * @param offset the index where the array is searched. - * @return `true` if the array `that` is contained in this array at - * index `offset`, otherwise `false`. - */ - def startsWith[sealed B >: A](that: Array[B], offset: Int): Boolean = { - val safeOffset = offset.max(0) - val thatl = that.length - if(thatl > xs.length-safeOffset) thatl == 0 - else { - var i = 0 - while(i < thatl) { - if(xs(i+safeOffset) != that(i)) return false - i += 1 - } - true - } - } - - /** Tests whether this array ends with the given array. - * - * @param that the array to test - * @return `true` if this array has `that` as a suffix, `false` otherwise. - */ - def endsWith[sealed B >: A](that: Array[B]): Boolean = { - val thatl = that.length - val off = xs.length - thatl - if(off < 0) false - else { - var i = 0 - while(i < thatl) { - if(xs(i+off) != that(i)) return false - i += 1 - } - true - } - } - - /** A copy of this array with one single replaced element. - * @param index the position of the replacement - * @param elem the replacing element - * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. - * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. 
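- * For example, `Array(1, 2, 3).updated(1, 9)` is `Array(1, 9, 3)`;
- * the original array is left unchanged.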
- */ - def updated[sealed B >: A : ClassTag](index: Int, elem: B): Array[B] = { - if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") - val dest = toArray[B] - dest(index) = elem - dest - } - - @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) - - - /* ************************************************************************************************************ - The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which - may not provide the best possible performance. We need them in `ArrayOps` because their return type - mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). - ************************************************************************************************************ */ - - - /** Computes the multiset difference between this array and another sequence. - * - * @param that the sequence of elements to remove - * @return a new array which contains all elements of this array - * except some of occurrences of elements that also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - */ - def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] - - /** Computes the multiset intersection between this array and another sequence. - * - * @param that the sequence of elements to intersect with. - * @return a new array which contains all elements of this array - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - */ - def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] - - /** Groups elements in fixed size blocks by passing a "sliding window" - * over them (as opposed to partitioning them, as is done in grouped.) - * @see [[scala.collection.Iterator]], method `sliding` - * - * @param size the number of elements per group - * @param step the distance between the first elements of successive groups - * @return An iterator producing arrays of size `size`, except the - * last element (which may be the only element) will be truncated - * if there are fewer than `size` elements remaining to be grouped. - */ - def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) - - /** Iterates over combinations of elements. - * - * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. - * - * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. - * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. - * - * If there is more than one way to generate the same combination, only one will be returned. - * - * For example, the result `"xy"` arbitrarily selected one of the `x` elements. - * - * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` - * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. - * - * It is not specified which of these equal combinations is returned. 
It is an implementation detail - * that should not be relied on. For example, the combination `"xx"` does not necessarily contain - * the first `x` in this sequence. This behavior is observable if the elements compare equal - * but are not identical. - * - * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order - * of the original sequence, but the order in which elements were selected, by "first index"; - * the order of each `x` element is also arbitrary. - * - * @return An Iterator which traverses the n-element combinations of this array - * @example {{{ - * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) - * // Array(a, b) - * // Array(a, c) - * // Array(b, b) - * // Array(b, c) - * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) - * // Array(b, b) - * // Array(b, a) - * }}} - */ - def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) - - /** Iterates over distinct permutations of elements. - * - * @return An Iterator which traverses the distinct permutations of this array. - * @example {{{ - * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) - * // Array(a, b, b) - * // Array(b, a, b) - * // Array(b, b, a) - * }}} - */ - def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) - - // we have another overload here, so we need to duplicate this method - /** Tests whether this array contains the given sequence at a given index. - * - * @param that the sequence to test - * @param offset the index where the sequence is searched. - * @return `true` if the sequence `that` is contained in this array at - * index `offset`, otherwise `false`. - */ - def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) - - // we have another overload here, so we need to duplicate this method - /** Tests whether this array ends with the given sequence. - * - * @param that the sequence to test - * @return `true` if this array has `that` as a suffix, `false` otherwise. - */ - def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) -} diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala deleted file mode 100644 index 39c15dbe808f..000000000000 --- a/tests/pos-special/stdlib/collection/BitSet.scala +++ /dev/null @@ -1,348 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import java.io.{ObjectInputStream, ObjectOutputStream} - -import scala.annotation.nowarn -import scala.collection.Stepper.EfficientSplit -import scala.collection.mutable.Builder -import language.experimental.captureChecking - -/** Base type of bitsets. - * - * This trait provides most of the operations of a `BitSet` independently of its representation. - * It is inherited by all concrete implementations of bitsets. - * - * @define bitsetinfo - * Bitsets are sets of non-negative integers which are represented as - * variable-size arrays of bits packed into 64-bit words. 
The lower bound of memory footprint of a bitset is - * determined by the largest number stored in it. - * @define coll bitset - * @define Coll `BitSet` - */ -trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] { - override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder - override def empty: BitSet = bitSetFactory.empty - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "BitSet" - override def unsorted: Set[Int] = this -} - -@SerialVersionUID(3L) -object BitSet extends SpecificIterableFactory[Int, BitSet] { - private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Int] first by calling `unsorted`." - private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Int, ${B})]. You may want to upcast to a Set[Int] first by calling `unsorted`." - - def empty: BitSet = immutable.BitSet.empty - def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder - def fromSpecific(it: IterableOnce[Int]^): BitSet = immutable.BitSet.fromSpecific(it) - - @SerialVersionUID(3L) - private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable { - - @transient protected var elems: Array[Long] = _ - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - val nwords = coll.nwords - out.writeInt(nwords) - var i = 0 - while(i < nwords) { - out.writeLong(coll.word(i)) - i += 1 - } - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - val nwords = in.readInt() - elems = new Array[Long](nwords) - var i = 0 - while(i < nwords) { - elems(i) = in.readLong() - i += 1 - } - } - - protected[this] def readResolve(): Any - } -} - -/** Base implementation type of bitsets */ -trait BitSetOps[+C <: BitSet with BitSetOps[C]] - extends SortedSetOps[Int, SortedSet, C] { self => - import BitSetOps._ - - def bitSetFactory: SpecificIterableFactory[Int, C] - - def unsorted: Set[Int] - - final def ordering: Ordering[Int] = Ordering.Int - - /** The number of words (each with 64 bits) making up the set */ - protected[collection] def nwords: Int - - /** The words at index `idx`, or 0L if outside the range of the set - * '''Note:''' requires `idx >= 0` - */ - protected[collection] def word(idx: Int): Long - - /** Creates a new set of this kind from an array of longs - */ - protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): C - - def contains(elem: Int): Boolean = - 0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L - - def iterator: Iterator[Int] = iteratorFrom(0) - - def iteratorFrom(start: Int): Iterator[Int] = new AbstractIterator[Int] { - private[this] var currentPos = if (start > 0) start >> LogWL else 0 - private[this] var currentWord = if (start > 0) word(currentPos) & (-1L << (start & (WordLength - 1))) else word(0) - final override def hasNext: Boolean = { - while (currentWord == 0) { - if (currentPos + 1 >= nwords) return false - currentPos += 1 - currentWord = word(currentPos) - } - true - } - final override def next(): Int = { - if (hasNext) { - val bitPos = java.lang.Long.numberOfTrailingZeros(currentWord) - currentWord &= currentWord - 1 - (currentPos << LogWL) + bitPos - } else Iterator.empty.next() - } - } - - override def stepper[S <: 
Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { - val st = scala.collection.convert.impl.BitSetStepper.from(this) - val r = - if (shape.shape == StepperShape.IntShape) st - else { - assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") - AnyStepper.ofParIntStepper(st) - } - r.asInstanceOf[S with EfficientSplit] - } - - override def size: Int = { - var s = 0 - var i = nwords - while (i > 0) { - i -= 1 - s += java.lang.Long.bitCount(word(i)) - } - s - } - - override def isEmpty: Boolean = 0 until nwords forall (i => word(i) == 0) - - @inline private[this] def smallestInt: Int = { - val thisnwords = nwords - var i = 0 - while(i < thisnwords) { - val currentWord = word(i) - if (currentWord != 0L) { - return java.lang.Long.numberOfTrailingZeros(currentWord) + (i * WordLength) - } - i += 1 - } - throw new UnsupportedOperationException("empty.smallestInt") - } - - @inline private[this] def largestInt: Int = { - var i = nwords - 1 - while(i >= 0) { - val currentWord = word(i) - if (currentWord != 0L) { - return ((i + 1) * WordLength) - java.lang.Long.numberOfLeadingZeros(currentWord) - 1 - } - i -= 1 - } - throw new UnsupportedOperationException("empty.largestInt") - } - - override def max[B >: Int](implicit ord: Ordering[B]): Int = - if (Ordering.Int eq ord) largestInt - else if (Ordering.Int isReverseOf ord) smallestInt - else super.max(ord) - - - override def min[B >: Int](implicit ord: Ordering[B]): Int = - if (Ordering.Int eq ord) smallestInt - else if (Ordering.Int isReverseOf ord) largestInt - else super.min(ord) - - override def foreach[U](f: Int => U): Unit = { - /* NOTE: while loops are significantly faster as of 2.11 and - one major use case of bitsets is performance. Also, there - is nothing to do when all bits are clear, so use that as - the inner loop condition. 
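- Each nonzero word is consumed from its least-significant bit:
- test the low bit with `w & 1L`, call `f(j)` when it is set, then
- advance with `w = w >>> 1` and `j += 1` until `w` reaches zero.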
*/ - var i = 0 - while (i < nwords) { - var w = word(i) - var j = i * WordLength - while (w != 0L) { - if ((w&1L) == 1L) f(j) - w = w >>> 1 - j += 1 - } - i += 1 - } - } - - /** Creates a bit mask for this set as a new array of longs - */ - def toBitMask: Array[Long] = { - val a = new Array[Long](nwords) - var i = a.length - while(i > 0) { - i -= 1 - a(i) = word(i) - } - a - } - - def rangeImpl(from: Option[Int], until: Option[Int]): C = { - val a = coll.toBitMask - val len = a.length - if (from.isDefined) { - val f = from.get - val w = f >> LogWL - val b = f & (WordLength - 1) - if (w >= 0) { - java.util.Arrays.fill(a, 0, math.min(w, len), 0) - if (b > 0 && w < len) a(w) &= ~((1L << b) - 1) - } - } - if (until.isDefined) { - val u = until.get - val w = u >> LogWL - val b = u & (WordLength - 1) - if (w < len) { - java.util.Arrays.fill(a, math.max(w + 1, 0), len, 0) - if (w >= 0) a(w) &= (1L << b) - 1 - } - } - coll.fromBitMaskNoCopy(a) - } - - override def concat(other: collection.IterableOnce[Int]): C = other match { - case otherBitset: BitSet => - val len = coll.nwords max otherBitset.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) | otherBitset.word(idx) - fromBitMaskNoCopy(words) - case _ => super.concat(other) - } - - override def intersect(other: Set[Int]): C = other match { - case otherBitset: BitSet => - val len = coll.nwords min otherBitset.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) & otherBitset.word(idx) - fromBitMaskNoCopy(words) - case _ => super.intersect(other) - } - - abstract override def diff(other: Set[Int]): C = other match { - case otherBitset: BitSet => - val len = coll.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) & ~otherBitset.word(idx) - fromBitMaskNoCopy(words) - case _ => super.diff(other) - } - - /** Computes the symmetric difference of this bitset and another bitset by performing - * a bitwise "exclusive-or". - * - * @param other the other bitset to take part in the symmetric difference. - * @return a bitset containing those bits of this - * bitset or the other bitset that are not contained in both bitsets. - */ - def xor(other: BitSet): C = { - val len = coll.nwords max other.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = coll.word(idx) ^ other.word(idx) - coll.fromBitMaskNoCopy(words) - } - - @`inline` final def ^ (other: BitSet): C = xor(other) - - /** - * Builds a new bitset by applying a function to all elements of this bitset - * @param f the function to apply to each element. 
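- * (for instance `BitSet(1, 2, 3).map(_ * 2)` is `BitSet(2, 4, 6)`)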
- * @return a new bitset resulting from applying the given function ''f'' to - * each element of this bitset and collecting the results - */ - def map(f: Int => Int): C = fromSpecific(new View.Map(this, f)) - - def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f)) - - def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf)) - - override def partition(p: Int => Boolean): (C, C) = { - val left = filter(p) - (left, this &~ left) - } -} - -object BitSetOps { - - /* Final vals can sometimes be inlined as constants (faster) */ - private[collection] final val LogWL = 6 - private[collection] final val WordLength = 64 - private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1 - - private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = { - var len = elems.length - while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1 - var newlen = len - if (idx >= newlen && w != 0L) newlen = idx + 1 - val newelems = new Array[Long](newlen) - Array.copy(elems, 0, newelems, 0, len) - if (idx < newlen) newelems(idx) = w - else assert(w == 0L) - newelems - } - - private[collection] def computeWordForFilter(pred: Int => Boolean, isFlipped: Boolean, oldWord: Long, wordIndex: Int): Long = - if (oldWord == 0L) 0L else { - var w = oldWord - val trailingZeroes = java.lang.Long.numberOfTrailingZeros(w) - var jmask = 1L << trailingZeroes - var j = wordIndex * BitSetOps.WordLength + trailingZeroes - val maxJ = (wordIndex + 1) * BitSetOps.WordLength - java.lang.Long.numberOfLeadingZeros(w) - while (j != maxJ) { - if ((w & jmask) != 0L) { - if (pred(j) == isFlipped) { - // j did not pass the filter here - w = w & ~jmask - } - } - jmask = jmask << 1 - j += 1 - } - w - } -} diff --git a/tests/pos-special/stdlib/collection/BufferedIterator.scala b/tests/pos-special/stdlib/collection/BufferedIterator.scala deleted file mode 100644 index cca40dd31d40..000000000000 --- a/tests/pos-special/stdlib/collection/BufferedIterator.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -/** Buffered iterators are iterators which provide a method `head` - * that inspects the next element without discarding it. - */ -trait BufferedIterator[+A] extends Iterator[A] { - - /** Returns next element of iterator without advancing beyond it. - */ - def head: A - - /** Returns an option of the next element of an iterator without advancing beyond it. - * @return the next element of this iterator if it has a next element - * `None` if it does not - */ - def headOption : Option[A] = if (hasNext) Some(head) else None - - override def buffered: this.type = this -} diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala deleted file mode 100644 index 0a3cc199d4dc..000000000000 --- a/tests/pos-special/stdlib/collection/BuildFrom.scala +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import scala.annotation.implicitNotFound -import scala.collection.mutable.Builder -import scala.collection.immutable.WrappedString -import scala.reflect.ClassTag -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - -/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. - * Implicit instances of `BuildFrom` are available for all collection types. - * - * @tparam From Type of source collection - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) - */ -@implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.") -trait BuildFrom[-From, -A, +C] extends Any { self => - def fromSpecific(from: From)(it: IterableOnce[A]^): C - // !!! this is wrong, we need two versions of fromSpecific; one mapping - // to C^{it} when C is an Iterable, and one mapping to C when C is a Seq, Map, or Set. - // But that requires a large scale refactoring of BuildFrom. The unsafeAssumePure - // calls in this file are needed to sweep that problem under the carpet. - - /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. - * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ - def newBuilder(from: From): Builder[A, C] - - @deprecated("Use newBuilder() instead of apply()", "2.13.0") - @`inline` def apply(from: From): Builder[A, C] = newBuilder(from) - - /** Partially apply a BuildFrom to a Factory */ - def toFactory(from: From): Factory[A, C] = new Factory[A, C] { - def fromSpecific(it: IterableOnce[A]^): C = self.fromSpecific(from)(it) - def newBuilder: Builder[A, C] = self.newBuilder(from) - } -} - -object BuildFrom extends BuildFromLowPriority1 { - - /** Build the source collection type from a MapOps */ - implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { - //TODO: Reuse a prototype instance - def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) - } - - /** Build the source collection type from a SortedMapOps */ - implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { - def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) - } - - implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] = - new BuildFrom[C, Int, C] { - def fromSpecific(from: C)(it: IterableOnce[Int]^): C = from.bitSetFactory.fromSpecific(it) - def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder - } - - implicit val buildFromString: 
BuildFrom[String, Char, String] = - new BuildFrom[String, Char, String] { - def fromSpecific(from: String)(it: IterableOnce[Char]^): String = Factory.stringFactory.fromSpecific(it) - def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder - } - - implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] = - new BuildFrom[WrappedString, Char, WrappedString] { - def fromSpecific(from: WrappedString)(it: IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(it) - def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder - } - - implicit def buildFromArray[sealed A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = - new BuildFrom[Array[_], A, Array[A]] { - def fromSpecific(from: Array[_])(it: IterableOnce[A]^): Array[A] = Factory.arrayFactory[A].fromSpecific(it) - def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder - } - - implicit def buildFromView[A, sealed B]: BuildFrom[View[A], B, View[B]] = - new BuildFrom[View[A], B, View[B]] { - def fromSpecific(from: View[A])(it: IterableOnce[B]^): View[B] = View.from(it).unsafeAssumePure - def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder - } - -} - -trait BuildFromLowPriority1 extends BuildFromLowPriority2 { - - /** Build the source collection type from an Iterable with SortedOps */ - // Restating the upper bound of CC in the result type seems redundant, but it serves to prune the - // implicit search space for faster compilation and reduced change of divergence. See the compilation - // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209 - implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] { - def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) - } - - implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] = - new BuildFrom[String, A, immutable.IndexedSeq[A]] { - def fromSpecific(from: String)(it: IterableOnce[A]^): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) - def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] - } -} - -trait BuildFromLowPriority2 { - /** Build the source collection type from an IterableOps */ - implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] { - //TODO: Reuse a prototype instance - def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it).unsafeAssumePure - } - - implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] { - def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder - def fromSpecific(from: Iterator[_])(it: IterableOnce[A]^): Iterator[A] = Iterator.from(it).unsafeAssumePure - } -} diff --git a/tests/pos-special/stdlib/collection/DefaultMap.scala 
b/tests/pos-special/stdlib/collection/DefaultMap.scala deleted file mode 100644 index baa9eceadae5..000000000000 --- a/tests/pos-special/stdlib/collection/DefaultMap.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -import language.experimental.captureChecking - -/** A default map which builds a default `immutable.Map` implementation for all - * transformations. - */ -@deprecated("DefaultMap is no longer necessary; extend Map directly", "2.13.0") -trait DefaultMap[K, +V] extends Map[K, V] diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala deleted file mode 100644 index c45776b62b9c..000000000000 --- a/tests/pos-special/stdlib/collection/Factory.scala +++ /dev/null @@ -1,798 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.collection.immutable.NumericRange -import scala.language.implicitConversions -import scala.collection.mutable.Builder -import scala.annotation.unchecked.uncheckedVariance -import scala.reflect.ClassTag -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - -/** - * A factory that builds a collection of type `C` with elements of type `A`. - * - * This is a general form of any factory ([[IterableFactory]], - * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose - * element type is fixed. - * - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) - */ -trait Factory[-A, +C] extends Pure { - - /** - * @return A collection of type `C` containing the same elements - * as the source collection `it`. - * @param it Source collection - */ - def fromSpecific(it: IterableOnce[A]^): C - - /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. - * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
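- * A small usage sketch, relying on the implicit conversion from an
- * `IterableFactory` companion (here `List`) to a `Factory`:
- * {{{
- * val f: Factory[Int, List[Int]] = List
- * val b = f.newBuilder
- * b += 1; b += 2
- * b.result() // List(1, 2)
- * }}}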
*/ - def newBuilder: Builder[A, C] -} - -object Factory { - - implicit val stringFactory: Factory[Char, String] = new StringFactory - @SerialVersionUID(3L) - private class StringFactory extends Factory[Char, String] with Serializable { - def fromSpecific(it: IterableOnce[Char]^): String = { - val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) - b ++= it - b.result() - } - def newBuilder: Builder[Char, String] = new mutable.StringBuilder() - } - - implicit def arrayFactory[sealed A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] - @SerialVersionUID(3L) - private class ArrayFactory[sealed A: ClassTag] extends Factory[A, Array[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]^): Array[A] = { - val b = newBuilder - b.sizeHint(scala.math.max(0, it.knownSize)) - b ++= it - b.result() - } - def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A] - } - -} - -/** Base trait for companion objects of unconstrained collection types that may require - * multiple traversals of a source collection to build a target collection `CC`. - * - * @tparam CC Collection type constructor (e.g. `List`) - * @define factoryInfo - * This object provides a set of operations to create $Coll values. - * - * @define coll collection - * @define Coll `Iterable` - */ -trait IterableFactory[+CC[_]] extends Serializable, Pure { - - /** Creates a target $coll from an existing source collection - * - * @param source Source collection - * @tparam A the type of the collection’s elements - * @return a new $coll with the elements of `source` - */ - def from[A](source: IterableOnce[A]^): CC[A]^{source} - - /** An empty collection - * @tparam A the type of the ${coll}'s elements - */ - def empty[A]: CC[A] - - /** Creates a $coll with the specified elements. - * @tparam A the type of the ${coll}'s elements - * @param elems the elements of the created $coll - * @return a new $coll with elements `elems` - */ - def apply[A](elems: A*): CC[A] = from(elems) - - /** Produces a $coll containing repeated applications of a function to a start value. - * - * @param start the start value of the $coll - * @param len the number of elements contained in the $coll - * @param f the function that's repeatedly applied - * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A, len: Int)(f: A => A): CC[A]^{f} = from(new View.Iterate(start, len)(f)) - - /** Produces a $coll that uses a function `f` to produce elements of type `A` - * and update an internal state of type `S`. - * - * @param init State initial value - * @param f Computes the next element (or returns `None` to signal - * the end of the collection) - * @tparam A Type of the elements - * @tparam S Type of the internal state - * @return a $coll that produces elements using `f` until `f` returns `None` - */ - def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A]^{f} = from(new View.Unfold(init)(f)) - - /** Produces a $coll containing a sequence of increasing of integers. - * - * @param start the first element of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @return a $coll with values `start, start + 1, ..., end - 1` - */ - def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) - - /** Produces a $coll containing equally spaced values in some integer interval. 
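- * For example, `List.range(0, 10, 3)` is `List(0, 3, 6, 9)`.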
- * @param start the start value of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @param step the difference between successive elements of the $coll (must be positive or negative) - * @return a $coll with values `start, start + step, ...` up to, but excluding `end` - */ - def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) - - /** - * @return A builder for $Coll objects. - * @tparam A the type of the ${coll}’s elements - */ - def newBuilder[A]: Builder[A, CC[A]] - - /** Produces a $coll containing the results of some element computation a number of times. - * @param n the number of elements contained in the $coll. - * @param elem the element computation - * @return A $coll that contains the results of `n` evaluations of `elem`. - */ - def fill[A](n: Int)(elem: => A): CC[A]^{elem} = from(new View.Fill(n)(elem)) - - /** Produces a two-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc - ??? // fill(n1)(fill(n2)(elem)) - - /** Produces a three-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc - ??? // fill(n1)(fill(n2, n3)(elem)).unsafeAssumePure - - /** Produces a four-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc - ??? // fill(n1)(fill(n2, n3, n4)(elem)) - - /** Produces a five-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc - ??? 
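// In the unchecked standard library these nested forms reduce to the
// one-dimensional `fill`, e.g. (a sketch of the intended behaviour):
//   Vector.fill(2, 3)(0)  ==  Vector(Vector(0, 0, 0), Vector(0, 0, 0))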
// fill(n1)(fill(n2, n3, n4, n5)(elem)) - - /** Produces a $coll containing values of a given function over a range of integer values starting from 0. - * @param n The number of elements in the $coll - * @param f The function computing element values - * @return A $coll consisting of elements `f(0), ..., f(n -1)` - */ - def tabulate[A](n: Int)(f: Int => A): CC[A]^{f} = from(new View.Tabulate(n)(f)) - - /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2)` - * for `0 <= i1 < n1` and `0 <= i2 < n2`. - */ - def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc - ??? // tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc - ??? // tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc - ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc - ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) - - /** Concatenates all argument collections into a single $coll. - * - * @param xss the collections that are to be concatenated. - * @return the concatenation of all the collections. 
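For example (a sketch):
{{{
  List.concat(List(1, 2), Nil, List(3))   // List(1, 2, 3)
}}}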
- */ - def concat[A](xss: Iterable[A]*): CC[A] = { - from(xss.foldLeft(View.empty[A])(_ ++ _)) - } - - implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) -} - -object IterableFactory { - - /** - * Fixes the element type of `factory` to `A` - * @param factory The factory to fix the element type - * @tparam A Type of elements - * @tparam CC Collection type constructor of the factory (e.g. `Seq`, `List`) - * @return A [[Factory]] that uses the given `factory` to build a collection of elements - * of type `A` - */ - implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) - - @SerialVersionUID(3L) - private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it).unsafeAssumePure - // unsafeAssumePure needed but is unsound, since we confuse Seq and Iterable fromSpecific - def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] - } - - implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = - new BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = - factory.from(it).unsafeAssumePure // !!! see remark in BuildFrom why this is necessary - def newBuilder(from: Any) = factory.newBuilder - } - - @SerialVersionUID(3L) - class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { - override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) - def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]^): CC[E]^{it} = delegate.from(it) - def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] - } -} - -// !!! Needed to add this separate trait -trait FreeSeqFactory[+CC[A]] extends IterableFactory[CC]: - def from[A](source: IterableOnce[A]^): CC[A] - override def apply[A](elems: A*): CC[A] = from(elems) - -/** - * @tparam CC Collection type constructor (e.g. `List`) - */ -trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends FreeSeqFactory[CC] { - import SeqFactory.UnapplySeqWrapper - final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
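// `unapplySeq` is what makes companion patterns over sequences work, e.g. (a sketch):
//   List(1, 2, 3) match { case List(x, y, z) => x + y + z }   // 6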
-} - -object SeqFactory { - @SerialVersionUID(3L) - class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { - override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) - def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]^): CC[E] = delegate.from(it) - def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] - } - - final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { - def isEmpty: false = false - def get: UnapplySeqWrapper[A] = this - def lengthCompare(len: Int): Int = c.lengthCompare(len) - def apply(i: Int): A = c(i) - def drop(n: Int): scala.Seq[A] = c match { - case seq: scala.Seq[A] => seq.drop(n) - case _ => c.view.drop(n).toSeq - } - def toSeq: scala.Seq[A] = c.toSeq - } -} - -trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { - - override def fill[A](n: Int)(elem: => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.result() - } - - override def tabulate[A](n: Int)(f: Int => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 - } - b.result() - } - - override def concat[A](xss: Iterable[A]*): CC[A] = { - val b = newBuilder[A] - val knownSizes = xss.view.map(_.knownSize) - if (knownSizes forall (_ >= 0)) { - b.sizeHint(knownSizes.sum) - } - for (xs <- xss) b ++= xs - b.result() - } - -} - -/** - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) - * @define factoryInfo - * This object provides a set of operations to create $Coll values. - * - * @define coll collection - * @define Coll `Iterable` - */ -trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { - this: SpecificIterableFactory[A, C] => - - def empty: C - def apply(xs: A*): C = fromSpecific(xs) - def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) - def newBuilder: Builder[A, C] - - implicit def specificIterableFactory: Factory[A, C] = this -} - -/** - * @define factoryInfo - * This object provides a set of operations to create $Coll values. - * - * @define coll collection - * @define Coll `Iterable` - */ -trait MapFactory[+CC[_, _]] extends Serializable, Pure { - - /** - * An empty Map - */ - def empty[K, V]: CC[K, V] - - /** - * A collection of type Map generated from given iterable object. - */ - def from[K, V](it: IterableOnce[(K, V)]^): CC[K, V] - - /** - * A collection of type Map that contains given key/value bindings. - */ - def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) - - /** - * The default builder for Map objects. - */ - def newBuilder[K, V]: Builder[(K, V), CC[K, V]] - - /** - * The default Factory instance for maps. - */ - implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) -} - -object MapFactory { - - /** - * Fixes the key and value types of `factory` to `K` and `V`, respectively - * @param factory The factory to fix the key and value types - * @tparam K Type of keys - * @tparam V Type of values - * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) 
- * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` - * and values of type `V` - */ - implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) - - @SerialVersionUID(3L) - private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) - def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] - } - - implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = - new BuildFrom[Any, (K, V), CC[K, V]] { - def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) - def newBuilder(from: Any) = factory.newBuilder[K, V] - } - - @SerialVersionUID(3L) - class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { - override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) - def from[K, V](it: IterableOnce[(K, V)]^): C[K, V] = delegate.from(it) - def empty[K, V]: C[K, V] = delegate.empty - def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder - } -} - -/** Base trait for companion objects of collections that require an implicit evidence. - * @tparam CC Collection type constructor (e.g. `ArraySeq`) - * @tparam Ev Unary type constructor for the implicit evidence required for an element type - * (typically `Ordering` or `ClassTag`) - * - * @define factoryInfo - * This object provides a set of operations to create $Coll values. - * - * @define coll collection - * @define Coll `Iterable` - */ -trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable, Pure { - - def from[E : Ev](it: IterableOnce[E]^): CC[E] - - def empty[A : Ev]: CC[A] - - def apply[A : Ev](xs: A*): CC[A] = from(xs) - - /** Produces a $coll containing the results of some element computation a number of times. - * @param n the number of elements contained in the $coll. - * @param elem the element computation - * @return A $coll that contains the results of `n` evaluations of `elem`. - */ - def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) - - /** Produces a $coll containing values of a given function over a range of integer values starting from 0. - * @param n The number of elements in the $coll - * @param f The function computing element values - * @return A $coll consisting of elements `f(0), ..., f(n -1)` - */ - def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) - - /** Produces a $coll containing repeated applications of a function to a start value. - * - * @param start the start value of the $coll - * @param len the number of elements contained in the $coll - * @param f the function that's repeatedly applied - * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) - - /** Produces a $coll that uses a function `f` to produce elements of type `A` - * and update an internal state of type `S`. 
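For instance, with an evidence-carrying factory such as `TreeSet` (a sketch):
{{{
  import scala.collection.immutable.TreeSet
  TreeSet.unfold(1)(s => if (s < 20) Some((s, s * 3)) else None)   // TreeSet(1, 3, 9)
}}}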
- * - * @param init State initial value - * @param f Computes the next element (or returns `None` to signal - * the end of the collection) - * @tparam A Type of the elements - * @tparam S Type of the internal state - * @return a $coll that produces elements using `f` until `f` returns `None` - */ - def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) - - def newBuilder[A : Ev]: Builder[A, CC[A]] - - implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this) -} - -object EvidenceIterableFactory { - - /** - * Fixes the element type of `factory` to `A` - * @param factory The factory to fix the element type - * @tparam A Type of elements - * @tparam CC Collection type constructor of the factory (e.g. `TreeSet`) - * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`) - * @return A [[Factory]] that uses the given `factory` to build a collection of elements - * of type `A` - */ - implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory) - - @SerialVersionUID(3L) - private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it) - def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] - } - - implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory) - private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = factory.from[A](it) - def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A] - } - - @SerialVersionUID(3L) - class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { - override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) - def empty[A : Ev]: CC[A] = delegate.empty - def from[E : Ev](it: IterableOnce[E]^): CC[E] = delegate.from(it) - def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] - } -} - -/** Base trait for companion objects of collections that require an implicit `Ordering`. - * @tparam CC Collection type constructor (e.g. `SortedSet`) - */ -trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering] - -object SortedIterableFactory { - @SerialVersionUID(3L) - class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering]) - extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC] -} - -/** Base trait for companion objects of collections that require an implicit `ClassTag`. - * @tparam CC Collection type constructor (e.g. `ArraySeq`) - */ -trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] { - - @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] = - ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays - - /** Produces a $coll containing a sequence of increasing of integers. 
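For example, with a factory that also needs a `ClassTag` (a sketch):
{{{
  import scala.collection.immutable.ArraySeq
  ArraySeq.range(1, 5)   // ArraySeq(1, 2, 3, 4)
}}}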
- * - * @param start the first element of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @return a $coll with values `start, start + 1, ..., end - 1` - */ - def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) - - /** Produces a $coll containing equally spaced values in some integer interval. - * @param start the start value of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @param step the difference between successive elements of the $coll (must be positive or negative) - * @return a $coll with values `start, start + step, ...` up to, but excluding `end` - */ - def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) - - /** Produces a two-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. - */ - def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) - - /** Produces a three-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. - */ - def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) - - /** Produces a four-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. - */ - def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4)(elem)) - - /** Produces a five-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. - */ - def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4, n5)(elem)) - - /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
- * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2)` - * for `0 <= i1 < n1` and `0 <= i2 < n2`. - */ - def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. - */ - def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. - */ - def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. - */ - def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) -} - -object ClassTagIterableFactory { - @SerialVersionUID(3L) - class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) - extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] - - /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be - * sound depending on the use of the `ClassTag` by the collection implementation. 
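In the standard library, `immutable.ArraySeq.untagged` is such a delegate (a sketch of the consequence):
{{{
  scala.collection.immutable.ArraySeq.untagged.from(List(1, 2, 3))
  // builds a boxed ArraySeq backed by an Array[AnyRef], since ClassTag.Any is used
}}}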
*/ - @SerialVersionUID(3L) - class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { - def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] - def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] - def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] - override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] - override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]] - override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]] - } -} - -/** - * @tparam CC Collection type constructor (e.g. `ArraySeq`) - */ -trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { - import SeqFactory.UnapplySeqWrapper - final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? -} - -object ClassTagSeqFactory { - @SerialVersionUID(3L) - class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) - extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] - - /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be - * sound depending on the use of the `ClassTag` by the collection implementation. */ - @SerialVersionUID(3L) - class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) - extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] -} - -trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { - - override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.result() - } - - override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 - } - b.result() - } - -} - -/** - * @define factoryInfo - * This object provides a set of operations to create $Coll values. 
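For a sorted map factory this means, for example (a sketch):
{{{
  import scala.collection.immutable.TreeMap
  TreeMap("b" -> 2, "a" -> 1)   // TreeMap(a -> 1, b -> 2); needs an Ordering[String]
}}}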
- * - * @define coll collection - * @define Coll `Iterable` - */ -trait SortedMapFactory[+CC[_, _]] extends Serializable { - this: SortedMapFactory[CC] => - - def empty[K : Ordering, V]: CC[K, V] - - def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] - - def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) - - def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] - - implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) - -} - -object SortedMapFactory { - - /** - * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, - * respectively. - * - * @param factory The factory to fix the key and value types - * @tparam K Type of keys - * @tparam V Type of values - * @tparam CC Collection type constructor of the factory (e.g. `TreeMap`) - * @return A [[Factory]] that uses the given `factory` to build a map with keys of - * type `K` and values of type `V` - */ - implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) - - @SerialVersionUID(3L) - private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) - def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] - } - - implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) - private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { - def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) - def newBuilder(from: Any) = factory.newBuilder[K, V] - } - - @SerialVersionUID(3L) - class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { - override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) - def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] = delegate.from(it) - def empty[K : Ordering, V]: CC[K, V] = delegate.empty - def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder - } -} diff --git a/tests/pos-special/stdlib/collection/Hashing.scala b/tests/pos-special/stdlib/collection/Hashing.scala deleted file mode 100644 index 772dcf5c65da..000000000000 --- a/tests/pos-special/stdlib/collection/Hashing.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -import language.experimental.captureChecking - - -protected[collection] object Hashing { - - def elemHashCode(key: Any): Int = key.## - - def improve(hcode: Int): Int = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) - } - - def computeHash(key: Any): Int = - improve(elemHashCode(key)) - - /** - * Utility method to keep a subset of all bits in a given bitmap - * - * Example - * bitmap (binary): 00000001000000010000000100000001 - * keep (binary): 1010 - * result (binary): 00000001000000000000000100000000 - * - * @param bitmap the bitmap - * @param keep a bitmask containing which bits to keep - * @return the original bitmap with all bits where keep is not 1 set to 0 - */ - def keepBits(bitmap: Int, keep: Int): Int = { - var result = 0 - var current = bitmap - var kept = keep - while (kept != 0) { - // lowest remaining bit in current - val lsb = current ^ (current & (current - 1)) - if ((kept & 1) != 0) { - // mark bit in result bitmap - result |= lsb - } - // clear lowest remaining one bit in abm - current &= ~lsb - // look at the next kept bit - kept >>>= 1 - } - result - } - -} diff --git a/tests/pos-special/stdlib/collection/IndexedSeq.scala b/tests/pos-special/stdlib/collection/IndexedSeq.scala index a2d4cc942231..6e8e2bd0dc66 100644 --- a/tests/pos-special/stdlib/collection/IndexedSeq.scala +++ b/tests/pos-special/stdlib/collection/IndexedSeq.scala @@ -18,8 +18,6 @@ import scala.collection.Searching.{Found, InsertionPoint, SearchResult} import scala.collection.Stepper.EfficientSplit import scala.math.Ordering import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - /** Base trait for indexed sequences that have efficient `apply` and `length` */ trait IndexedSeq[+A] extends Seq[A] @@ -35,7 +33,7 @@ trait IndexedSeq[+A] extends Seq[A] object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](immutable.IndexedSeq) /** Base trait for indexed Seq operations */ -trait IndexedSeqOps[+A, +CC[_], +C] extends Any with IndexedSeqViewOps[A, CC, C] with SeqOps[A, CC, C] { self => +trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => def iterator: Iterator[A] = view.iterator @@ -88,7 +86,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with IndexedSeqViewOps[A, CC, C] override def dropRight(n: Int): C = fromSpecific(new IndexedSeqView.DropRight(this, n)) - override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)).unsafeAssumePure + override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)) override def reverse: C = fromSpecific(new IndexedSeqView.Reverse(this)) diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala deleted file mode 100644 index a16e06fa707d..000000000000 --- a/tests/pos-special/stdlib/collection/IndexedSeqView.scala +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection - -import scala.annotation.nowarn -import language.experimental.captureChecking - -trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { - self: IndexedSeqViewOps[A, CC, C]^ => -} - -/** View defined in terms of indexing a range */ -trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] { - self: IndexedSeqView[A]^ => - - override def view: IndexedSeqView[A]^{this} = this - - @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") - override def view(from: Int, until: Int): IndexedSeqView[A]^{this} = view.slice(from, until) - - override def iterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewIterator(this) - override def reverseIterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewReverseIterator(this) - - override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Appended(this, elem) - override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Prepended(elem, this) - override def take(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Take(this, n) - override def takeRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.TakeRight(this, n) - override def drop(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Drop(this, n) - override def dropRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.DropRight(this, n) - override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new IndexedSeqView.Map(this, f) - override def reverse: IndexedSeqView[A]^{this} = new IndexedSeqView.Reverse(this) - override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Slice(this, from, until) - override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) - - def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) - def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) - def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(prefix, this) - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "IndexedSeqView" -} - -object IndexedSeqView { - - @SerialVersionUID(3L) - private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { - this: IndexedSeqViewIterator[A]^ => - private[this] var current = 0 - private[this] var remainder = self.length - override def knownSize: Int = remainder - @inline private[this] def _hasNext: Boolean = remainder > 0 - def hasNext: Boolean = _hasNext - def next(): A = - if (_hasNext) { - val r = self(current) - current += 1 - remainder -= 1 - r - } else Iterator.empty.next() - - override def drop(n: Int): Iterator[A]^{this} = { - if (n > 0) { - current += n - remainder = Math.max(0, remainder - n) - } - this - } - - override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { - - def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value - - val formatFrom = formatRange(from) - val formatUntil = formatRange(until) - remainder = Math.max(0, formatUntil - formatFrom) - current = current + formatFrom - this - } - } - @SerialVersionUID(3L) - 
private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { - this: IndexedSeqViewReverseIterator[A]^ => - private[this] var remainder = self.length - private[this] var pos = remainder - 1 - @inline private[this] def _hasNext: Boolean = remainder > 0 - def hasNext: Boolean = _hasNext - def next(): A = - if (_hasNext) { - val r = self(pos) - pos -= 1 - remainder -= 1 - r - } else Iterator.empty.next() - - // from < 0 means don't move pos, until < 0 means don't limit remainder - // - override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { - if (_hasNext) { - if (remainder <= from) remainder = 0 // exhausted by big skip - else if (from <= 0) { // no skip, pos is same - if (until >= 0 && until < remainder) remainder = until // ...limited by until - } - else { - pos -= from // skip ahead - if (until >= 0 && until < remainder) { // ...limited by until - if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip - else remainder = until - from // ...limited by until, less the skip - } - else remainder -= from // ...otherwise just less the skip - } - } - this - } - } - - /** An `IndexedSeqViewOps` whose collection type and collection type constructor are unknown */ - type SomeIndexedSeqOps[A] = IndexedSeqViewOps[A, AnyConstr, _] - - @SerialVersionUID(3L) - class Id[+A](underlying: SomeIndexedSeqOps[A]^) - extends SeqView.Id(underlying) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A) - extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^) - extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^) - extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int) - extends SeqView.Take(underlying, n) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) - extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int) - extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) - extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B) - extends SeqView.Map(underlying, f) with IndexedSeqView[B] - - @SerialVersionUID(3L) - class Reverse[A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { - override def reverse: IndexedSeqView[A] = underlying match { - case x: IndexedSeqView[A] => x - case _ => super.reverse - } - } - - @SerialVersionUID(3L) - class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int) extends AbstractIndexedSeqView[A] { - protected val lo = from max 0 - protected val hi = (until max 0) min underlying.length - protected val len = (hi - lo) max 0 - @throws[IndexOutOfBoundsException] - def apply(i: Int): A = underlying(lo + i) - def length: Int = len - } -} - -/** Explicit instantiation of the `IndexedSeqView` trait to reduce class file size in subclasses. 
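These wrappers stay lazy until forced, and reversing twice returns the original view (a sketch):
{{{
  val v = Vector(1, 2, 3).view    // an IndexedSeqView[Int]
  v.map(_ * 2).take(2).toList     // List(2, 4); nothing is computed before toList
  v.reverse.reverse               // should be `v` itself, per Reverse.reverse above
}}}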
*/ -@SerialVersionUID(3L) -abstract class AbstractIndexedSeqView[+A] extends AbstractSeqView[A] with IndexedSeqView[A] diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala index bca80d7be108..85c0debc6685 100644 --- a/tests/pos-special/stdlib/collection/Iterable.scala +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -96,7 +96,7 @@ trait Iterable[+A] extends IterableOnce[A] * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]^): LazyZip2[A, B, this.type]^{this, that} = new LazyZip2(this, this, that) + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that) } /** Base trait for Iterable operations @@ -400,7 +400,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable if (i != headSize) fail } - iterableFactory.from(bs.map(_.result())).asInstanceOf // !!! needed for cc + iterableFactory.from(bs.map(_.result())) } def filter(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = false)) @@ -902,10 +902,10 @@ object IterableOps { protected def filtered: Iterable[A]^{this} = new View.Filter(self, p, isFlipped = false) - def map[B](f: A => B): CC[B]^{this, f} = + def map[B](f: A => B): CC[B]^{this} = self.iterableFactory.from(new View.Map(filtered, f)) - def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = + def flatMap[B](f: A => IterableOnce[B]): CC[B]^{this} = self.iterableFactory.from(new View.FlatMap(filtered, f)) def foreach[U](f: A => U): Unit = filtered.foreach(f) diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala b/tests/pos-special/stdlib/collection/IterableOnce.scala index a88be4943c58..6836a3bac39a 100644 --- a/tests/pos-special/stdlib/collection/IterableOnce.scala +++ b/tests/pos-special/stdlib/collection/IterableOnce.scala @@ -162,10 +162,10 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(it) @deprecated("Use .iterator.to(ArrayBuffer) instead", "2.13.0") - def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) + def toBuffer[B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) @deprecated("Use .iterator.toArray", "2.13.0") - def toArray[sealed B >: A: ClassTag]: Array[B] = it match { + def toArray[B >: A: ClassTag]: Array[B] = it match { case it: Iterable[B] => it.toArray[B] case _ => it.iterator.toArray[B] } @@ -272,11 +272,10 @@ object IterableOnce { math.max(math.min(math.min(len, srcLen), destLen - start), 0) /** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. 
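The three-argument `copyToArray` clamps to both lengths and returns the number of elements written, e.g. (a sketch):
{{{
  val xs  = List(1, 2, 3, 4)
  val arr = new Array[Int](3)
  xs.copyToArray(arr, 1, 10)   // returns 2; arr is now Array(0, 1, 2)
}}}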
*/ - @inline private[collection] def copyElemsToArray[A, sealed B >: A]( - elems: IterableOnce[A]^, - xs: Array[B], - start: Int = 0, - len: Int = Int.MaxValue): Int = + @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A], + xs: Array[B], + start: Int = 0, + len: Int = Int.MaxValue): Int = elems match { case src: Iterable[A] => src.copyToArray[B](xs, start, len) case src => src.iterator.copyToArray[B](xs, start, len) @@ -890,7 +889,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. * @@ -907,7 +906,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. * @@ -924,7 +923,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * * @note Reuse: $consumesIterator */ - def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { val it = iterator var i = start val end = start + math.min(len, xs.length - start) @@ -1313,13 +1312,13 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => @deprecated("Use .to(LazyList) instead of .toStream", "2.13.0") @`inline` final def toStream: immutable.Stream[A] = to(immutable.Stream) - @`inline` final def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) + @`inline` final def toBuffer[B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) /** Convert collection to array. * * Implementation note: DO NOT call [[Array.from]] from this method. 
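Usage is unchanged by dropping the `sealed` bound (a sketch):
{{{
  List(1, 2, 3).toArray   // Array(1, 2, 3); requires a ClassTag[Int]
}}}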
*/ - def toArray[sealed B >: A: ClassTag]: Array[B] = + def toArray[B >: A: ClassTag]: Array[B] = if (knownSize >= 0) { val destination = new Array[B](knownSize) copyToArray(destination, 0) diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index 90fd387069b0..ecd8d985bbf0 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -17,7 +17,7 @@ import scala.annotation.tailrec import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import scala.runtime.Statics import language.experimental.captureChecking -import annotation.unchecked.uncheckedCaptures +import caps.unsafe.unsafeAssumePure /** Iterators are data structures that allow to iterate over a sequence @@ -258,7 +258,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } // segment must have data, and must be complete unless they allow partial val ok = index > 0 && (partial || index == size) - if (ok) buffer = builder.result().asInstanceOf[Array[B @uncheckedCaptures]] + if (ok) buffer = builder.result().asInstanceOf[Array[B]] else prev = null ok } @@ -416,9 +416,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } @deprecated("Call scanRight on an Iterable instead.", "2.13.0") - def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = - ArrayBuffer.from[A @uncheckedCaptures](this).scanRight(z)(op).iterator - // @uncheckedCaptures is safe since the ArrayBuffer is local temporrary storage + def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = ArrayBuffer.from(this).scanRight(z)(op).iterator def indexWhere(p: A => Boolean, from: Int = 0): Int = { var i = math.max(from, 0) @@ -561,7 +559,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ def distinctBy[B](f: A -> B): Iterator[A]^{this} = new AbstractIterator[A] { - private[this] val traversedValues = mutable.HashSet.empty[B @uncheckedCaptures] + private[this] val traversedValues = mutable.HashSet.empty[B] private[this] var nextElementDefined: Boolean = false private[this] var nextElement: A = _ @@ -704,7 +702,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ private[this] var status = 0 private def store(a: A): Unit = { - if (lookahead == null) lookahead = new mutable.Queue[A @uncheckedCaptures] + if (lookahead == null) lookahead = new mutable.Queue[A] lookahead += a } def hasNext = { @@ -867,8 +865,8 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note Reuse: $consumesOneAndProducesTwoIterators */ def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = { - val gap = new scala.collection.mutable.Queue[A @uncheckedCaptures] - var ahead: Iterator[A @uncheckedCaptures] = null // ahead is captured by Partner, so A is not recognized as parametric + val gap = new scala.collection.mutable.Queue[A] + var ahead: Iterator[A] = null class Partner extends AbstractIterator[A] { override def knownSize: Int = self.synchronized { val thisSize = self.knownSize @@ -1145,7 +1143,9 @@ object Iterator extends IterableFactory[Iterator] { * Nested ConcatIterators are merged to avoid blowing the stack. 
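Such nesting arises from chained `++`, e.g. (a sketch):
{{{
  val it = Iterator(1) ++ Iterator(2) ++ Iterator(3)   // would otherwise nest one level per ++
  it.toList                                            // List(1, 2, 3)
}}}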
*/ private final class ConcatIterator[+A](val from: Iterator[A]^) extends AbstractIterator[A] { - private var current: Iterator[A @uncheckedCaptures]^{cap[ConcatIterator]} = from + private var current: Iterator[A @uncheckedCaptures] = from.unsafeAssumePure + // This should be Iteratpr[A]^, but fails since mutable variables can't capture cap. + // To do better we'd need to track nesting levels for universal capabiltities. private var tail: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null private var last: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null private var currentHasNextChecked = false diff --git a/tests/pos-special/stdlib/collection/JavaConverters.scala b/tests/pos-special/stdlib/collection/JavaConverters.scala deleted file mode 100644 index 69130eae1829..000000000000 --- a/tests/pos-special/stdlib/collection/JavaConverters.scala +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import java.util.{concurrent => juc} -import java.{lang => jl, util => ju} - -import scala.collection.convert._ -import scala.language.implicitConversions -import language.experimental.captureChecking - -/** A variety of decorators that enable converting between - * Scala and Java collections using extension methods, `asScala` and `asJava`. - * - * The extension methods return adapters for the corresponding API. - * - * The following conversions are supported via `asScala` and `asJava`: - *{{{ - * scala.collection.Iterable <=> java.lang.Iterable - * scala.collection.Iterator <=> java.util.Iterator - * scala.collection.mutable.Buffer <=> java.util.List - * scala.collection.mutable.Set <=> java.util.Set - * scala.collection.mutable.Map <=> java.util.Map - * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap - *}}} - * The following conversions are supported via `asScala` and through - * specially-named extension methods to convert to Java collections, as shown: - *{{{ - * scala.collection.Iterable <=> java.util.Collection (via asJavaCollection) - * scala.collection.Iterator <=> java.util.Enumeration (via asJavaEnumeration) - * scala.collection.mutable.Map <=> java.util.Dictionary (via asJavaDictionary) - *}}} - * In addition, the following one-way conversions are provided via `asJava`: - *{{{ - * scala.collection.Seq => java.util.List - * scala.collection.mutable.Seq => java.util.List - * scala.collection.Set => java.util.Set - * scala.collection.Map => java.util.Map - *}}} - * The following one way conversion is provided via `asScala`: - *{{{ - * java.util.Properties => scala.collection.mutable.Map - *}}} - * In all cases, converting from a source type to a target type and back - * again will return the original source object. For example: - * {{{ - * import scala.collection.JavaConverters._ - * - * val source = new scala.collection.mutable.ListBuffer[Int] - * val target: java.util.List[Int] = source.asJava - * val other: scala.collection.mutable.Buffer[Int] = target.asScala - * assert(source eq other) - * }}} - * Alternatively, the conversion methods have descriptive names and can be invoked explicitly. 
- * {{{ - * scala> val vs = java.util.Arrays.asList("hi", "bye") - * vs: java.util.List[String] = [hi, bye] - * - * scala> val ss = asScalaIterator(vs.iterator) - * ss: Iterator[String] = - * - * scala> .toList - * res0: List[String] = List(hi, bye) - * - * scala> val ss = asScalaBuffer(vs) - * ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye) - * }}} - */ -@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") -object JavaConverters extends AsJavaConverters with AsScalaConverters { - @deprecated("Use `asJava` instead", "2.13.0") - def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = asJava(i) - - @deprecated("Use `asJava` instead", "2.13.0") - def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = asJava(i) - - @deprecated("Use `asJava` instead", "2.13.0") - def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = asJava(b) - - @deprecated("Use `asJava` instead", "2.13.0") - def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = asJava(s) - - @deprecated("Use `asJava` instead", "2.13.0") - def seqAsJavaList[A](s: Seq[A]): ju.List[A] = asJava(s) - - @deprecated("Use `asJava` instead", "2.13.0") - def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = asJava(s) - - @deprecated("Use `asJava` instead", "2.13.0") - def setAsJavaSet[A](s: Set[A]): ju.Set[A] = asJava(s) - - @deprecated("Use `asJava` instead", "2.13.0") - def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = asJava(m) - - @deprecated("Use `asJava` instead", "2.13.0") - def mapAsJavaMap[K, V](m: Map[K, V]): ju.Map[K, V] = asJava(m) - - @deprecated("Use `asJava` instead", "2.13.0") - def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = asJava(m) - - - @deprecated("Use `asScala` instead", "2.13.0") - def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = asScala(i) - - @deprecated("Use `asScala` instead", "2.13.0") - def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = asScala(i) - - @deprecated("Use `asScala` instead", "2.13.0") - def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = asScala(i) - - @deprecated("Use `asScala` instead", "2.13.0") - def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = asScala(i) - - @deprecated("Use `asScala` instead", "2.13.0") - def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = asScala(l) - - @deprecated("Use `asScala` instead", "2.13.0") - def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = asScala(s) - - @deprecated("Use `asScala` instead", "2.13.0") - def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = asScala(m) - - @deprecated("Use `asScala` instead", "2.13.0") - def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = asScala(m) - - @deprecated("Use `asScala` instead", "2.13.0") - def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = asScala(p) - - @deprecated("Use `asScala` instead", "2.13.0") - def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = asScala(p) - - // Deprecated implicit conversions for code that directly imports them - - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`. - * @see [[asJavaIterator]] - */ - implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] = - new AsJava(asJavaIterator(i)) - - /** - * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`. 
- * @see [[asJavaEnumeration]] - */ - implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = - new AsJavaEnumeration(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`. - * @see [[asJavaIterable]] - */ - implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = - new AsJava(asJavaIterable(i)) - - /** - * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`. - * @see [[asJavaCollection]] - */ - implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = - new AsJavaCollection(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`. - * @see [[bufferAsJavaList]] - */ - implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = - new AsJava(bufferAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`. - * @see [[mutableSeqAsJavaList]] - */ - implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = - new AsJava(mutableSeqAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`. - * @see [[seqAsJavaList]] - */ - implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = - new AsJava(seqAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`. - * @see [[mutableSetAsJavaSet]] - */ - implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = - new AsJava(mutableSetAsJavaSet(s)) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`. - * @see [[setAsJavaSet]] - */ - implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = - new AsJava(setAsJavaSet(s)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`. - * @see [[mutableMapAsJavaMap]] - */ - implicit def mutableMapAsJavaMapConverter[K, V](m : mutable.Map[K, V]): AsJava[ju.Map[K, V]] = - new AsJava(mutableMapAsJavaMap(m)) - - /** - * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`. - * @see [[asJavaDictionary]] - */ - implicit def asJavaDictionaryConverter[K, V](m : mutable.Map[K, V]): AsJavaDictionary[K, V] = - new AsJavaDictionary(m) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`. - * @see [[mapAsJavaMap]] - */ - implicit def mapAsJavaMapConverter[K, V](m : Map[K, V]): AsJava[ju.Map[K, V]] = - new AsJava(mapAsJavaMap(m)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. - * @see [[mapAsJavaConcurrentMap]]. - */ - implicit def mapAsJavaConcurrentMapConverter[K, V](m: concurrent.Map[K, V]): AsJava[juc.ConcurrentMap[K, V]] = - new AsJava(mapAsJavaConcurrentMap(m)) - - - /** - * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`. - * @see [[asScalaIterator]] - */ - implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = - new AsScala(asScalaIterator(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`. 
-   * @see [[enumerationAsScalaIterator]]
-   */
-  implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
-    new AsScala(enumerationAsScalaIterator(i))
-
-  /**
-   * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`.
-   * @see [[iterableAsScalaIterable]]
-   */
-  implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
-    new AsScala(iterableAsScalaIterable(i))
-
-  /**
-   * Adds an `asScala` method that implicitly converts a Java `Collection` to a Scala `Iterable`.
-   * @see [[collectionAsScalaIterable]]
-   */
-  implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
-    new AsScala(collectionAsScalaIterable(i))
-
-  /**
-   * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`.
-   * @see [[asScalaBuffer]]
-   */
-  implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
-    new AsScala(asScalaBuffer(l))
-
-  /**
-   * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`.
-   * @see [[asScalaSet]]
-   */
-  implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
-    new AsScala(asScalaSet(s))
-
-  /**
-   * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`.
-   * @see [[mapAsScalaMap]]
-   */
-  implicit def mapAsScalaMapConverter[K, V](m : ju.Map[K, V]): AsScala[mutable.Map[K, V]] =
-    new AsScala(mapAsScalaMap(m))
-
-  /**
-   * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`.
-   * @see [[mapAsScalaConcurrentMap]]
-   */
-  implicit def mapAsScalaConcurrentMapConverter[K, V](m: juc.ConcurrentMap[K, V]): AsScala[concurrent.Map[K, V]] =
-    new AsScala(mapAsScalaConcurrentMap(m))
-
-  /**
-   * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`.
-   * @see [[dictionaryAsScalaMap]]
-   */
-  implicit def dictionaryAsScalaMapConverter[K, V](p: ju.Dictionary[K, V]): AsScala[mutable.Map[K, V]] =
-    new AsScala(dictionaryAsScalaMap(p))
-
-  /**
-   * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
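The `Properties` converter is handy for reading JVM properties as an ordinary Scala map; a brief sketch:

{{{
  import scala.collection.JavaConverters._ // deprecated since 2.13

  val props = new java.util.Properties()
  props.setProperty("lang", "scala")
  // via propertiesAsScalaMapConverter:
  val m: scala.collection.mutable.Map[String, String] = props.asScala
}}}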
- * @see [[propertiesAsScalaMap]] - */ - implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = - new AsScala(propertiesAsScalaMap(p)) - - - /** Generic class containing the `asJava` converter method */ - class AsJava[A](op: => A) { - /** Converts a Scala collection to the corresponding Java collection */ - def asJava: A = op - } - - /** Generic class containing the `asScala` converter method */ - class AsScala[A](op: => A) { - /** Converts a Java collection to the corresponding Scala collection */ - def asScala: A = op - } - - /** Generic class containing the `asJavaCollection` converter method */ - class AsJavaCollection[A](i: Iterable[A]) { - /** Converts a Scala `Iterable` to a Java `Collection` */ - def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) - } - - /** Generic class containing the `asJavaEnumeration` converter method */ - class AsJavaEnumeration[A](i: Iterator[A]) { - /** Converts a Scala `Iterator` to a Java `Enumeration` */ - def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) - } - - /** Generic class containing the `asJavaDictionary` converter method */ - class AsJavaDictionary[K, V](m : mutable.Map[K, V]) { - /** Converts a Scala `Map` to a Java `Dictionary` */ - def asJavaDictionary: ju.Dictionary[K, V] = JavaConverters.asJavaDictionary(m) - } -} diff --git a/tests/pos-special/stdlib/collection/LazyZipOps.scala b/tests/pos-special/stdlib/collection/LazyZipOps.scala deleted file mode 100644 index 1bb4173d219f..000000000000 --- a/tests/pos-special/stdlib/collection/LazyZipOps.scala +++ /dev/null @@ -1,423 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import scala.language.implicitConversions -import language.experimental.captureChecking - -/** Decorator representing lazily zipped pairs. - * - * @define coll pair - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - */ -final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1]^, coll2: Iterable[El2]^) { - - /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are - * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator. - * - * @param that the iterable providing the third element of each eventual triple - * @tparam B the type of the third element in each eventual triple - * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or - * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported. 
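A short usage sketch of the lazy-zip decorators defined in this file; nothing is computed until a strict operation such as `map` runs (2.13-style API):

{{{
  val xs = List(1, 2, 3)
  val ys = List("a", "b", "c")

  // Pairs are formed lazily; `map` forces them through the implicit BuildFrom.
  val pairs: List[(Int, String)] = xs.lazyZip(ys).map((x, y) => (x, y))

  // Chaining lazyZip yields a LazyZip3; no intermediate tuples are built.
  val sums: List[Int] = xs.lazyZip(ys).lazyZip(List(10, 20, 30)).map((x, _, z) => x + z)
}}}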
- */
-  def lazyZip[B](that: Iterable[B]^): LazyZip3[El1, El2, B, C1]^{this, that} = new LazyZip3(src, coll1, coll2, that)
-
-  def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = {
-    bf.fromSpecific(src)(new AbstractView[B] {
-      def iterator = new AbstractIterator[B] {
-        private[this] val elems1 = coll1.iterator
-        private[this] val elems2 = coll2.iterator
-        def hasNext = elems1.hasNext && elems2.hasNext
-        def next() = f(elems1.next(), elems2.next())
-      }
-      override def knownSize: Int = zipKnownSize
-      override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty
-    })
-  }
-
-  def flatMap[B, C](f: (El1, El2) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = {
-    bf.fromSpecific(src)(new AbstractView[B] {
-      def iterator = new AbstractIterator[B] {
-        private[this] val elems1 = coll1.iterator
-        private[this] val elems2 = coll2.iterator
-        private[this] var _current: Iterator[B] = Iterator.empty
-        private def current = {
-          while (!_current.hasNext && elems1.hasNext && elems2.hasNext)
-            _current = f(elems1.next(), elems2.next()).iterator
-          _current
-        }
-        def hasNext = current.hasNext
-        def next() = current.next()
-      }
-      override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize
-      override def isEmpty: Boolean = iterator.isEmpty
-    })
-  }
-
-  def filter[C](p: (El1, El2) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2), C]): C = {
-    bf.fromSpecific(src)(new AbstractView[(El1, El2)] {
-      def iterator = new AbstractIterator[(El1, El2)] {
-        private[this] val elems1 = coll1.iterator
-        private[this] val elems2 = coll2.iterator
-        private[this] var _current: (El1, El2) = _
-        private def current = {
-          while ((_current eq null) && elems1.hasNext && elems2.hasNext) {
-            val e1 = elems1.next()
-            val e2 = elems2.next()
-            if (p(e1, e2)) _current = (e1, e2)
-          }
-          _current
-        }
-        def hasNext = current ne null
-        def next() = {
-          val c = current
-          if (c ne null) {
-            _current = null
-            c
-          } else Iterator.empty.next()
-        }
-      }
-      override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize
-      override def isEmpty: Boolean = iterator.isEmpty
-    })
-  }
-
-  def exists(p: (El1, El2) => Boolean): Boolean = {
-    val elems1 = coll1.iterator
-    val elems2 = coll2.iterator
-    var res = false
-
-    while (!res && elems1.hasNext && elems2.hasNext) res = p(elems1.next(), elems2.next())
-
-    res
-  }
-
-  def forall(p: (El1, El2) => Boolean): Boolean = !exists((el1, el2) => !p(el1, el2))
-
-  def foreach[U](f: (El1, El2) => U): Unit = {
-    val elems1 = coll1.iterator
-    val elems2 = coll2.iterator
-
-    while (elems1.hasNext && elems2.hasNext) f(elems1.next(), elems2.next())
-  }
-
-  private def toIterable: View[(El1, El2)] = new AbstractView[(El1, El2)] {
-    def iterator = new AbstractIterator[(El1, El2)] {
-      private[this] val elems1 = coll1.iterator
-      private[this] val elems2 = coll2.iterator
-      def hasNext = elems1.hasNext && elems2.hasNext
-      def next() = (elems1.next(), elems2.next())
-    }
-    override def knownSize: Int = zipKnownSize
-    override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty
-  }
-
-  private def zipKnownSize: Int = {
-    val s1 = coll1.knownSize
-    if (s1 == 0) 0 else {
-      val s2 = coll2.knownSize
-      if (s2 == 0) 0 else s1 min s2
-    }
-  }
-
-  override def toString = s"$coll1.lazyZip($coll2)"
-}
-
-object LazyZip2 {
-  implicit def lazyZip2ToIterable[El1, El2](zipped2: LazyZip2[El1, El2, _]): View[(El1, El2)] = zipped2.toIterable
-}
-
-
-/** Decorator representing lazily zipped triples.
- * - * @define coll triple - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - */ -final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, - coll1: Iterable[El1]^, - coll2: Iterable[El2]^, - coll3: Iterable[El3]^) { - - /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are - * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator. - * - * @param that the iterable providing the fourth element of each eventual 4-tuple - * @tparam B the type of the fourth element in each eventual 4-tuple - * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples. - * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported. - */ - def lazyZip[B](that: Iterable[B]^): LazyZip4[El1, El2, El3, B, C1]^{this, that} = new LazyZip4(src, coll1, coll2, coll3, that) - - def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext - def next() = f(elems1.next(), elems2.next(), elems3.next()) - } - override def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty - }) - } - - def flatMap[B, C](f: (El1, El2, El3) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] var _current: Iterator[B] = Iterator.empty - private def current = { - while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext) - _current = f(elems1.next(), elems2.next(), elems3.next()).iterator - _current - } - def hasNext = current.hasNext - def next() = current.next() - } - override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - }) - } - - def filter[C](p: (El1, El2, El3) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3), C]): C = { - bf.fromSpecific(src)(new AbstractView[(El1, El2, El3)] { - def iterator = new AbstractIterator[(El1, El2, El3)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] var _current: (El1, El2, El3) = _ - private def current = { - while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext) { - val e1 = elems1.next() - val e2 = elems2.next() - val e3 = elems3.next() - if (p(e1, e2, e3)) _current = (e1, e2, e3) - } - _current - } - def hasNext = current ne null - def next() = { - val c = current - if (c ne null) { - _current = null - c - } else Iterator.empty.next() - } - } - override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - }) - } - - def exists(p: (El1, El2, El3) => Boolean): Boolean = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - val elems3 = coll3.iterator - var res = false - - while (!res && elems1.hasNext && 
elems2.hasNext && elems3.hasNext) - res = p(elems1.next(), elems2.next(), elems3.next()) - - res - } - - def forall(p: (El1, El2, El3) => Boolean): Boolean = !exists((el1, el2, el3) => !p(el1, el2, el3)) - - def foreach[U](f: (El1, El2, El3) => U): Unit = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - val elems3 = coll3.iterator - - while (elems1.hasNext && elems2.hasNext && elems3.hasNext) - f(elems1.next(), elems2.next(), elems3.next()) - } - - private def toIterable: View[(El1, El2, El3)] = new AbstractView[(El1, El2, El3)] { - def iterator = new AbstractIterator[(El1, El2, El3)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext - def next() = (elems1.next(), elems2.next(), elems3.next()) - } - override def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty - } - - private def zipKnownSize: Int = { - val s1 = coll1.knownSize - if (s1 == 0) 0 else { - val s2 = coll2.knownSize - if (s2 == 0) 0 else { - val s3 = coll3.knownSize - if (s3 == 0) 0 else s1 min s2 min s3 - } - } - } - - override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3)" -} - -object LazyZip3 { - implicit def lazyZip3ToIterable[El1, El2, El3](zipped3: LazyZip3[El1, El2, El3, _]): View[(El1, El2, El3)] = zipped3.toIterable -} - - - -/** Decorator representing lazily zipped 4-tuples. - * - * @define coll tuple - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - */ -final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, - coll1: Iterable[El1]^, - coll2: Iterable[El2]^, - coll3: Iterable[El3]^, - coll4: Iterable[El4]^) { - - def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] val elems4 = coll4.iterator - def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext - def next() = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) - } - override def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty - }) - } - - def flatMap[B, C](f: (El1, El2, El3, El4) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] val elems4 = coll4.iterator - private[this] var _current: Iterator[B] = Iterator.empty - private def current = { - while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) - _current = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()).iterator - _current - } - def hasNext = current.hasNext - def next() = current.next() - } - override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - }) - } - - def filter[C](p: (El1, El2, El3, El4) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3, El4), C]): C = { - 
bf.fromSpecific(src)(new AbstractView[(El1, El2, El3, El4)] { - def iterator = new AbstractIterator[(El1, El2, El3, El4)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] val elems4 = coll4.iterator - private[this] var _current: (El1, El2, El3, El4) = _ - private def current = { - while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) { - val e1 = elems1.next() - val e2 = elems2.next() - val e3 = elems3.next() - val e4 = elems4.next() - if (p(e1, e2, e3, e4)) _current = (e1, e2, e3, e4) - } - _current - } - def hasNext = current ne null - def next() = { - val c = current - if (c ne null) { - _current = null - c - } else Iterator.empty.next() - } - } - override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - }) - } - - def exists(p: (El1, El2, El3, El4) => Boolean): Boolean = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - val elems3 = coll3.iterator - val elems4 = coll4.iterator - var res = false - - while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) - res = p(elems1.next(), elems2.next(), elems3.next(), elems4.next()) - - res - } - - def forall(p: (El1, El2, El3, El4) => Boolean): Boolean = !exists((el1, el2, el3, el4) => !p(el1, el2, el3, el4)) - - def foreach[U](f: (El1, El2, El3, El4) => U): Unit = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - val elems3 = coll3.iterator - val elems4 = coll4.iterator - - while (elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) - f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) - } - - private def toIterable: View[(El1, El2, El3, El4)] = new AbstractView[(El1, El2, El3, El4)] { - def iterator = new AbstractIterator[(El1, El2, El3, El4)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] val elems4 = coll4.iterator - def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext - def next() = (elems1.next(), elems2.next(), elems3.next(), elems4.next()) - } - override def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty - } - - private def zipKnownSize: Int = { - val s1 = coll1.knownSize - if (s1 == 0) 0 else { - val s2 = coll2.knownSize - if (s2 == 0) 0 else { - val s3 = coll3.knownSize - if (s3 == 0) 0 else { - val s4 = coll4.knownSize - if (s4 == 0) 0 else s1 min s2 min s3 min s4 - } - } - } - } - - override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3).lazyZip($coll4)" -} - -object LazyZip4 { - implicit def lazyZip4ToIterable[El1, El2, El3, El4](zipped4: LazyZip4[El1, El2, El3, El4, _]): View[(El1, El2, El3, El4)] = - zipped4.toIterable -} diff --git a/tests/pos-special/stdlib/collection/Map.scala b/tests/pos-special/stdlib/collection/Map.scala index 8ab25a3c13e0..ef4f915ea573 100644 --- a/tests/pos-special/stdlib/collection/Map.scala +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -18,15 +18,13 @@ import scala.collection.generic.DefaultSerializable import scala.collection.mutable.StringBuilder import scala.util.hashing.MurmurHash3 import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure /** Base Map type */ trait Map[K, +V] 
extends Iterable[(K, V)] with MapOps[K, V, Map, Map[K, V]] with MapFactoryDefaults[K, V, Map, Iterable] - with Equals - with Pure { + with Equals { def mapFactory: scala.collection.MapFactory[Map] = Map @@ -104,9 +102,8 @@ trait Map[K, +V] trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] extends IterableOps[(K, V), Iterable, C] with PartialFunction[K, V] { - this: MapOps[K, V, CC, C]^ => - override def view: MapView[K, V]^{this} = new MapView.Id(this) + override def view: MapView[K, V] = new MapView.Id(this) /** Returns a [[Stepper]] for the keys of this map. See method [[stepper]]. */ def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = { @@ -255,7 +252,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * the predicate `p`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") - def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) + def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. @@ -263,7 +260,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.mapValues(f). A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") - def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) + def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) /** Defines the default value computation for the map, * returned when a key is not found @@ -356,7 +353,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] @deprecated("Consider requiring an immutable Map.", "2.13.0") @`inline` def -- (keys: IterableOnce[K]^): C = { lazy val keysSet = keys.iterator.to(immutable.Set) - fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))).unsafeAssumePure + fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))) } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") @@ -377,17 +374,17 @@ object MapOps { */ @SerialVersionUID(3L) class WithFilter[K, +V, +IterableCC[_], +CC[_, _] <: IterableOps[_, AnyConstr, _]]( - self: (MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _])^, + self: MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _], p: ((K, V)) => Boolean ) extends IterableOps.WithFilter[(K, V), IterableCC](self, p) with Serializable { - def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2]^{this, f} = + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = self.mapFactory.from(new View.Map(filtered, f)) - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2]^{this, f} = + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = self.mapFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{this, q} = + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{p, q} = new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) } diff --git a/tests/pos-special/stdlib/collection/MapView.scala b/tests/pos-special/stdlib/collection/MapView.scala 
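The deprecation messages above spell out the strict replacements for `filterKeys` and `mapValues`; a quick sketch of both migrations:

{{{
  val m = Map(1 -> "a", 2 -> "b", -1 -> "c")

  // Strict forms of the deprecated filterKeys/mapValues:
  val positive: Map[Int, String] = m.view.filterKeys(_ > 0).toMap
  val upper: Map[Int, String]    = m.view.mapValues(_.toUpperCase).toMap
}}}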
deleted file mode 100644 index ac9e88466052..000000000000 --- a/tests/pos-special/stdlib/collection/MapView.scala +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import scala.annotation.nowarn -import scala.collection.MapView.SomeMapOps -import scala.collection.mutable.Builder -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - -trait MapView[K, +V] - extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]] - with View[(K, V)] { - this: MapView[K, V]^ => - - override def view: MapView[K, V]^{this} = this - - // Ideally this returns a `View`, but bincompat - /** Creates a view over all keys of this map. - * - * @return the keys of this map as a view. - */ - override def keys: Iterable[K]^{this} = new MapView.Keys(this) - - // Ideally this returns a `View`, but bincompat - /** Creates a view over all values of this map. - * - * @return the values of this map as a view. - */ - override def values: Iterable[V]^{this} = new MapView.Values(this) - - /** Filters this map by retaining only keys satisfying a predicate. - * @param p the predicate used to test keys - * @return an immutable map consisting only of those key value pairs of this map where the key satisfies - * the predicate `p`. The resulting map wraps the original map without copying any elements. - */ - override def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) - - /** Transforms this map by applying a function to every retrieved value. - * @param f the function used to transform values of this map. - * @return a map view which maps every key of this map - * to `f(this(key))`. The resulting map wraps the original map without copying any elements. - */ - override def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) - - override def filter(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, false, pred) - - override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, true, pred) - - override def partition(p: ((K, V)) => Boolean): (MapView[K, V]^{this, p}, MapView[K, V]^{this, p}) = (filter(p), filterNot(p)) - - override def tapEach[U](f: ((K, V)) => U): MapView[K, V]^{this, f} = new MapView.TapEach(this, f) - - def mapFactory: MapViewFactory = MapView - - override def empty: MapView[K, V] = mapFactory.empty - - override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l]^{this, p} = new MapOps.WithFilter(this, p) - - override def toString: String = super[View].toString - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "MapView" -} - -object MapView extends MapViewFactory { - - /** An `IterableOps` whose collection type and collection type constructor are unknown */ - type SomeIterableConstr[X, Y] = IterableOps[_, AnyConstr, _] - /** A `MapOps` whose collection type and collection type constructor are (mostly) unknown */ - type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _] - - @SerialVersionUID(3L) - object EmptyMapView extends AbstractMapView[Any, Nothing] { - // !!! 
cc problem: crash when we replace the line with - // private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { - override def get(key: Any): Option[Nothing] = None - override def iterator: Iterator[Nothing] = Iterator.empty[Nothing] - override def knownSize: Int = 0 - override def isEmpty: Boolean = true - override def filterKeys(p: Any => Boolean): MapView[Any, Nothing] = this - override def mapValues[W](f: Nothing => W): MapView[Any, Nothing] = this - override def filter(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this - override def filterNot(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this - override def partition(p: ((Any, Nothing)) => Boolean): (MapView[Any, Nothing], MapView[Any, Nothing]) = (this, this) - } - - @SerialVersionUID(3L) - class Id[K, +V](underlying: SomeMapOps[K, V]^) extends AbstractMapView[K, V] { - def get(key: K): Option[V] = underlying.get(key) - def iterator: Iterator[(K, V)]^{this} = underlying.iterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - // Ideally this is public, but bincompat - @SerialVersionUID(3L) - private class Keys[K](underlying: SomeMapOps[K, _]^) extends AbstractView[K] { - def iterator: Iterator[K]^{this} = underlying.keysIterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - // Ideally this is public, but bincompat - @SerialVersionUID(3L) - private class Values[+V](underlying: SomeMapOps[_, V]^) extends AbstractView[V] { - def iterator: Iterator[V]^{this} = underlying.valuesIterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class MapValues[K, +V, +W](underlying: SomeMapOps[K, V]^, f: V => W) extends AbstractMapView[K, W] { - def iterator: Iterator[(K, W)]^{this} = underlying.iterator.map(kv => (kv._1, f(kv._2))) - def get(key: K): Option[W] = underlying.get(key).map(f) - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class FilterKeys[K, +V](underlying: SomeMapOps[K, V]^, p: K => Boolean) extends AbstractMapView[K, V] { - def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filter { case (k, _) => p(k) } - def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None - override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - } - - @SerialVersionUID(3L) - class Filter[K, +V](underlying: SomeMapOps[K, V]^, isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { - def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filterImpl(p, isFlipped) - def get(key: K): Option[V] = underlying.get(key) match { - case s @ Some(v) if p((key, v)) != isFlipped => s - case _ => None - } - override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - } - - @SerialVersionUID(3L) - class TapEach[K, +V, +U](underlying: SomeMapOps[K, V]^, f: ((K, V)) => U) extends AbstractMapView[K, V] { - override def get(key: K): Option[V] = { - underlying.get(key) match { - case s @ Some(v) => - f((key, v)) - s - case None => None - } - } - override def iterator: Iterator[(K, V)]^{this} = underlying.iterator.tapEach(f) - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = 
underlying.isEmpty - } - - override def newBuilder[sealed X, sealed Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) - - override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]] - - override def from[K, V](it: IterableOnce[(K, V)]^): View[(K, V)] = - View.from(it).unsafeAssumePure - // unsafeAssumePure needed here since MapViewFactory inherits from MapFactory, - // and the latter assumes maps are strict, so from's result captures nothing. - - override def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} = it match { - case mv: MapView[K, V] => mv - case other => new MapView.Id(other) - } - - override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) -} - -trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y)]})#l] { - - def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] - - def empty[X, Y]: MapView[X, Y] - - def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} - - override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) -} - -/** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. */ -@SerialVersionUID(3L) -abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V]: - this: AbstractMapView[K, V]^ => - diff --git a/tests/pos-special/stdlib/collection/Searching.scala b/tests/pos-special/stdlib/collection/Searching.scala deleted file mode 100644 index f5139422e24c..000000000000 --- a/tests/pos-special/stdlib/collection/Searching.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import scala.language.implicitConversions -import scala.collection.generic.IsSeq -import language.experimental.captureChecking - -object Searching { - - /** The result of performing a search on a sorted sequence - * - * Example usage: - * - * {{{ - * val list = List(1, 3, 4, 5) // list must be sorted before searching - * list.search(4) // Found(2) - * list.search(2) // InsertionPoint(1) - * }}} - * - * */ - sealed abstract class SearchResult { - /** The index corresponding to the element searched for in the sequence, if it was found, - * or the index where the element would be inserted in the sequence, if it was not in the sequence */ - def insertionPoint: Int - } - - /** The result of performing a search on a sorted sequence, where the element was found. 
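A small sketch of how `SearchResult` is typically consumed; `insertSorted` is a hypothetical helper, not part of this API:

{{{
  import scala.collection.Searching.{Found, InsertionPoint}
  import scala.collection.mutable.ArrayBuffer

  // Hypothetical helper: insert x so that buf stays sorted.
  def insertSorted(buf: ArrayBuffer[Int], x: Int): Unit =
    buf.search(x) match {
      case Found(i)          => buf.insert(i, x) // already present; place beside it
      case InsertionPoint(i) => buf.insert(i, x)
    }
}}}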
- * - * @param foundIndex the index corresponding to the element searched for in the sequence - */ - case class Found(foundIndex: Int) extends SearchResult { - override def insertionPoint: Int = foundIndex - } - - /** The result of performing a search on a sorted sequence, where the element was not found - * - * @param insertionPoint the index where the element would be inserted in the sequence - */ - case class InsertionPoint(insertionPoint: Int) extends SearchResult - - @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") - class SearchImpl[Repr, A](private val coll: SeqOps[A, AnyConstr, _]) extends AnyVal - - @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") - implicit def search[Repr, A](coll: Repr)(implicit fr: IsSeq[Repr]): SearchImpl[Repr, fr.A] = - new SearchImpl(fr.conversion(coll)) -} diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala index 365a1db1b849..caabf6fa6436 100644 --- a/tests/pos-special/stdlib/collection/Seq.scala +++ b/tests/pos-special/stdlib/collection/Seq.scala @@ -18,7 +18,6 @@ import Searching.{Found, InsertionPoint, SearchResult} import scala.annotation.nowarn import language.experimental.captureChecking import caps.unsafe.unsafeAssumePure -import scala.annotation.unchecked.uncheckedCaptures /** Base trait for sequence collections * @@ -78,12 +77,10 @@ object Seq extends SeqFactory.Delegate[Seq](immutable.Seq) * @define coll sequence * @define Coll `Seq` */ -trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => +trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => override def view: SeqView[A] = new SeqView.Id[A](this) - def iterableFactory: FreeSeqFactory[CC] - /** Get the element at the specified index. This operation is provided for convenience in `Seq`. It should * not be assumed to be efficient unless you have an `IndexedSeq`. */ @throws[IndexOutOfBoundsException] @@ -237,7 +234,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => * * @return an iterator yielding the elements of this $coll in reversed order */ - override def reverseIterator: Iterator[A] = reversed.iterator + def reverseIterator: Iterator[A] = reversed.iterator /** Tests whether this $coll contains the given sequence at a given index. * @@ -601,8 +598,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => if (!hasNext) Iterator.empty.next() - val forcedElms = new mutable.ArrayBuffer[A @uncheckedCaptures](elms.size) ++= elms - // uncheckedCaptures OK since used only locally + val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms val result = (newSpecificBuilder ++= forcedElms).result() var i = idxs.length - 2 while(i >= 0 && idxs(i) >= idxs(i+1)) @@ -893,7 +889,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => * part of the result, but any following occurrences will. */ def diff[B >: A](that: Seq[B]): C = { - val occ = occCounts[B @uncheckedCaptures](that) + val occ = occCounts(that) fromSpecific(iterator.filter { x => var include = false occ.updateWith(x) { @@ -918,7 +914,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => * in the result, but any following occurrences will be omitted. 
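Both `diff` and `intersect` use these occurrence counts, so they behave as multiset operations on `Seq`; a brief illustration:

{{{
  val xs = Seq(1, 1, 2, 3)
  val ys = Seq(1, 2, 2)

  xs.diff(ys)      // Seq(1, 3): only one occurrence of 1 is cancelled
  xs.intersect(ys) // Seq(1, 2): occurrences are counted, not mere membership
}}}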
*/ def intersect[B >: A](that: Seq[B]): C = { - val occ = occCounts[B @uncheckedCaptures](that) + val occ = occCounts(that) fromSpecific(iterator.filter { x => var include = true occ.updateWith(x) { @@ -966,7 +962,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => iterableFactory.from(new View.Updated(this, index, elem)) } - protected[collection] def occCounts[sealed B](sq: Seq[B]): mutable.Map[B, Int] = { + protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { val occ = new mutable.HashMap[B, Int]() for (y <- sq) occ.updateWith(y) { case None => Some(1) diff --git a/tests/pos-special/stdlib/collection/SeqMap.scala b/tests/pos-special/stdlib/collection/SeqMap.scala deleted file mode 100644 index a7f2c629b61d..000000000000 --- a/tests/pos-special/stdlib/collection/SeqMap.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -import scala.annotation.nowarn - -/** - * A generic trait for ordered maps. Concrete classes have to provide - * functionality for the abstract methods in `SeqMap`. - * - * Note that when checking for equality [[SeqMap]] does not take into account - * ordering. - * - * @tparam K the type of the keys contained in this linked map. - * @tparam V the type of the values associated with the keys in this linked map. - * @define coll immutable seq map - * @define Coll `immutable.SeqMap` - */ - -trait SeqMap[K, +V] extends Map[K, V] - with MapOps[K, V, SeqMap, SeqMap[K, V]] - with MapFactoryDefaults[K, V, SeqMap, Iterable] { - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "SeqMap" - - override def mapFactory: MapFactory[SeqMap] = SeqMap -} - -object SeqMap extends MapFactory.Delegate[immutable.SeqMap](immutable.SeqMap) - diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala deleted file mode 100644 index a4ca1143f8b4..000000000000 --- a/tests/pos-special/stdlib/collection/SeqView.scala +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.annotation.nowarn -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure -import scala.annotation.unchecked.uncheckedCaptures - -/** !!! Scala 2 difference: Need intermediate trait SeqViewOps to collect the - * necessary functionality over which SeqViews are defined, and at the same - * time allowing impure operations. Scala 2 uses SeqOps here, but SeqOps is - * pure, whereas SeqViews are Iterables which can be impure (for instance, - * mapping a SeqView with an impure function gives an impure view). 
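A minimal illustration of the laziness in question: the mapped view merely stores the function, so whatever the function captures (including impure capabilities) is retained by the view:

{{{
  import scala.collection.SeqView

  var calls = 0
  val v: SeqView[Int] = List(1, 2, 3).view.map { x => calls += 1; x * 2 }
  assert(calls == 0) // the function is only stored, not yet run
  v.toList           // forcing the view runs it; calls is now 3
}}}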
- */ -trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { - self: SeqViewOps[A, CC, C]^ => - - def length: Int - def apply(x: Int): A - def appended[B >: A](elem: B): CC[B]^{this} - def prepended[B >: A](elem: B): CC[B]^{this} - def reverse: C^{this} - def sorted[B >: A](implicit ord: Ordering[B]): C^{this} - - def reverseIterator: Iterator[A]^{this} = reversed.iterator -} - -trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] { - self: SeqView[A]^ => - - override def view: SeqView[A]^{this} = this - - override def map[B](f: A => B): SeqView[B]^{this, f} = new SeqView.Map(this, f) - override def appended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Appended(this, elem) - override def prepended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Prepended(elem, this) - override def reverse: SeqView[A]^{this} = new SeqView.Reverse(this) - override def take(n: Int): SeqView[A]^{this} = new SeqView.Take(this, n) - override def drop(n: Int): SeqView[A]^{this} = new SeqView.Drop(this, n) - override def takeRight(n: Int): SeqView[A]^{this} = new SeqView.TakeRight(this, n) - override def dropRight(n: Int): SeqView[A]^{this} = new SeqView.DropRight(this, n) - override def tapEach[U](f: A => U): SeqView[A]^{this, f} = new SeqView.Map(this, { (a: A) => f(a); a }) - - def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) - def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) - def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(prefix, this) - - override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A]^{this} = new SeqView.Sorted(this, ord) - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "SeqView" -} - -object SeqView { - - /** A `SeqOps` whose collection type and collection type constructor are unknown */ - private type SomeSeqOps[+A] = SeqViewOps[A, AnyConstr, _] - - /** A view that doesn’t apply any transformation to an underlying sequence */ - @SerialVersionUID(3L) - class Id[+A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { - def apply(idx: Int): A = underlying.apply(idx) - def length: Int = underlying.length - def iterator: Iterator[A]^{this} = underlying.iterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class Map[+A, +B](underlying: SomeSeqOps[A]^, f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { - def apply(idx: Int): B = f(underlying(idx)) - def length: Int = underlying.length - } - - @SerialVersionUID(3L) - class Appended[+A](underlying: SomeSeqOps[A]^, elem: A) extends View.Appended(underlying, elem) with SeqView[A] { - def apply(idx: Int): A = if (idx == underlying.length) elem else underlying(idx) - def length: Int = underlying.length + 1 - } - - @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeSeqOps[A]^) extends View.Prepended(elem, underlying) with SeqView[A] { - def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1) - def length: Int = underlying.length + 1 - } - - @SerialVersionUID(3L) - class Concat[A](prefix: SomeSeqOps[A]^, suffix: SomeSeqOps[A]^) extends View.Concat[A](prefix, suffix) with SeqView[A] { - def apply(idx: Int): A = { - val l = prefix.length - if (idx < l) prefix(idx) else suffix(idx - l) - } - def length: Int = prefix.length 
+ suffix.length - } - - @SerialVersionUID(3L) - class Reverse[A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { - def apply(i: Int) = underlying.apply(size - 1 - i) - def length = underlying.size - def iterator: Iterator[A]^{this} = underlying.reverseIterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class Take[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.Take(underlying, n) with SeqView[A] { - def apply(idx: Int): A = if (idx < n) { - underlying(idx) - } else { - throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${if (underlying.knownSize >= 0) knownSize - 1 else "unknown"})") - } - def length: Int = underlying.length min normN - } - - @SerialVersionUID(3L) - class TakeRight[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { - private[this] val delta = (underlying.size - (n max 0)) max 0 - def length = underlying.size - delta - @throws[IndexOutOfBoundsException] - def apply(i: Int) = underlying.apply(i + delta) - } - - @SerialVersionUID(3L) - class Drop[A](underlying: SomeSeqOps[A]^, n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { - def length = (underlying.size - normN) max 0 - @throws[IndexOutOfBoundsException] - def apply(i: Int) = underlying.apply(i + normN) - override def drop(n: Int): SeqView[A]^{this} = new Drop(underlying, this.n + n) - } - - @SerialVersionUID(3L) - class DropRight[A](underlying: SomeSeqOps[A]^, n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { - private[this] val len = (underlying.size - (n max 0)) max 0 - def length = len - @throws[IndexOutOfBoundsException] - def apply(i: Int) = underlying.apply(i) - } - - @SerialVersionUID(3L) - class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A]^, - private[this] val len: Int, - ord: Ordering[B]) - extends SeqView[A] { - outer: Sorted[A, B]^ => - - // force evaluation immediately by calling `length` so infinite collections - // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls - def this(underlying: SomeSeqOps[A]^, ord: Ordering[B]) = this(underlying, underlying.length, ord) - - @SerialVersionUID(3L) - private[this] class ReverseSorted extends SeqView[A] { - private[this] lazy val _reversed = new SeqView.Reverse(_sorted) - - def apply(i: Int): A = _reversed.apply(i) - def length: Int = len - def iterator: Iterator[A]^{this} = Iterator.empty ++ _reversed.iterator // very lazy - override def knownSize: Int = len - override def isEmpty: Boolean = len == 0 - override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory) - override def reverse: SeqView[A]^{outer} = outer - override protected def reversed: Iterable[A] = outer.unsafeAssumePure - - override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} = - if (ord1 == Sorted.this.ord) outer.unsafeAssumePure - else if (ord1.isReverseOf(Sorted.this.ord)) this - else new Sorted(elems, len, ord1) - } - - @volatile private[this] var evaluated = false - - private[this] lazy val _sorted: Seq[A] = { - val res = { - val len = this.len - if (len == 0) Nil - else if (len == 1) List(underlying.head) - else { - val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] - underlying.copyToArray(arr) - java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) - // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it - // is safe because: - // - the ArraySeq 
is immutable, and items that are not of type A
-        //   cannot be added to it
-        // - we know it only contains items of type A (and if this collection
-        //   contains items of another type, we'd get a CCE anyway)
-        // - the cast doesn't actually do anything in the runtime because the
-        //   type of A is not known and Array[_] is Array[AnyRef]
-        immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A @uncheckedCaptures]])
-      }
-    }
-    evaluated = true
-    underlying = null
-    res
-  }
-
-  private[this] def elems: SomeSeqOps[A]^{this} = {
-    val orig = underlying
-    if (evaluated) _sorted else orig
-  }
-
-  def apply(i: Int): A = _sorted.apply(i)
-  def length: Int = len
-  def iterator: Iterator[A]^{this} = Iterator.empty ++ _sorted.iterator // very lazy
-  override def knownSize: Int = len
-  override def isEmpty: Boolean = len == 0
-  override def to[C1](factory: Factory[A, C1]): C1 = _sorted.to(factory)
-  override def reverse: SeqView[A] = new ReverseSorted
-  // we know `_sorted` is either tiny or has efficient random access,
-  // so this is acceptable for `reversed`
-  override protected def reversed: Iterable[A] = new ReverseSorted
-
-  override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} =
-    if (ord1 == this.ord) this
-    else if (ord1.isReverseOf(this.ord)) reverse
-    else new Sorted(elems, len, ord1)
-  }
-}
-
-/** Explicit instantiation of the `SeqView` trait to reduce class file size in subclasses. */
-@SerialVersionUID(3L)
-abstract class AbstractSeqView[+A] extends AbstractView[A] with SeqView[A]
diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala
deleted file mode 100644
index a9c279b82a49..000000000000
--- a/tests/pos-special/stdlib/collection/Set.scala
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */
-
-package scala
-package collection
-
-import scala.util.hashing.MurmurHash3
-import java.lang.String
-
-import scala.annotation.nowarn
-import language.experimental.captureChecking
-
-/** Base trait for set collections.
-  */
-trait Set[A]
-  extends Iterable[A]
-    with SetOps[A, Set, Set[A]]
-    with Equals
-    with IterableFactoryDefaults[A, Set]
-    with Pure {
-  self: Set[A] =>
-
-  def canEqual(that: Any) = true
-
-  /**
-   * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if
-   *   - the argument `that` is a `Set`,
-   *   - the two sets have the same [[size]], and
-   *   - for every `element` of this set, `other.contains(element) == true`.
-   *
-   * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality
-   * to specific set types. The `Set` implementations in the standard library can all be compared; their `canEqual`
-   * methods return `true`.
-   *
-   * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same
-   * element equivalence function in their lookup operation. For example, the element equivalence operation in a
-   * [[scala.collection.immutable.TreeSet]] is defined by its ordering. Comparing a `TreeSet` with a `HashSet` leads
-   * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2`
-   * (used for lookup in `HashSet`).
-   *
-   * {{{
-   *   scala> import scala.collection.immutable._
-   *   scala> val ord: Ordering[String] = _ compareToIgnoreCase _
-   *
-   *   scala> TreeSet("A")(ord) == HashSet("a")
-   *   val res0: Boolean = false
-   *
-   *   scala> HashSet("a") == TreeSet("A")(ord)
-   *   val res1: Boolean = true
-   * }}}
-   *
-   *
-   * @param that The set to which this set is compared
-   * @return `true` if the two sets are equal according to the description
-   */
-  override def equals(that: Any): Boolean =
-    (this eq that.asInstanceOf[AnyRef]) || (that match {
-      case set: Set[A @unchecked] if set.canEqual(this) =>
-        (this.size == set.size) && {
-          try this.subsetOf(set)
-          catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228
-        }
-      case _ =>
-        false
-    })
-
-  override def hashCode(): Int = MurmurHash3.setHash(this)
-
-  override def iterableFactory: IterableFactory[Set] = Set
-
-  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
-  override protected[this] def stringPrefix: String = "Set"
-
-  override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too
-}
-
-/** Base trait for set operations
-  *
-  * @define coll set
-  * @define Coll `Set`
-  */
-trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]]
-  extends IterableOps[A, CC, C], (A -> Boolean) { self =>
-
-  def contains(elem: A): Boolean
-
-  /** Tests if some element is contained in this set.
-    *
-    * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
-    * @param elem the element to test for membership.
-    * @return `true` if `elem` is contained in this set, `false` otherwise.
-    */
-  @`inline` final def apply(elem: A): Boolean = this.contains(elem)
-
-  /** Tests whether this set is a subset of another set.
-    *
-    * @param that the set to test.
-    * @return `true` if this set is a subset of `that`, i.e. if
-    *         every element of this set is also an element of `that`.
-    */
-  def subsetOf(that: Set[A]): Boolean = this.forall(that)
-
-  /** An iterator over all subsets of this set of the given size.
-    * If the requested size is impossible, an empty iterator is returned.
-    *
-    * @param len the size of the subsets.
-    * @return the iterator.
-    */
-  def subsets(len: Int): Iterator[C] = {
-    if (len < 0 || len > size) Iterator.empty
-    else new SubsetsItr(this.to(IndexedSeq), len)
-  }
-
-  /** An iterator over all subsets of this set.
-    *
-    * @return the iterator.
-    */
-  def subsets(): Iterator[C] = new AbstractIterator[C] {
-    private[this] val elms = SetOps.this.to(IndexedSeq)
-    private[this] var len = 0
-    private[this] var itr: Iterator[C] = Iterator.empty
-
-    def hasNext = len <= elms.size || itr.hasNext
-    def next() = {
-      if (!itr.hasNext) {
-        if (len > elms.size) Iterator.empty.next()
-        else {
-          itr = new SubsetsItr(elms, len)
-          len += 1
-        }
-      }
-
-      itr.next()
-    }
-  }
-
-  /** An iterator over all subsets containing exactly `len` elements.
-    * If the elements of this collection are ordered, then the subsets will also be in the same order.
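A quick sketch of both `subsets` overloads on a small set (the exact ordering follows the set's iteration order):

{{{
  val s = Set(1, 2, 3)
  s.subsets(2).toList // List(Set(1, 2), Set(1, 3), Set(2, 3))
  s.subsets().size    // 8: every subset, from Set() up to Set(1, 2, 3)
}}}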
- * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}} - * - * $willForceEvaluation - * - */ - private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[C] { - private[this] val idxs = Array.range(0, len+1) - private[this] var _hasNext = true - idxs(len) = elms.size - - def hasNext = _hasNext - @throws[NoSuchElementException] - def next(): C = { - if (!hasNext) Iterator.empty.next() - - val buf = newSpecificBuilder - idxs.slice(0, len) foreach (idx => buf += elms(idx)) - val result = buf.result() - - var i = len - 1 - while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1 - - if (i < 0) _hasNext = false - else { - idxs(i) += 1 - for (j <- (i+1) until len) - idxs(j) = idxs(j-1) + 1 - } - - result - } - } - - /** Computes the intersection between this set and another set. - * - * @param that the set to intersect with. - * @return a new set consisting of all elements that are both in this - * set and in the given set `that`. - */ - def intersect(that: Set[A]): C = this.filter(that) - - /** Alias for `intersect` */ - @`inline` final def & (that: Set[A]): C = intersect(that) - - /** Computes the difference of this set and another set. - * - * @param that the set of elements to exclude. - * @return a set containing those elements of this - * set that are not also contained in the given set `that`. - */ - def diff(that: Set[A]): C - - /** Alias for `diff` */ - @`inline` final def &~ (that: Set[A]): C = this diff that - - @deprecated("Consider requiring an immutable Set", "2.13.0") - def -- (that: IterableOnce[A]): C = { - val toRemove = that.iterator.to(immutable.Set) - fromSpecific(view.filterNot(toRemove)) - } - - @deprecated("Consider requiring an immutable Set or fall back to Set.diff", "2.13.0") - def - (elem: A): C = diff(Set(elem)) - - @deprecated("Use &- with an explicit collection argument instead of - with varargs", "2.13.0") - def - (elem1: A, elem2: A, elems: A*): C = diff(elems.toSet + elem1 + elem2) - - /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates. - * - * This method takes a collection of elements and adds all elements, omitting duplicates, into $coll. - * - * Example: - * {{{ - * scala> val a = Set(1, 2) concat Set(2, 3) - * a: scala.collection.immutable.Set[Int] = Set(1, 2, 3) - * }}} - * - * @param that the collection containing the elements to add. - * @return a new $coll with the given elements added, omitting duplicates. - */ - def concat(that: collection.IterableOnce[A]): C = this match { - case optimizedSet @ (_ : scala.collection.immutable.Set.Set1[A] | _: scala.collection.immutable.Set.Set2[A] | _: scala.collection.immutable.Set.Set3[A] | _: scala.collection.immutable.Set.Set4[A]) => - // StrictOptimizedSetOps optimization of concat (these Sets cannot extend StrictOptimizedSetOps because of binary-incompatible return type; cf. 
PR #10036)
-      var result = optimizedSet.asInstanceOf[scala.collection.immutable.SetOps[A, scala.collection.immutable.Set, scala.collection.immutable.Set[A]]]
-      val it = that.iterator
-      while (it.hasNext) result = result + it.next()
-      result.asInstanceOf[C]
-    case _ => fromSpecific(that match {
-      case that: collection.Iterable[A] => new View.Concat(this, that)
-      case _ => iterator.concat(that.iterator)
-    })
-  }
-
-  @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0")
-  def + (elem: A): C = fromSpecific(new View.Appended(this, elem))
-
-  @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
-  def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))
-
-  /** Alias for `concat` */
-  @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that)
-
-  /** Computes the union of this set and another set.
-    *
-    * @param that the set to form the union with.
-    * @return a new set consisting of all elements that are in this
-    *         set or in the given set `that`.
-    */
-  @`inline` final def union(that: Set[A]): C = concat(that)
-
-  /** Alias for `union` */
-  @`inline` final def | (that: Set[A]): C = concat(that)
-}
-
-/**
-  * $factoryInfo
-  * @define coll set
-  * @define Coll `Set`
-  */
-@SerialVersionUID(3L)
-object Set extends IterableFactory.Delegate[Set](immutable.Set)
-
-/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
-abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
diff --git a/tests/pos-special/stdlib/collection/SortedMap.scala b/tests/pos-special/stdlib/collection/SortedMap.scala
deleted file mode 100644
index 7b9381ebb078..000000000000
--- a/tests/pos-special/stdlib/collection/SortedMap.scala
+++ /dev/null
@@ -1,222 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
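The aliases above all bottom out in `concat`; a compact sketch of how they relate:

{{{
  val a = Set(1, 2)
  val b = Set(2, 3)

  a union b // Set(1, 2, 3); the same as a | b, a ++ b and a.concat(b)
  a & b     // Set(2): alias for intersect
  a &~ b    // Set(1): alias for diff
}}}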
- */ - -package scala -package collection - -import scala.annotation.{implicitNotFound, nowarn} -import language.experimental.captureChecking - -/** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ -trait SortedMap[K, +V] - extends Map[K, V] - with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] - with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{ - - def unsorted: Map[K, V] = this - - def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "SortedMap" - - override def equals(that: Any): Boolean = that match { - case _ if this eq that.asInstanceOf[AnyRef] => true - case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => - (sm canEqual this) && - (this.size == sm.size) && { - val i1 = this.iterator - val i2 = sm.iterator - var allEqual = true - while (allEqual && i1.hasNext) { - val kv1 = i1.next() - val kv2 = i2.next() - allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 - } - allEqual - } - case _ => super.equals(that) - } -} - -trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends MapOps[K, V, Map, C] - with SortedOps[K, C] - with Pure { - - /** The companion object of this sorted map, providing various factory methods. - * - * @note When implementing a custom collection type and refining `CC` to the new type, this - * method needs to be overridden to return a factory for the new type (the compiler will - * issue an error otherwise). - */ - def sortedMapFactory: SortedMapFactory[CC] - - /** Similar to `mapFromIterable`, but returns a SortedMap collection type. - * Note that the return type is now `CC[K2, V2]`. - */ - @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it) - - def unsorted: Map[K, V] - - /** - * Creates an iterator over all the key/value pairs - * contained in this map having a key greater than or - * equal to `start` according to the ordering of - * this map. x.iteratorFrom(y) is equivalent - * to but often more efficient than x.from(y).iterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def iteratorFrom(start: K): Iterator[(K, V)] - - /** - * Creates an iterator over all the keys(or elements) contained in this - * collection greater than or equal to `start` - * according to the ordering of this collection. x.keysIteratorFrom(y) - * is equivalent to but often more efficient than - * x.from(y).keysIterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def keysIteratorFrom(start: K): Iterator[K] - - /** - * Creates an iterator over all the values contained in this - * map that are associated with a key greater than or equal to `start` - * according to the ordering of this map. x.valuesIteratorFrom(y) is - * equivalent to but often more efficient than - * x.from(y).valuesIterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2) - - def firstKey: K = head._1 - def lastKey: K = last._1 - - /** Find the element with smallest key larger than or equal to a given key. - * @param key The given key. - * @return `None` if there is no such node. 
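Since the `equals` above compares keys with `ordering.equiv` rather than `==`, two sorted maps sharing a non-trivial ordering can be equal even when their keys differ. A small sketch, assuming `immutable.TreeMap`:

  import scala.collection.immutable.TreeMap

  val ci = Ordering.by[String, String](_.toLowerCase)
  TreeMap("a" -> 1)(ci) == TreeMap("A" -> 1)(ci)  // true: same ordering, equiv keys, equal values
  Map("a" -> 1) == Map("A" -> 1)                  // false for plain hash maps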
- */ - def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption - - /** Find the element with largest key less than a given key. - * @param key The given key. - * @return `None` if there is no such node. - */ - def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption - - def rangeTo(to: K): C = { - val i = keySet.rangeFrom(to).iterator - if (i.isEmpty) return coll - val next = i.next() - if (ordering.compare(next, to) == 0) - if (i.isEmpty) coll - else rangeUntil(i.next()) - else - rangeUntil(next) - } - - override def keySet: SortedSet[K] = new KeySortedSet - - /** The implementation class of the set returned by `keySet` */ - protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet { - def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that)) - def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { - val map = SortedMapOps.this.rangeImpl(from, until) - new map.KeySortedSet - } - } - - /** A generic trait that is reused by sorted keyset implementations */ - protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] => - implicit def ordering: Ordering[K] = SortedMapOps.this.ordering - def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start) - } - - // And finally, we add new overloads taking an ordering - /** Builds a new sorted map by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results. - */ - def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) - - /** Builds a new sorted map by applying a function to all elements of this $coll - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @return a new $coll resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. - */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.FlatMap(this, f)) - - /** Builds a new sorted map by applying a partial function to all elements of this $coll - * on which the function is defined. - * - * @param pf the partial function which filters and maps the $coll. - * @return a new $coll resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. 
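A usage sketch for the probing operations above, with `immutable.TreeMap` standing in for any concrete `SortedMap`:

  import scala.collection.immutable.TreeMap

  val m = TreeMap(1 -> "a", 3 -> "b", 5 -> "c")
  m.minAfter(2)    // Some((3, "b")): smallest entry with key >= 2
  m.maxBefore(3)   // Some((1, "a")): largest entry with key < 3
  m.rangeTo(3)     // TreeMap(1 -> "a", 3 -> "b"): like rangeUntil, but inclusive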
- */ - def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.Collect(this, pf)) - - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(this, it) - case _ => iterator.concat(suffix.iterator) - })(ordering) - - /** Alias for `concat` */ - @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) - - @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) -} - -object SortedMapOps { - private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." - - /** Specializes `MapWithFilter` for sorted Map collections - * - * @define coll sorted map collection - */ - class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( - self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], - p: ((K, V)) => Boolean - ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { - - def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = - self.sortedMapFactory.from(new View.Map(filtered, f)) - - def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = - self.sortedMapFactory.from(new View.FlatMap(filtered, f)) - - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC]^{this, q} = - new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) - - } - -} - -@SerialVersionUID(3L) -object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/tests/pos-special/stdlib/collection/SortedOps.scala b/tests/pos-special/stdlib/collection/SortedOps.scala deleted file mode 100644 index 16751d86d9d5..000000000000 --- a/tests/pos-special/stdlib/collection/SortedOps.scala +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import language.experimental.captureChecking - -/** Base trait for sorted collections */ -trait SortedOps[A, +C] { - - def ordering: Ordering[A] - - /** Returns the first key of the collection. */ - def firstKey: A - - /** Returns the last key of the collection. */ - def lastKey: A - - /** Comparison function that orders keys. */ - @deprecated("Use ordering.compare instead", "2.13.0") - @deprecatedOverriding("Use ordering.compare instead", "2.13.0") - @inline def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) - - /** Creates a ranged projection of this collection. 
Any mutations in the - * ranged projection will update this collection and vice versa. - * - * Note: keys are not guaranteed to be consistent between this collection - * and the projection. This is the case for buffers where indexing is - * relative to the projection. - * - * @param from The lower-bound (inclusive) of the ranged projection. - * `None` if there is no lower bound. - * @param until The upper-bound (exclusive) of the ranged projection. - * `None` if there is no upper bound. - */ - def rangeImpl(from: Option[A], until: Option[A]): C - - /** Creates a ranged projection of this collection with both a lower-bound - * and an upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - * @param until The upper-bound (exclusive) of the ranged projection. - */ - def range(from: A, until: A): C = rangeImpl(Some(from), Some(until)) - - /** Creates a ranged projection of this collection with no upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - */ - @deprecated("Use rangeFrom", "2.13.0") - final def from(from: A): C = rangeFrom(from) - - /** Creates a ranged projection of this collection with no upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - */ - def rangeFrom(from: A): C = rangeImpl(Some(from), None) - - /** Creates a ranged projection of this collection with no lower-bound. - * - * @param until The upper-bound (exclusive) of the ranged projection. - */ - @deprecated("Use rangeUntil", "2.13.0") - final def until(until: A): C = rangeUntil(until) - - /** Creates a ranged projection of this collection with no lower-bound. - * - * @param until The upper-bound (exclusive) of the ranged projection. - */ - def rangeUntil(until: A): C = rangeImpl(None, Some(until)) - - /** Create a range projection of this collection with no lower-bound. - * @param to The upper-bound (inclusive) of the ranged projection. - */ - @deprecated("Use rangeTo", "2.13.0") - final def to(to: A): C = rangeTo(to) - - /** Create a range projection of this collection with no lower-bound. - * @param to The upper-bound (inclusive) of the ranged projection. - */ - def rangeTo(to: A): C -} diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala deleted file mode 100644 index fb2f879edcd2..000000000000 --- a/tests/pos-special/stdlib/collection/SortedSet.scala +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
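A usage sketch for these projections, assuming `immutable.TreeSet`: `from` bounds are inclusive, `until` bounds exclusive, and `rangeTo` closes the upper end:

  import scala.collection.immutable.TreeSet

  val s = TreeSet(1, 3, 5, 7)
  s.range(3, 7)    // TreeSet(3, 5)
  s.rangeFrom(5)   // TreeSet(5, 7)
  s.rangeUntil(5)  // TreeSet(1, 3)
  s.rangeTo(5)     // TreeSet(1, 3, 5)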
- */ - -package scala.collection - -import scala.annotation.{implicitNotFound, nowarn} -import scala.annotation.unchecked.uncheckedVariance -import language.experimental.captureChecking - -/** Base type of sorted sets */ -trait SortedSet[A] extends Set[A] - with SortedSetOps[A, SortedSet, SortedSet[A]] - with SortedSetFactoryDefaults[A, SortedSet, Set] { - - def unsorted: Set[A] = this - - def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "SortedSet" - - override def equals(that: Any): Boolean = that match { - case _ if this eq that.asInstanceOf[AnyRef] => true - case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering => - (ss canEqual this) && - (this.size == ss.size) && { - val i1 = this.iterator - val i2 = ss.iterator - var allEqual = true - while (allEqual && i1.hasNext) - allEqual = ordering.equiv(i1.next(), i2.next()) - allEqual - } - case _ => - super.equals(that) - } - -} - -trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] - extends SetOps[A, Set, C] - with SortedOps[A, C] { - - /** The companion object of this sorted set, providing various factory methods. - * - * @note When implementing a custom collection type and refining `CC` to the new type, this - * method needs to be overridden to return a factory for the new type (the compiler will - * issue an error otherwise). - */ - def sortedIterableFactory: SortedIterableFactory[CC] - - def unsorted: Set[A] - - /** - * Creates an iterator that contains all values from this collection - * greater than or equal to `start` according to the ordering of - * this collection. x.iteratorFrom(y) is equivalent to but will usually - * be more efficient than x.from(y).iterator - * - * @param start The lower-bound (inclusive) of the iterator - */ - def iteratorFrom(start: A): Iterator[A] - - @deprecated("Use `iteratorFrom` instead.", "2.13.0") - @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) - - def firstKey: A = head - def lastKey: A = last - - /** Find the smallest element larger than or equal to a given key. - * @param key The given key. - * @return `None` if there is no such node. - */ - def minAfter(key: A): Option[A] = rangeFrom(key).headOption - - /** Find the largest element less than a given key. - * @param key The given key. - * @return `None` if there is no such node. - */ - def maxBefore(key: A): Option[A] = rangeUntil(key).lastOption - - override def min[B >: A](implicit ord: Ordering[B]): A = - if (isEmpty) throw new UnsupportedOperationException("empty.min") - else if (ord == ordering) head - else if (ord isReverseOf ordering) last - else super.min[B] // need the type annotation for it to infer the correct implicit - - override def max[B >: A](implicit ord: Ordering[B]): A = - if (isEmpty) throw new UnsupportedOperationException("empty.max") - else if (ord == ordering) last - else if (ord isReverseOf ordering) head - else super.max[B] // need the type annotation for it to infer the correct implicit - - def rangeTo(to: A): C = { - val i = rangeFrom(to).iterator - if (i.isEmpty) return coll - val next = i.next() - if (ordering.compare(next, to) == 0) - if (i.isEmpty) coll - else rangeUntil(i.next()) - else - rangeUntil(next) - } - - /** Builds a new sorted collection by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. 
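The `min`/`max` overrides above answer in constant time when the requested ordering is the set's own ordering or its reverse. A sketch with `immutable.TreeSet`:

  import scala.collection.immutable.TreeSet

  val asc  = TreeSet(3, 1, 2)                        // natural ordering
  asc.min                                            // 1: ord == ordering, answered by head
  val desc = TreeSet(3, 1, 2)(Ordering.Int.reverse)  // reversed ordering
  desc.min                                           // 1: Ordering.Int isReverseOf the set's ordering, answered by last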
- * @tparam B the element type of the returned collection. - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results. - */ - def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.Map(this, f)) - - /** Builds a new sorted collection by applying a function to all elements of this $coll - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @return a new $coll resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. - */ - def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.FlatMap(this, f)) - - /** Returns a $coll formed from this $coll and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is longer than the other, its remaining elements are ignored. - * - * @param that The iterable providing the second half of each result pair - * @tparam B the type of the second half of the returned pairs - * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. - * The length of the returned collection is the minimum of the lengths of this $coll and `that`. - */ - def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound bcs of VarianceNote - sortedIterableFactory.from(that match { - case that: Iterable[B] => new View.Zip(this, that) - case _ => iterator.zip(that) - }) - - /** Builds a new sorted collection by applying a partial function to all elements of this $coll - * on which the function is defined. - * - * @param pf the partial function which filters and maps the $coll. - * @tparam B the element type of the returned collection. - * @return a new $coll resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. - */ - def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.Collect(this, pf)) -} - -object SortedSetOps { - private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[${A}] first by calling `unsorted`." - private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(${A}, ${B})]. You may want to upcast to a Set[${A}] first by calling `unsorted`." 
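A sketch of the `Ordering`-constrained `map` above and of the `unsorted` escape hatch that `ordMsg` points to, assuming `immutable.TreeSet`:

  import scala.collection.immutable.TreeSet

  val s = TreeSet(1, 2, 3)
  s.map(-_)                        // TreeSet(-3, -2, -1): result re-sorted via Ordering[Int]
  // s.map(_ => new Object)        // does not compile: no Ordering[Object] (ordMsg above)
  s.unsorted.map(_ => new Object)  // fine: upcast to Set first, as the message suggests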
- - /** Specialize `WithFilter` for sorted collections - * - * @define coll sorted collection - */ - class WithFilter[+A, +IterableCC[_], +CC[X] <: SortedSet[X]]( - self: SortedSetOps[A, CC, _] with IterableOps[A, IterableCC, _], - p: A => Boolean - ) extends IterableOps.WithFilter[A, IterableCC](self, p) { - - def map[B : Ordering](f: A => B): CC[B] = - self.sortedIterableFactory.from(new View.Map(filtered, f)) - - def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] = - self.sortedIterableFactory.from(new View.FlatMap(filtered, f)) - - override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC]^{this, q} = - new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a)) - } - -} - -@SerialVersionUID(3L) -object SortedSet extends SortedIterableFactory.Delegate[SortedSet](immutable.SortedSet) - diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala deleted file mode 100644 index 0a0ac0075990..000000000000 --- a/tests/pos-special/stdlib/collection/Stepper.scala +++ /dev/null @@ -1,378 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer} -import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator} -import java.{lang => jl} -import language.experimental.captureChecking - -import scala.collection.Stepper.EfficientSplit - -/** Steppers exist to enable creating Java streams over Scala collections, see - * [[scala.jdk.StreamConverters]]. Besides that use case, they allow iterating over collections - * holding unboxed primitives (e.g., `Array[Int]`) without boxing the elements. - * - * Steppers have an iterator-like interface with methods `hasStep` and `nextStep()`. The difference - * to iterators - and the reason `Stepper` is not a subtype of `Iterator` - is that there are - * hand-specialized variants of `Stepper` for `Int`, `Long` and `Double` ([[IntStepper]], etc.). - * These enable iterating over collections holding unboxed primitives (e.g., Arrays, - * [[scala.jdk.Accumulator]]s) without boxing the elements. - * - * The selection of primitive types (`Int`, `Long` and `Double`) matches the hand-specialized - * variants of Java Streams ([[java.util.stream.Stream]], [[java.util.stream.IntStream]], etc.) - * and the corresponding Java Spliterators ([[java.util.Spliterator]], [[java.util.Spliterator.OfInt]], etc.). - * - * Steppers can be converted to Scala Iterators, Java Iterators and Java Spliterators. Primitive - * Steppers are converted to the corresponding primitive Java Iterators and Spliterators. - * - * @tparam A the element type of the Stepper - */ -trait Stepper[@specialized(Double, Int, Long) +A] { - this: Stepper[A]^ => - - /** Check if there's an element available. */ - def hasStep: Boolean - - /** Return the next element and advance the stepper */ - def nextStep(): A - - /** Split this stepper, if applicable. The elements of the current Stepper are split up between - * the resulting Stepper and the current stepper. - * - * May return `null`, in which case the current Stepper yields the same elements as before. - * - * See method `trySplit` in [[java.util.Spliterator]]. 
- */ - def trySplit(): Stepper[A] - - /** Returns an estimate of the number of elements of this Stepper, or [[Long.MaxValue]]. See - * method `estimateSize` in [[java.util.Spliterator]]. - */ - def estimateSize: Long - - /** Returns a set of characteristics of this Stepper and its elements. See method - * `characteristics` in [[java.util.Spliterator]]. - */ - def characteristics: Int - - /** Returns a [[java.util.Spliterator]] corresponding to this Stepper. - * - * Note that the return type is `Spliterator[_]` instead of `Spliterator[A]` to allow returning - * a [[java.util.Spliterator.OfInt]] (which is a `Spliterator[Integer]`) in the subclass [[IntStepper]] - * (which is a `Stepper[Int]`). - */ - def spliterator[B >: A]: Spliterator[_] - - /** Returns a Java [[java.util.Iterator]] corresponding to this Stepper. - * - * Note that the return type is `Iterator[_]` instead of `Iterator[A]` to allow returning - * a [[java.util.PrimitiveIterator.OfInt]] (which is a `Iterator[Integer]`) in the subclass - * [[IntStepper]] (which is a `Stepper[Int]`). - */ - def javaIterator[B >: A]: JIterator[_] - - /** Returns an [[Iterator]] corresponding to this Stepper. Note that Iterators corresponding to - * primitive Steppers box the elements. - */ - def iterator: Iterator[A] = new AbstractIterator[A] { - def hasNext: Boolean = hasStep - def next(): A = nextStep() - } -} - -object Stepper { - /** A marker trait that indicates that a `Stepper` can call `trySplit` with at worst O(log N) time - * and space complexity, and that the division is likely to be reasonably even. Steppers marked - * with `EfficientSplit` can be converted to parallel streams with the `asJavaParStream` method - * defined in [[scala.jdk.StreamConverters]]. - */ - trait EfficientSplit - - private[collection] final def throwNSEE(): Nothing = throw new NoSuchElementException("Empty Stepper") - - /* These adapter classes can wrap an AnyStepper of a numeric type into a possibly widened primitive Stepper type. - * This provides a basis for more efficient stream processing on unboxed values provided that the original source - * of the data is boxed. In other cases native implementations of the primitive stepper types should be provided - * (see for example IntArrayStepper and WidenedByteArrayStepper). 
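A usage sketch of the `hasStep`/`nextStep` protocol; on a `Vector[Int]` the implicit `StepperShape` (deleted further down in this patch) selects an `IntStepper`, so the loop below stays unboxed:

  val st = Vector(1, 2, 3).stepper
  var sum = 0
  while (st.hasStep) sum += st.nextStep()
  // sum == 6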
*/ - - private[collection] class UnboxingDoubleStepper(st: AnyStepper[Double]) extends DoubleStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Double = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): DoubleStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingDoubleStepper(s) - } - } - - private[collection] class UnboxingIntStepper(st: AnyStepper[Int]) extends IntStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingIntStepper(s) - } - } - - private[collection] class UnboxingLongStepper(st: AnyStepper[Long]) extends LongStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Long = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): LongStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingLongStepper(s) - } - } - - private[collection] class UnboxingByteStepper(st: AnyStepper[Byte]) extends IntStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingByteStepper(s) - } - } - - private[collection] class UnboxingCharStepper(st: AnyStepper[Char]) extends IntStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingCharStepper(s) - } - } - - private[collection] class UnboxingShortStepper(st: AnyStepper[Short]) extends IntStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingShortStepper(s) - } - } - - private[collection] class UnboxingFloatStepper(st: AnyStepper[Float]) extends DoubleStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Double = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): DoubleStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingFloatStepper(s) - } - } -} - -/** A Stepper for arbitrary element types. See [[Stepper]]. 
*/ -trait AnyStepper[+A] extends Stepper[A] { - this: AnyStepper[A]^ => - - def trySplit(): AnyStepper[A] - - def spliterator[B >: A]: Spliterator[B]^{this} = new AnyStepper.AnyStepperSpliterator(this) - - def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { - def hasNext: Boolean = hasStep - def next(): B = nextStep() - } -} - -object AnyStepper { - class AnyStepperSpliterator[A](s: AnyStepper[A]^) extends Spliterator[A] { - def tryAdvance(c: Consumer[_ >: A]): Boolean = - if (s.hasStep) { c.accept(s.nextStep()); true } else false - def trySplit(): Spliterator[A]^{this} = { - val sp = s.trySplit() - if (sp == null) null else sp.spliterator - } - def estimateSize(): Long = s.estimateSize - def characteristics(): Int = s.characteristics - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: Consumer[_ >: A]): Unit = - while (s.hasStep) { c.accept(s.nextStep()) } - } - - def ofSeqDoubleStepper(st: DoubleStepper): AnyStepper[Double] = new BoxedDoubleStepper(st) - def ofParDoubleStepper(st: DoubleStepper with EfficientSplit): AnyStepper[Double] with EfficientSplit = new BoxedDoubleStepper(st) with EfficientSplit - - def ofSeqIntStepper(st: IntStepper): AnyStepper[Int] = new BoxedIntStepper(st) - def ofParIntStepper(st: IntStepper with EfficientSplit): AnyStepper[Int] with EfficientSplit = new BoxedIntStepper(st) with EfficientSplit - - def ofSeqLongStepper(st: LongStepper): AnyStepper[Long] = new BoxedLongStepper(st) - def ofParLongStepper(st: LongStepper with EfficientSplit): AnyStepper[Long] with EfficientSplit = new BoxedLongStepper(st) with EfficientSplit - - private[collection] class BoxedDoubleStepper(st: DoubleStepper) extends AnyStepper[Double] { - def hasStep: Boolean = st.hasStep - def nextStep(): Double = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): AnyStepper[Double] = { - val s = st.trySplit() - if (s == null) null else new BoxedDoubleStepper(s) - } - } - - private[collection] class BoxedIntStepper(st: IntStepper) extends AnyStepper[Int] { - def hasStep: Boolean = st.hasStep - def nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): AnyStepper[Int] = { - val s = st.trySplit() - if (s == null) null else new BoxedIntStepper(s) - } - } - - private[collection] class BoxedLongStepper(st: LongStepper) extends AnyStepper[Long] { - def hasStep: Boolean = st.hasStep - def nextStep(): Long = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): AnyStepper[Long] = { - val s = st.trySplit() - if (s == null) null else new BoxedLongStepper(s) - } - } -} - -/** A Stepper for Ints. See [[Stepper]]. 
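A sketch of the Java bridging above for a reference element type: `javaIterator` yields a plain `java.util.Iterator`, while the primitive steppers below return `PrimitiveIterator` variants:

  import scala.collection.AnyStepper

  val st: AnyStepper[String] = Vector("a", "b").stepper
  val jit = st.javaIterator
  while (jit.hasNext) println(jit.next())  // prints a, b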
*/ -trait IntStepper extends Stepper[Int] { - this: IntStepper^ => - - def trySplit(): IntStepper - - def spliterator[B >: Int]: Spliterator.OfInt^{this} = new IntStepper.IntStepperSpliterator(this) - - def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { - def hasNext: Boolean = hasStep - def nextInt(): Int = nextStep() - } -} -object IntStepper { - class IntStepperSpliterator(s: IntStepper^) extends Spliterator.OfInt { - def tryAdvance(c: IntConsumer): Boolean = - if (s.hasStep) { c.accept(s.nextStep()); true } else false - // Override for efficiency: don't wrap the function and call the `tryAdvance` overload - override def tryAdvance(c: Consumer[_ >: jl.Integer]): Boolean = (c: AnyRef) match { - case ic: IntConsumer => tryAdvance(ic) - case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false - } - // override required for dotty#6152 - override def trySplit(): Spliterator.OfInt^{this} = { - val sp = s.trySplit() - if (sp == null) null else sp.spliterator - } - def estimateSize(): Long = s.estimateSize - def characteristics(): Int = s.characteristics - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: IntConsumer): Unit = - while (s.hasStep) { c.accept(s.nextStep()) } - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: Consumer[_ >: jl.Integer]): Unit = (c: AnyRef) match { - case ic: IntConsumer => forEachRemaining(ic) - case _ => while (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())) } - } - } -} - -/** A Stepper for Doubles. See [[Stepper]]. */ -trait DoubleStepper extends Stepper[Double] { - this: DoubleStepper^ => - def trySplit(): DoubleStepper - - def spliterator[B >: Double]: Spliterator.OfDouble^{this} = new DoubleStepper.DoubleStepperSpliterator(this) - - def javaIterator[B >: Double]: PrimitiveIterator.OfDouble^{this} = new PrimitiveIterator.OfDouble { - def hasNext: Boolean = hasStep - def nextDouble(): Double = nextStep() - } -} - -object DoubleStepper { - class DoubleStepperSpliterator(s: DoubleStepper^) extends Spliterator.OfDouble { - def tryAdvance(c: DoubleConsumer): Boolean = - if (s.hasStep) { c.accept(s.nextStep()); true } else false - // Override for efficiency: don't wrap the function and call the `tryAdvance` overload - override def tryAdvance(c: Consumer[_ >: jl.Double]): Boolean = (c: AnyRef) match { - case ic: DoubleConsumer => tryAdvance(ic) - case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false - } - // override required for dotty#6152 - override def trySplit(): Spliterator.OfDouble^{this} = { - val sp = s.trySplit() - if (sp == null) null else sp.spliterator - } - def estimateSize(): Long = s.estimateSize - def characteristics(): Int = s.characteristics - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: DoubleConsumer): Unit = - while (s.hasStep) { c.accept(s.nextStep()) } - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: Consumer[_ >: jl.Double]): Unit = (c: AnyRef) match { - case ic: DoubleConsumer => forEachRemaining(ic) - case _ => while (s.hasStep) { c.accept(jl.Double.valueOf(s.nextStep())) } - } - } -} - -/** A Stepper for Longs. See [[Stepper]]. 
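A sketch of the Java-stream use case these spliterators serve, via `scala.jdk.StreamConverters`; steppers marked `EfficientSplit` additionally enable the parallel variants:

  import scala.jdk.StreamConverters._

  // the IntStepper/IntStepperSpliterator pair above makes this a primitive IntStream
  val total = Vector(1, 2, 3).asJavaSeqStream.sum  // 6, no boxing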
*/ -trait LongStepper extends Stepper[Long] { - this: LongStepper^ => - - def trySplit(): LongStepper^{this} - - def spliterator[B >: Long]: Spliterator.OfLong^{this} = new LongStepper.LongStepperSpliterator(this) - - def javaIterator[B >: Long]: PrimitiveIterator.OfLong^{this} = new PrimitiveIterator.OfLong { - def hasNext: Boolean = hasStep - def nextLong(): Long = nextStep() - } -} - -object LongStepper { - class LongStepperSpliterator(s: LongStepper^) extends Spliterator.OfLong { - def tryAdvance(c: LongConsumer): Boolean = - if (s.hasStep) { c.accept(s.nextStep()); true } else false - // Override for efficiency: don't wrap the function and call the `tryAdvance` overload - override def tryAdvance(c: Consumer[_ >: jl.Long]): Boolean = (c: AnyRef) match { - case ic: LongConsumer => tryAdvance(ic) - case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false - } - // override required for dotty#6152 - override def trySplit(): Spliterator.OfLong^{this} = { - val sp = s.trySplit() - if (sp == null) null else sp.spliterator - } - def estimateSize(): Long = s.estimateSize - def characteristics(): Int = s.characteristics - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: LongConsumer): Unit = - while (s.hasStep) { c.accept(s.nextStep()) } - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: Consumer[_ >: jl.Long]): Unit = (c: AnyRef) match { - case ic: LongConsumer => forEachRemaining(ic) - case _ => while (s.hasStep) { c.accept(jl.Long.valueOf(s.nextStep())) } - } - } -} diff --git a/tests/pos-special/stdlib/collection/StepperShape.scala b/tests/pos-special/stdlib/collection/StepperShape.scala deleted file mode 100644 index c6b520400d89..000000000000 --- a/tests/pos-special/stdlib/collection/StepperShape.scala +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import java.{lang => jl} - -import language.experimental.captureChecking -import scala.collection.Stepper.EfficientSplit - -/** An implicit StepperShape instance is used in the [[IterableOnce.stepper]] to return a possibly - * specialized Stepper `S` according to the element type `T`. - */ -sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure { - /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */ - def shape: StepperShape.Shape - - /** Create an unboxing primitive sequential Stepper from a boxed `AnyStepper`. - * This is an identity operation for reference shapes. */ - def seqUnbox(st: AnyStepper[T]): S - - /** Create an unboxing primitive parallel (i.e. `with EfficientSplit`) Stepper from a boxed `AnyStepper`. - * This is an identity operation for reference shapes. 
*/ - def parUnbox(st: AnyStepper[T] with EfficientSplit): S with EfficientSplit -} - -object StepperShape extends StepperShapeLowPriority1 { - class Shape private[StepperShape] (private val s: Int) extends AnyVal - - // reference - val ReferenceShape = new Shape(0) - - // primitive - val IntShape = new Shape(1) - val LongShape = new Shape(2) - val DoubleShape = new Shape(3) - - // widening - val ByteShape = new Shape(4) - val ShortShape = new Shape(5) - val CharShape = new Shape(6) - val FloatShape = new Shape(7) - - implicit val intStepperShape: StepperShape[Int, IntStepper] = new StepperShape[Int, IntStepper] { - def shape = IntShape - def seqUnbox(st: AnyStepper[Int]): IntStepper = new Stepper.UnboxingIntStepper(st) - def parUnbox(st: AnyStepper[Int] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingIntStepper(st) with EfficientSplit - } - implicit val jIntegerStepperShape: StepperShape[jl.Integer, IntStepper] = intStepperShape.asInstanceOf[StepperShape[jl.Integer, IntStepper]] - - implicit val longStepperShape: StepperShape[Long, LongStepper] = new StepperShape[Long, LongStepper] { - def shape = LongShape - def seqUnbox(st: AnyStepper[Long]): LongStepper = new Stepper.UnboxingLongStepper(st) - def parUnbox(st: AnyStepper[Long] with EfficientSplit): LongStepper with EfficientSplit = new Stepper.UnboxingLongStepper(st) with EfficientSplit - } - implicit val jLongStepperShape: StepperShape[jl.Long, LongStepper] = longStepperShape.asInstanceOf[StepperShape[jl.Long, LongStepper]] - - implicit val doubleStepperShape: StepperShape[Double, DoubleStepper] = new StepperShape[Double, DoubleStepper] { - def shape = DoubleShape - def seqUnbox(st: AnyStepper[Double]): DoubleStepper = new Stepper.UnboxingDoubleStepper(st) - def parUnbox(st: AnyStepper[Double] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingDoubleStepper(st) with EfficientSplit - } - implicit val jDoubleStepperShape: StepperShape[jl.Double, DoubleStepper] = doubleStepperShape.asInstanceOf[StepperShape[jl.Double, DoubleStepper]] - - implicit val byteStepperShape: StepperShape[Byte, IntStepper] = new StepperShape[Byte, IntStepper] { - def shape = ByteShape - def seqUnbox(st: AnyStepper[Byte]): IntStepper = new Stepper.UnboxingByteStepper(st) - def parUnbox(st: AnyStepper[Byte] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingByteStepper(st) with EfficientSplit - } - implicit val jByteStepperShape: StepperShape[jl.Byte, IntStepper] = byteStepperShape.asInstanceOf[StepperShape[jl.Byte, IntStepper]] - - implicit val shortStepperShape: StepperShape[Short, IntStepper] = new StepperShape[Short, IntStepper] { - def shape = ShortShape - def seqUnbox(st: AnyStepper[Short]): IntStepper = new Stepper.UnboxingShortStepper(st) - def parUnbox(st: AnyStepper[Short] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingShortStepper(st) with EfficientSplit - } - implicit val jShortStepperShape: StepperShape[jl.Short, IntStepper] = shortStepperShape.asInstanceOf[StepperShape[jl.Short, IntStepper]] - - implicit val charStepperShape: StepperShape[Char, IntStepper] = new StepperShape[Char, IntStepper] { - def shape = CharShape - def seqUnbox(st: AnyStepper[Char]): IntStepper = new Stepper.UnboxingCharStepper(st) - def parUnbox(st: AnyStepper[Char] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingCharStepper(st) with EfficientSplit - } - implicit val jCharacterStepperShape: StepperShape[jl.Character, IntStepper] = 
charStepperShape.asInstanceOf[StepperShape[jl.Character, IntStepper]] - - implicit val floatStepperShape: StepperShape[Float, DoubleStepper] = new StepperShape[Float, DoubleStepper] { - def shape = FloatShape - def seqUnbox(st: AnyStepper[Float]): DoubleStepper = new Stepper.UnboxingFloatStepper(st) - def parUnbox(st: AnyStepper[Float] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingFloatStepper(st) with EfficientSplit - } - implicit val jFloatStepperShape: StepperShape[jl.Float, DoubleStepper] = floatStepperShape.asInstanceOf[StepperShape[jl.Float, DoubleStepper]] -} - -trait StepperShapeLowPriority1 extends StepperShapeLowPriority2 { - implicit def anyStepperShape[T]: StepperShape[T, AnyStepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, AnyStepper[T]]] -} - -trait StepperShapeLowPriority2 { - implicit def baseStepperShape[T]: StepperShape[T, Stepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, Stepper[T]]] - - protected val anyStepperShapePrototype: StepperShape[AnyRef, Stepper[AnyRef]] = new StepperShape[AnyRef, Stepper[AnyRef]] { - def shape = StepperShape.ReferenceShape - def seqUnbox(st: AnyStepper[AnyRef]): Stepper[AnyRef] = st - def parUnbox(st: AnyStepper[AnyRef] with EfficientSplit): Stepper[AnyRef] with EfficientSplit = st - } -} \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala deleted file mode 100644 index a9c5e0af43b3..000000000000 --- a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -/** - * Trait that overrides map operations to take advantage of strict builders. 
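A sketch of the implicit selection above: the element type picks the shape, and the shape fixes the stepper type, with the widening shapes reusing `IntStepper`/`DoubleStepper`:

  import scala.collection.{AnyStepper, IntStepper}

  val i: IntStepper         = Vector(1, 2, 3).stepper   // intStepperShape
  val b: IntStepper         = Vector(1.toByte).stepper  // byteStepperShape, widened to Int
  val a: AnyStepper[String] = Vector("x").stepper       // anyStepperShape (reference shape)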
- * - * @tparam K Type of keys - * @tparam V Type of values - * @tparam CC Collection type constructor - * @tparam C Collection type - */ -trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] - extends MapOps[K, V, CC, C] - with StrictOptimizedIterableOps[(K, V), Iterable, C] - with Pure { - - override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = - strictOptimizedMap(mapFactory.newBuilder, f) - - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = - strictOptimizedFlatMap(mapFactory.newBuilder, f) - - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = - strictOptimizedConcat(suffix, mapFactory.newBuilder) - - override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = - strictOptimizedCollect(mapFactory.newBuilder, pf) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { - val b = mapFactory.newBuilder[K, V1] - b ++= this - b += elem1 - b += elem2 - if (elems.nonEmpty) b ++= elems - b.result() - } -} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala index bfea9eda8bd3..50ddbca30f9e 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -12,20 +12,19 @@ package scala.collection import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures /** * Trait that overrides operations on sequences in order * to take advantage of strict builders. */ trait StrictOptimizedSeqOps [+A, +CC[_], +C] - extends Any + extends AnyRef with SeqOps[A, CC, C] with StrictOptimizedIterableOps[A, CC, C] { override def distinctBy[B](f: A -> B): C = { val builder = newSpecificBuilder - val seen = mutable.HashSet.empty[B @uncheckedCaptures] + val seen = mutable.HashSet.empty[B] val it = this.iterator while (it.hasNext) { val next = it.next() @@ -80,7 +79,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def diff[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) coll else { - val occ = occCounts[B @uncheckedCaptures](that) + val occ = occCounts(that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { @@ -98,7 +97,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def intersect[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) empty else { - val occ = occCounts[B @uncheckedCaptures](that) + val occ = occCounts(that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala deleted file mode 100644 index 8ed337fff998..000000000000 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -/** - * Trait that overrides set operations to take advantage of strict builders. 
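The strict variants above trade laziness for a single builder pass. A standalone sketch of the pattern, mirroring the `distinctBy` hunk with a hypothetical `distinctByStrict` helper:

  import scala.collection.mutable

  def distinctByStrict[A, B](xs: List[A])(f: A => B): List[A] = {
    val seen = mutable.HashSet.empty[B]       // one seen-set,
    val b    = List.newBuilder[A]             // one builder, one pass
    for (x <- xs) if (seen.add(f(x))) b += x  // add returns false on duplicates
    b.result()
  }
  distinctByStrict(List("a", "bb", "cc", "d"))(_.length)  // List(a, bb)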
- * - * @tparam A Elements type - * @tparam CC Collection type constructor - * @tparam C Collection type - */ -trait StrictOptimizedSetOps[A, +CC[_], +C <: SetOps[A, CC, C]] - extends SetOps[A, CC, C] - with StrictOptimizedIterableOps[A, CC, C] { - - override def concat(that: IterableOnce[A]): C = - strictOptimizedConcat(that, newSpecificBuilder) - -} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala deleted file mode 100644 index 9a9e6e367922..000000000000 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import scala.annotation.implicitNotFound -import language.experimental.captureChecking - -/** - * Trait that overrides sorted map operations to take advantage of strict builders. - * - * @tparam K Type of keys - * @tparam V Type of values - * @tparam CC Collection type constructor - * @tparam C Collection type - */ -trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends SortedMapOps[K, V, CC, C] - with StrictOptimizedMapOps[K, V, Map, C] { - - override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - strictOptimizedMap(sortedMapFactory.newBuilder, f) - - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) - - override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = - strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering)) - - override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - strictOptimizedCollect(sortedMapFactory.newBuilder, pf) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { - val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] - if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] - } -} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala deleted file mode 100644 index ded7deabccca..000000000000 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.annotation.implicitNotFound -import scala.annotation.unchecked.uncheckedVariance - -/** - * Trait that overrides sorted set operations to take advantage of strict builders. 
- * - * @tparam A Elements type - * @tparam CC Collection type constructor - * @tparam C Collection type - */ -trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] - extends SortedSetOps[A, CC, C] - with StrictOptimizedSetOps[A, Set, C] { - - override def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - strictOptimizedMap(sortedIterableFactory.newBuilder, f) - - override def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - strictOptimizedFlatMap(sortedIterableFactory.newBuilder, f) - - override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = - strictOptimizedZip(that, sortedIterableFactory.newBuilder[(A, B)]) - - override def collect[B](pf: PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - strictOptimizedCollect(sortedIterableFactory.newBuilder, pf) - -} diff --git a/tests/pos-special/stdlib/collection/StringOps.scala b/tests/pos-special/stdlib/collection/StringOps.scala index 3e3e2f8d872e..f570531def98 100644 --- a/tests/pos-special/stdlib/collection/StringOps.scala +++ b/tests/pos-special/stdlib/collection/StringOps.scala @@ -964,7 +964,7 @@ final class StringOps(private val s: String) extends AnyVal { else if (s.equalsIgnoreCase("false")) false else throw new IllegalArgumentException("For input string: \""+s+"\"") - def toArray[sealed B >: Char](implicit tag: ClassTag[B]): Array[B] = + def toArray[B >: Char](implicit tag: ClassTag[B]): Array[B] = if (tag == ClassTag.Char) s.toCharArray.asInstanceOf[Array[B]] else new WrappedString(s).toArray[B] diff --git a/tests/pos-special/stdlib/collection/StringParsers.scala b/tests/pos-special/stdlib/collection/StringParsers.scala deleted file mode 100644 index 47281815da71..000000000000 --- a/tests/pos-special/stdlib/collection/StringParsers.scala +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection - -import scala.annotation.tailrec -import language.experimental.captureChecking - -/** A module containing the implementations of parsers from strings to numeric types, and boolean - */ -private[scala] object StringParsers { - - //compile-time constant helpers - - //Int.MinValue == -2147483648 - private final val intOverflowBoundary = -214748364 - private final val intOverflowDigit = 9 - //Long.MinValue == -9223372036854775808L - private final val longOverflowBoundary = -922337203685477580L - private final val longOverflowDigit = 9 - - @inline - private[this] final def decValue(ch: Char): Int = java.lang.Character.digit(ch, 10) - - @inline - private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = { - @tailrec - def rec(i: Int, agg: Int): Option[Int] = - if (agg < min) None - else if (i == len) { - if (!isPositive) Some(agg) - else if (agg == min) None - else Some(-agg) - } - else { - val digit = decValue(from.charAt(i)) - if (digit == -1) None - else rec(i + 1, agg * 10 - digit) - } - rec(1, agg) - } - - @inline - private[this] final def isDigit(c: Char): Boolean = c >= '0' && c <= '9' - - //bool - @inline - final def parseBool(from: String): Option[Boolean] = - if (from.equalsIgnoreCase("true")) Some(true) - else if (from.equalsIgnoreCase("false")) Some(false) - else None - - //integral types - final def parseByte(from: String): Option[Byte] = { - val len = from.length() - //empty strings parse to None - if (len == 0) None - else { - val first = from.charAt(0) - val v = decValue(first) - if (len == 1) { - //"+" and "-" parse to None - if (v > -1) Some(v.toByte) - else None - } - else if (v > -1) stepToOverflow(from, len, -v, true, Byte.MinValue).map(_.toByte) - else if (first == '+') stepToOverflow(from, len, 0, true, Byte.MinValue).map(_.toByte) - else if (first == '-') stepToOverflow(from, len, 0, false, Byte.MinValue).map(_.toByte) - else None - } - } - - final def parseShort(from: String): Option[Short] = { - val len = from.length() - //empty strings parse to None - if (len == 0) None - else { - val first = from.charAt(0) - val v = decValue(first) - if (len == 1) { - //"+" and "-" parse to None - if (v > -1) Some(v.toShort) - else None - } - else if (v > -1) stepToOverflow(from, len, -v, true, Short.MinValue).map(_.toShort) - else if (first == '+') stepToOverflow(from, len, 0, true, Short.MinValue).map(_.toShort) - else if (first == '-') stepToOverflow(from, len, 0, false, Short.MinValue).map(_.toShort) - else None - } - } - - final def parseInt(from: String): Option[Int] = { - val len = from.length() - - @tailrec - def step(i: Int, agg: Int, isPositive: Boolean): Option[Int] = { - if (i == len) { - if (!isPositive) Some(agg) - else if (agg == Int.MinValue) None - else Some(-agg) - } - else if (agg < intOverflowBoundary) None - else { - val digit = decValue(from.charAt(i)) - if (digit == -1 || (agg == intOverflowBoundary && digit == intOverflowDigit)) None - else step(i + 1, (agg * 10) - digit, isPositive) - } - } - //empty strings parse to None - if (len == 0) None - else { - val first = from.charAt(0) - val v = decValue(first) - if (len == 1) { - //"+" and "-" parse to None - if (v > -1) Some(v) - else None - } - else if (v > -1) step(1, -v, true) - else if (first == '+') step(1, 0, true) - else if (first == '-') step(1, 0, false) - else None - } - } - - final def parseLong(from: String): Option[Long] = { - //like parseInt, but Longer - val len = from.length() - - @tailrec - def 
step(i: Int, agg: Long, isPositive: Boolean): Option[Long] = { - if (i == len) { - if (isPositive && agg == Long.MinValue) None - else if (isPositive) Some(-agg) - else Some(agg) - } - else if (agg < longOverflowBoundary) None - else { - val digit = decValue(from.charAt(i)) - if (digit == -1 || (agg == longOverflowBoundary && digit == longOverflowDigit)) None - else step(i + 1, agg * 10 - digit, isPositive) - } - } - //empty strings parse to None - if (len == 0) None - else { - val first = from.charAt(0) - val v = decValue(first).toLong - if (len == 1) { - //"+" and "-" parse to None - if (v > -1) Some(v) - else None - } - else if (v > -1) step(1, -v, true) - else if (first == '+') step(1, 0, true) - else if (first == '-') step(1, 0, false) - else None - } - } - - //floating point - final def checkFloatFormat(format: String): Boolean = { - //indices are tracked with a start index which points *at* the first index - //and an end index which points *after* the last index - //so that slice length === end - start - //thus start == end <=> empty slice - //and format.substring(start, end) is equivalent to the slice - - //some utilities for working with index bounds into the original string - @inline - def forAllBetween(start: Int, end: Int, pred: Char => Boolean): Boolean = { - @tailrec - def rec(i: Int): Boolean = i >= end || pred(format.charAt(i)) && rec(i + 1) - rec(start) - } - - //one after last index for the predicate to hold, or `from` if none hold - //may point after the end of the string - @inline - def skipIndexWhile(predicate: Char => Boolean, from: Int, until: Int): Int = { - @tailrec @inline - def rec(i: Int): Int = if ((i < until) && predicate(format.charAt(i))) rec(i + 1) - else i - rec(from) - } - - - def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { - def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') || - (ch >= 'a' && ch <= 'f') || - (ch >= 'A' && ch <= 'F')) - - def prefixOK(startIndex: Int, endIndex: Int): Boolean = { - val len = endIndex - startIndex - (len > 0) && { - //the prefix part is - //hexDigits - //hexDigits. - //hexDigits.hexDigits - //.hexDigits - //but not . 
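The integral parsers above fold digits as `agg * 10 - digit`, keeping the accumulator non-positive so that `MinValue`, whose magnitude exceeds `MaxValue`, needs no special case. A standalone sketch for `Int`, using a hypothetical `parseIntNeg` with the same boundary constants:

  def parseIntNeg(s: String): Option[Int] = {
    val negative = s.startsWith("-")
    val start    = if (negative || s.startsWith("+")) 1 else 0
    if (start == s.length) return None  // "", "+" and "-" parse to None
    var i   = start
    var agg = 0
    while (i < s.length) {
      val d = s.charAt(i) - '0'
      if (d < 0 || d > 9) return None
      // -214748364 == Int.MinValue / 10: past it, one more digit must overflow
      if (agg < -214748364 || (agg == -214748364 && d > 8)) return None
      agg = agg * 10 - d
      i += 1
    }
    if (negative) Some(agg)             // agg already holds the negated magnitude
    else if (agg == Int.MinValue) None  // "2147483648" has no positive Int
    else Some(-agg)
  }
  parseIntNeg("-2147483648")  // Some(-2147483648)
  parseIntNeg("2147483648")   // None: one past Int.MaxValue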
- if (format.charAt(startIndex) == '.') { - (len > 1) && forAllBetween(startIndex + 1, endIndex, isHexDigit) - } else { - val noLeading = skipIndexWhile(isHexDigit, startIndex, endIndex) - (noLeading >= endIndex) || - ((format.charAt(noLeading) == '.') && forAllBetween(noLeading + 1, endIndex, isHexDigit)) - } - } - } - - def postfixOK(startIndex: Int, endIndex: Int): Boolean = - (startIndex < endIndex) && { - (forAllBetween(startIndex, endIndex, isDigit)) || { - val startchar = format.charAt(startIndex) - (startchar == '+' || startchar == '-') && - (endIndex - startIndex > 1) && - forAllBetween(startIndex + 1, endIndex, isDigit) - } - } - // prefix [pP] postfix - val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex) - (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex) - } - - def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { - //invariant: endIndex > startIndex - - def isExp(c: Char): Boolean = c == 'e' || c == 'E' - - def expOK(startIndex: Int, endIndex: Int): Boolean = - (startIndex < endIndex) && { - val startChar = format.charAt(startIndex) - if (startChar == '+' || startChar == '-') - (endIndex > (startIndex + 1)) && - skipIndexWhile(isDigit, startIndex + 1, endIndex) == endIndex - else skipIndexWhile(isDigit, startIndex, endIndex) == endIndex - } - - //the significand can be one of - //* digits.digits - //* .digits - //* digits. - //but not just . - val startChar = format.charAt(startIndex) - if (startChar == '.') { - val noSignificant = skipIndexWhile(isDigit, startIndex + 1, endIndex) - // a digit is required followed by optional exp - (noSignificant > startIndex + 1) && (noSignificant >= endIndex || - isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) - ) - } - else if (isDigit(startChar)) { - // one set of digits, then optionally a period, then optionally another set of digits, then optionally an exponent - val noInt = skipIndexWhile(isDigit, startIndex, endIndex) - // just the digits - (noInt == endIndex) || { - if (format.charAt(noInt) == '.') { - val noSignificant = skipIndexWhile(isDigit, noInt + 1, endIndex) - (noSignificant >= endIndex) || //no exponent - isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) - } else - isExp(format.charAt(noInt)) && expOK(noInt + 1, endIndex) - } - } - else false - } - - //count 0x00 to 0x20 as "whitespace", and nothing else - val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20) - val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1 - - if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false - else { - //all formats can have a sign - val unsigned = { - val startchar = format.charAt(unspacedStart) - if (startchar == '-' || startchar == '+') unspacedStart + 1 else unspacedStart - } - if (unsigned >= unspacedEnd) false - //that's it for NaN and Infinity - else if (format.charAt(unsigned) == 'N') format.substring(unsigned, unspacedEnd) == "NaN" - else if (format.charAt(unsigned) == 'I') format.substring(unsigned, unspacedEnd) == "Infinity" - else { - //all other formats can have a format suffix - val desuffixed = { - val endchar = format.charAt(unspacedEnd - 1) - if (endchar == 'f' || endchar == 'F' || endchar == 'd' || endchar == 'D') unspacedEnd - 1 - else unspacedEnd - } - val len = desuffixed - unsigned - if (len <= 0) false - else if (len >= 2 && (format.charAt(unsigned + 1) == 'x' || format.charAt(unsigned + 1) == 'X')) - format.charAt(unsigned) == '0' &&
isHexFloatLiteral(unsigned + 2, desuffixed) - else isDecFloatLiteral(unsigned, desuffixed) - } - } - } - - @inline - def parseFloat(from: String): Option[Float] = - if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from)) - else None - - @inline - def parseDouble(from: String): Option[Double] = - if (checkFloatFormat(from)) Some(java.lang.Double.parseDouble(from)) - else None - -} diff --git a/tests/pos-special/stdlib/collection/View.scala b/tests/pos-special/stdlib/collection/View.scala index d91fc0c49939..85910311a4c3 100644 --- a/tests/pos-special/stdlib/collection/View.scala +++ b/tests/pos-special/stdlib/collection/View.scala @@ -78,7 +78,7 @@ object View extends IterableFactory[View] { def empty[A]: View[A] = Empty - def newBuilder[sealed A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) + def newBuilder[A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) override def apply[A](xs: A*): View[A] = new Elems(xs: _*) diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala deleted file mode 100644 index 0f3830e9fe25..000000000000 --- a/tests/pos-special/stdlib/collection/WithFilter.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods - * of trait `Iterable`. - * - * @tparam A Element type (e.g. `Int`) - * @tparam CC Collection type constructor (e.g. `List`) - * - * @define coll collection - */ -@SerialVersionUID(3L) -abstract class WithFilter[+A, +CC[_]] extends Serializable { - this: WithFilter[A, CC]^ => - - /** Builds a new collection by applying a function to all elements of the - * `filtered` outer $coll. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @return a new $coll resulting from applying - * the given function `f` to each element of the filtered outer $coll - * and collecting the results. - */ - def map[B](f: A => B): CC[B]^{this, f} - - /** Builds a new collection by applying a function to all elements of the - * `filtered` outer $coll containing this `WithFilter` instance that satisfy the predicate `p`. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @return a new $coll resulting from applying - * the given collection-valued function `f` to each element - * of the filtered outer $coll and - * concatenating the results. - */ - def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} - - /** Applies a function `f` to all elements of the `filtered` outer $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - */ - def foreach[U](f: A => U): Unit - - /** Further refines the filter for this `filtered` $coll. - * - * @param q the predicate used to test elements.
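 - * For example, `xs.withFilter(p).withFilter(q).map(f)` applies `f` only to - * elements that satisfy both `p` and `q`, without building an intermediate - * collection (a sketch; `xs`, `p`, `q` and `f` are arbitrary).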
- * @return an object of class `WithFilter`, which supports - * `map`, `flatMap`, `foreach`, and `withFilter` operations. - * All these operations apply to those elements of this $coll which - * also satisfy both `p` and `q` predicates. - */ - def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} - -} diff --git a/tests/pos-special/stdlib/collection/concurrent/Map.scala b/tests/pos-special/stdlib/collection/concurrent/Map.scala deleted file mode 100644 index d985dad2edc5..000000000000 --- a/tests/pos-special/stdlib/collection/concurrent/Map.scala +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.concurrent - -import language.experimental.captureChecking -import scala.annotation.tailrec - -/** A template trait for mutable maps that allow concurrent access. - * - * $concurrentmapinfo - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]] - * section on `Concurrent Maps` for more information. - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @define Coll `concurrent.Map` - * @define coll concurrent map - * @define concurrentmapinfo - * This is a base trait for all Scala concurrent map implementations. It - * provides all of the methods a `Map` does, with the difference that all the - * changes are atomic. It also describes methods specific to concurrent maps. - * - * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values. - * - * @define atomicop - * This is an atomic operation. - */ -trait Map[K, V] extends scala.collection.mutable.Map[K, V] { - - /** - * Associates the given key with a given value, unless the key was already - * associated with some other value. - * - * $atomicop - * - * @param k key with which the specified value is to be associated - * @param v value to be associated with the specified key - * @return `Some(oldvalue)` if there was a value `oldvalue` previously - * associated with the specified key, or `None` if there was no - * mapping for the specified key - */ - def putIfAbsent(k: K, v: V): Option[V] - - /** - * Removes the entry for the specified key if it's currently mapped to the - * specified value. - * - * $atomicop - * - * @param k key for which the entry should be removed - * @param v value expected to be associated with the specified key if - * the removal is to take place - * @return `true` if the removal took place, `false` otherwise - */ - def remove(k: K, v: V): Boolean - - /** - * Replaces the entry for the given key only if it was previously mapped to - * a given value. - * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param oldvalue value expected to be associated with the specified key - * if replacing is to happen - * @param newvalue value to be associated with the specified key - * @return `true` if the entry was replaced, `false` otherwise - */ - def replace(k: K, oldvalue: V, newvalue: V): Boolean - - /** - * Replaces the entry for the given key only if it was previously mapped - * to some value.
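 - * For a map `m` this is equivalent to - * {{{ - * if (m.contains(k)) m.put(k, v) else None - * }}} - * except that the action is performed atomically.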
- * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param v value to be associated with the specified key - * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise - */ - def replace(k: K, v: V): Option[V] - - override def getOrElseUpdate(key: K, op: => V): V = get(key) match { - case Some(v) => v - case None => - val v = op - putIfAbsent(key, v) match { - case Some(ov) => ov - case None => v - } - } - - /** - * Removes the entry for the specified key if it's currently mapped to the - * specified value. Comparison to the specified value is done using reference - * equality. - * - * Not all map implementations can support removal based on reference - * equality, and for those implementations, object equality is used instead. - * - * $atomicop - * - * @param k key for which the entry should be removed - * @param v value expected to be associated with the specified key if - * the removal is to take place - * @return `true` if the removal took place, `false` otherwise - */ - // TODO: make part of the API in a future version - private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v) - - /** - * Replaces the entry for the given key only if it was previously mapped to - * a given value. Comparison to the specified value is done using reference - * equality. - * - * Not all map implementations can support replacement based on reference - * equality, and for those implementations, object equality is used instead. - * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param oldValue value expected to be associated with the specified key - * if replacing is to happen - * @param newValue value to be associated with the specified key - * @return `true` if the entry was replaced, `false` otherwise - */ - // TODO: make part of the API in a future version - private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) - - /** - * Update a mapping for the specified key and its current optionally-mapped value - * (`Some` if there is current mapping, `None` if not). - * - * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. - * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). - * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. - * - * If the map is updated by another concurrent access, the remapping function will be retried until successfully updated. 
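 - * - * For example, a minimal sketch that atomically increments a counter (`counters` stands - * for any `concurrent.Map[String, Int]`): - * {{{ - * counters.updateWith("hits") { - * case Some(n) => Some(n + 1) - * case None => Some(1) - * } - * }}}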
- * - * @param key the key value - * @param remappingFunction a function that receives the current optionally-mapped value and returns a new mapping - * @return the new value associated with the specified key - */ - override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = updateWithAux(key)(remappingFunction) - - @tailrec - private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - val previousValue = get(key) - val nextValue = remappingFunction(previousValue) - previousValue match { - case Some(prev) => nextValue match { - case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue - case _ => if (removeRefEq(key, prev)) return None - } - case _ => nextValue match { - case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue - case _ => return None - } - } - updateWithAux(key)(remappingFunction) - } - - private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { - val it = iterator - while (it.hasNext) { - val (k, v) = it.next() - if (!p(k, v)) removeRefEq(k, v) - } - this - } - - private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { - val it = iterator - while (it.hasNext) { - val (k, v) = it.next() - replaceRefEq(k, v, f(k, v)) - } - this - } -} diff --git a/tests/pos-special/stdlib/collection/generic/BitOperations.scala b/tests/pos-special/stdlib/collection/generic/BitOperations.scala deleted file mode 100644 index f76619a004fa..000000000000 --- a/tests/pos-special/stdlib/collection/generic/BitOperations.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package generic -import language.experimental.captureChecking - - -/** Some bit operations. - * - * See [[https://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for - * an explanation of unsignedCompare.
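 - * - * For instance, `unsignedCompare(-1, 1)` is `false`: read as unsigned, `-1` is the largest - * 32-bit value (`0xFFFFFFFF`) and is therefore not less than `1`.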
- */ -private[collection] object BitOperations { - trait Int { - type Int = scala.Int - def zero(i: Int, mask: Int) = (i & mask) == 0 - def mask(i: Int, mask: Int) = i & (complement(mask - 1) ^ mask) - def hasMatch(key: Int, prefix: Int, m: Int) = mask(key, m) == prefix - def unsignedCompare(i: Int, j: Int) = (i < j) ^ (i < 0) ^ (j < 0) - def shorter(m1: Int, m2: Int) = unsignedCompare(m2, m1) - def complement(i: Int) = (-1) ^ i - def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0) - def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep - def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j) - } - object Int extends Int - - trait Long { - type Long = scala.Long - def zero(i: Long, mask: Long) = (i & mask) == 0L - def mask(i: Long, mask: Long) = i & (complement(mask - 1) ^ mask) - def hasMatch(key: Long, prefix: Long, m: Long) = mask(key, m) == prefix - def unsignedCompare(i: Long, j: Long) = (i < j) ^ (i < 0L) ^ (j < 0L) - def shorter(m1: Long, m2: Long) = unsignedCompare(m2, m1) - def complement(i: Long) = (-1L) ^ i - def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L) - def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep - def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j) - } - object Long extends Long -} diff --git a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala deleted file mode 100644 index 7eba9433b8d5..000000000000 --- a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.generic - -import java.io.{ObjectInputStream, ObjectOutputStream} - -import scala.collection.{Factory, Iterable} -import scala.collection.mutable.Builder -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** The default serialization proxy for collection implementations. - * - * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` - * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed - * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any - * additional state required to create the proper `Builder` needs to be captured by the `factory`. 
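 - * - * A custom collection might wire this up along the following lines (a sketch; `MyColl` and - * its `Factory`-valued `MyColl.factory` are hypothetical): - * {{{ - * class MyColl[A] extends Iterable[A] with Serializable { - * def iterator: Iterator[A] = ??? - * protected[this] def writeReplace(): AnyRef = - * new DefaultSerializationProxy(MyColl.factory[A], this) - * } - * }}}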
- */ -@SerialVersionUID(3L) -final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { - - @transient protected var builder: Builder[A @uncheckedCaptures, Any] = _ - // @uncheckedCaptures OK since builder is used only locally when reading objects - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - val k = coll.knownSize - out.writeInt(k) - var count = 0 - coll.foreach { x => - out.writeObject(x) - count += 1 - } - if(k >= 0) { - if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") - } else out.writeObject(SerializeEnd) - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - builder = factory.newBuilder - val k = in.readInt() - if(k >= 0) { - builder.sizeHint(k) - var count = 0 - while(count < k) { - builder += in.readObject().asInstanceOf[A] - count += 1 - } - } else { - while (true) in.readObject match { - case SerializeEnd => return - case a => builder += a.asInstanceOf[A] - } - } - } - - protected[this] def readResolve(): Any = builder.result() -} - -@SerialVersionUID(3L) -private[collection] case object SerializeEnd - -/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type - * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or - * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement - * it directly without using this trait if you need a non-standard factory or if you want to use a different - * serialization scheme. - */ -trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => - protected[this] def writeReplace(): AnyRef = { - val f: Factory[Any, Any] = this match { - case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] - case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] - case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]]) - case it => it.iterableFactory.iterableFactory - } - new DefaultSerializationProxy(f, this) - } -} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterable.scala b/tests/pos-special/stdlib/collection/generic/IsIterable.scala deleted file mode 100644 index c309299b615b..000000000000 --- a/tests/pos-special/stdlib/collection/generic/IsIterable.scala +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package generic -import language.experimental.captureChecking - -/** A trait which can be used to avoid code duplication when defining extension - * methods that should be applicable both to existing Scala collections (i.e., - * types extending `Iterable`) as well as other (potentially user-defined) - * types that could be converted to a Scala collection type. This trait - * makes it possible to treat Scala collections and types that can be implicitly - * converted to a collection type uniformly. 
For example, one can provide - * extension methods that work both on collection types and on `String`s (`String`s - * do not extend `Iterable`, but can be converted to `Iterable`) - * - * `IsIterable` provides three members: - * - * 1. type member `A`, which represents the element type of the target `Iterable[A]` - * 1. type member `C`, which represents the type returned by transformation operations that preserve the collection’s elements type - * 1. method `apply`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `IterableOps[A, Iterable, C]`. - * - * ===Usage=== - * - * One must provide `IsIterable` as an implicit parameter type of an implicit - * conversion. Its usage is shown below. Our objective in the following example - * is to provide a generic extension method `mapReduce` to any type that extends - * or can be converted to `Iterable`. In our example, this includes - * `String`. - * - * {{{ - * import scala.collection.{Iterable, IterableOps} - * import scala.collection.generic.IsIterable - * - * class ExtensionMethods[Repr, I <: IsIterable[Repr]](coll: Repr, it: I) { - * def mapReduce[B](mapper: it.A => B)(reducer: (B, B) => B): B = { - * val iter = it(coll).iterator - * var res = mapper(iter.next()) - * while (iter.hasNext) - * res = reducer(res, mapper(iter.next())) - * res - * } - * } - * - * implicit def withExtensions[Repr](coll: Repr)(implicit it: IsIterable[Repr]): ExtensionMethods[Repr, it.type] = - * new ExtensionMethods(coll, it) - * - * // See it in action! - * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12 - * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59 - *}}} - * - * Here, we begin by creating a class `ExtensionMethods` which contains our - * `mapReduce` extension method. - * - * Note that `ExtensionMethods` takes a constructor argument `coll` of type `Repr`, where - * `Repr` represents (typically) the collection type, and an argument `it` of a subtype of `IsIterable[Repr]`. - * The body of the method starts by converting the `coll` argument to an `IterableOps` in order to - * call the `iterator` method on it. - * The remainder of the implementation is straightforward. - * - * The `withExtensions` implicit conversion makes the `mapReduce` operation available - * on any type `Repr` for which there exists an implicit `IsIterable[Repr]` instance. - * Note how we keep track of the precise type of the implicit `it` argument by using the - * `it.type` singleton type, rather than the wider `IsIterable[Repr]` type. We do that - * so that the information carried by the type members `A` and `C` of the `it` argument - * is not lost. - * - * When the `mapReduce` method is called on some type of which it is not - * a member, implicit search is triggered. Because implicit conversion - * `withExtensions` is generic, it will be applied as long as an implicit - * value of type `IsIterable[Repr]` can be found. Given that the - * `IsIterable` companion object contains implicit members that return values of type - * `IsIterable`, this requirement is typically satisfied, and the chain - * of interactions described in the previous paragraph is set into action. - * (See the `IsIterable` companion object, which contains a precise - * specification of the available implicits.) - * - * ''Note'': Currently, it's not possible to combine the implicit conversion and - * the class with the extension methods into an implicit class due to - * limitations of type inference.
- * - * ===Implementing `IsIterable` for New Types=== - * - * One must simply provide an implicit value of type `IsIterable` - * specific to the new type, or an implicit conversion which returns an - * instance of `IsIterable` specific to the new type. - * - * Below is an example of an implementation of the `IsIterable` trait - * where the `Repr` type is `Range`. - * - *{{{ - * implicit val rangeRepr: IsIterable[Range] { type A = Int; type C = IndexedSeq[Int] } = - * new IsIterable[Range] { - * type A = Int - * type C = IndexedSeq[Int] - * def apply(coll: Range): IterableOps[Int, IndexedSeq, IndexedSeq[Int]] = coll - * } - *}}} - * - * (Note that in practice the `IsIterable[Range]` instance is already provided by - * the standard library, and it is defined as an `IsSeq[Range]` instance) - */ -trait IsIterable[Repr] extends IsIterableOnce[Repr] { - - /** The type returned by transformation operations that preserve the same elements - * type (e.g. `filter`, `take`). - * - * In practice, this type is often `Repr` itself, except in the case - * of `SeqView[A]` (and other `View[A]` subclasses), where it is “only” `View[A]`. - */ - type C - - @deprecated("'conversion' is now a method named 'apply'", "2.13.0") - override val conversion: Repr => IterableOps[A, Iterable, C] = apply(_) - - /** A conversion from the type `Repr` to `IterableOps[A, Iterable, C]` */ - def apply(coll: Repr): IterableOps[A, Iterable, C] - -} - -object IsIterable extends IsIterableLowPriority { - - // Straightforward case: IterableOps subclasses - implicit def iterableOpsIsIterable[A0, CC0[X] <: IterableOps[X, Iterable, CC0[X]]]: IsIterable[CC0[A0]] { type A = A0; type C = CC0[A0] } = - new IsIterable[CC0[A0]] { - type A = A0 - type C = CC0[A0] - def apply(coll: CC0[A]): IterableOps[A, Iterable, C] = coll - } - - // The `BitSet` type can not be unified with the `CC0` parameter of - // the above definition because it does not take a type parameter. - // Hence the need for a separate case: - implicit def bitSetOpsIsIterable[C0 <: BitSet with BitSetOps[C0]]: IsIterable[C0] { type A = Int; type C = C0 } = - new IsIterable[C0] { - type A = Int - type C = C0 - def apply(coll: C0): IterableOps[Int, Iterable, C0] = coll - } - -} - -trait IsIterableLowPriority { - - // Makes `IsSeq` instances visible in `IsIterable` companion - implicit def isSeqLikeIsIterable[Repr](implicit - isSeqLike: IsSeq[Repr] - ): IsIterable[Repr] { type A = isSeqLike.A; type C = isSeqLike.C } = isSeqLike - - // Makes `IsMap` instances visible in `IsIterable` companion - implicit def isMapLikeIsIterable[Repr](implicit - isMapLike: IsMap[Repr] - ): IsIterable[Repr] { type A = isMapLike.A; type C = isMapLike.C } = isMapLike - -} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala deleted file mode 100644 index 2836ca2bb520..000000000000 --- a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership.
- */ - -package scala -package collection -package generic -import language.experimental.captureChecking - -/** Type class witnessing that a collection representation type `Repr` has - * elements of type `A` and has a conversion to `IterableOnce[A]`. - * - * This type enables simple enrichment of `IterableOnce`s with extension - * methods which can make full use of the mechanics of the Scala collections - * framework in their implementation. - * - * Example usage, - * {{{ - * class FilterMapImpl[Repr, I <: IsIterableOnce[Repr]](coll: Repr, it: I) { - * final def filterMap[B, That](f: it.A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = { - * val b = bf.newBuilder(coll) - * for(e <- it(coll).iterator) f(e) foreach (b +=) - * b.result() - * } - * } - * implicit def filterMap[Repr](coll: Repr)(implicit it: IsIterableOnce[Repr]): FilterMapImpl[Repr, it.type] = - * new FilterMapImpl(coll, it) - * - * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) - * // == List(2, 4) - * }}} - */ -trait IsIterableOnce[Repr] { - - /** The type of elements we can traverse over (e.g. `Int`). */ - type A - - @deprecated("'conversion' is now a method named 'apply'", "2.13.0") - val conversion: Repr => IterableOnce[A] = apply(_) - - /** A conversion from the representation type `Repr` to a `IterableOnce[A]`. */ - def apply(coll: Repr): IterableOnce[A] - -} - -object IsIterableOnce extends IsIterableOnceLowPriority { - - // Straightforward case: IterableOnce subclasses - implicit def iterableOnceIsIterableOnce[CC0[A] <: IterableOnce[A], A0]: IsIterableOnce[CC0[A0]] { type A = A0 } = - new IsIterableOnce[CC0[A0]] { - type A = A0 - def apply(coll: CC0[A0]): IterableOnce[A0] = coll - } - -} - -trait IsIterableOnceLowPriority { - - // Makes `IsIterable` instance visible in `IsIterableOnce` companion - implicit def isIterableLikeIsIterableOnce[Repr](implicit - isIterableLike: IsIterable[Repr] - ): IsIterableOnce[Repr] { type A = isIterableLike.A } = isIterableLike - -} diff --git a/tests/pos-special/stdlib/collection/generic/IsMap.scala b/tests/pos-special/stdlib/collection/generic/IsMap.scala deleted file mode 100644 index ad7254d2dd61..000000000000 --- a/tests/pos-special/stdlib/collection/generic/IsMap.scala +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package generic - -import IsMap.Tupled -import scala.collection.immutable.{IntMap, LongMap} -import language.experimental.captureChecking - -/** - * Type class witnessing that a collection type `Repr` - * has keys of type `K`, values of type `V` and has a conversion to - * `MapOps[K, V, Iterable, C]`, for some types `K`, `V` and `C`. - * - * This type enables simple enrichment of `Map`s with extension methods. - * - * @see [[scala.collection.generic.IsIterable]] - * @tparam Repr Collection type (e.g. `Map[Int, String]`) - */ -trait IsMap[Repr] extends IsIterable[Repr] { - - /** The type of keys */ - type K - - /** The type of values */ - type V - - type A = (K, V) - - /** A conversion from the type `Repr` to `MapOps[K, V, Iterable, C]` - * - * @note The third type parameter of the returned `MapOps` value is - * still `Iterable` (and not `Map`) because `MapView[K, V]` only - * extends `MapOps[K, V, View, View[A]]`. 
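 - * - * As with `IsIterable`, this conversion enables writing extension methods that work on any - * map-like type, e.g. (a sketch): - * {{{ - * class MapDecorator[Repr, M <: IsMap[Repr]](coll: Repr, m: M) { - * def firstKey: Option[m.K] = m(coll).keysIterator.nextOption() - * } - * implicit def decorate[Repr](coll: Repr)(implicit m: IsMap[Repr]): MapDecorator[Repr, m.type] = - * new MapDecorator(coll, m) - * }}}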
- */ - override def apply(c: Repr): MapOps[K, V, Tupled[Iterable]#Ap, C] - -} - -object IsMap { - - /** Convenient type level function that takes a unary type constructor `F[_]` - * and returns a binary type constructor that tuples its parameters and passes - * them to `F`. - * - * `Tupled[F]#Ap` is equivalent to `({ type Ap[X, +Y] = F[(X, Y)] })#Ap`. - */ - type Tupled[F[+_]] = { type Ap[X, Y] = F[(X, Y)] } - - // Map collections - implicit def mapOpsIsMap[CC0[X, Y] <: MapOps[X, Y, Tupled[Iterable]#Ap, CC0[X, Y]], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = CC0[K, V] } = - new IsMap[CC0[K0, V0]] { - type K = K0 - type V = V0 - type C = CC0[K0, V0] - def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, C] = c - } - - // MapView - implicit def mapViewIsMap[CC0[X, Y] <: MapView[X, Y], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = View[(K0, V0)] } = - new IsMap[CC0[K0, V0]] { - type K = K0 - type V = V0 - type C = View[(K, V)] - def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, View[(K0, V0)]] = c - } - - // AnyRefMap has stricter bounds than the ones used by the mapOpsIsMap definition - implicit def anyRefMapIsMap[K0 <: AnyRef, V0]: IsMap[mutable.AnyRefMap[K0, V0]] { type K = K0; type V = V0; type C = mutable.AnyRefMap[K0, V0] } = - new IsMap[mutable.AnyRefMap[K0, V0]] { - type K = K0 - type V = V0 - type C = mutable.AnyRefMap[K0, V0] - def apply(c: mutable.AnyRefMap[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, mutable.AnyRefMap[K0, V0]] = c - } - - // IntMap takes one type parameter only whereas mapOpsIsMap uses a parameter CC0 with two type parameters - implicit def intMapIsMap[V0]: IsMap[IntMap[V0]] { type K = Int; type V = V0; type C = IntMap[V0] } = - new IsMap[IntMap[V0]] { - type K = Int - type V = V0 - type C = IntMap[V0] - def apply(c: IntMap[V0]): MapOps[Int, V0, Tupled[Iterable]#Ap, IntMap[V0]] = c - } - - // LongMap is in a similar situation as IntMap - implicit def longMapIsMap[V0]: IsMap[LongMap[V0]] { type K = Long; type V = V0; type C = LongMap[V0] } = - new IsMap[LongMap[V0]] { - type K = Long - type V = V0 - type C = LongMap[V0] - def apply(c: LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, LongMap[V0]] = c - } - - // mutable.LongMap is in a similar situation as LongMap and IntMap - implicit def mutableLongMapIsMap[V0]: IsMap[mutable.LongMap[V0]] { type K = Long; type V = V0; type C = mutable.LongMap[V0] } = - new IsMap[mutable.LongMap[V0]] { - type K = Long - type V = V0 - type C = mutable.LongMap[V0] - def apply(c: mutable.LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, mutable.LongMap[V0]] = c - } - - -} diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala deleted file mode 100644 index 8ad344c4d4fc..000000000000 --- a/tests/pos-special/stdlib/collection/generic/IsSeq.scala +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection -package generic - -import scala.reflect.ClassTag -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** Type class witnessing that a collection representation type `Repr` has - * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for - * some types `A` and `C`. - * - * This type enables simple enrichment of `Seq`s with extension methods which - * can make full use of the mechanics of the Scala collections framework in - * their implementation. - * - * @see [[scala.collection.generic.IsIterable]] - */ -trait IsSeq[Repr] extends IsIterable[Repr] { - - @deprecated("'conversion' is now a method named 'apply'", "2.13.0") - override val conversion: Repr => SeqOps[A, Iterable, C] = apply(_) - - /** A conversion from the type `Repr` to `SeqOps[A, Iterable, C]` - * - * @note The second type parameter of the returned `SeqOps` value is - * still `Iterable` (and not `Seq`) because `SeqView[A]` only - * extends `SeqOps[A, View, View[A]]`. - */ - def apply(coll: Repr): SeqOps[A, Iterable, C] -} - -object IsSeq { - - private val seqOpsIsSeqVal: IsSeq[Seq[Any]] = - new IsSeq[Seq[Any]] { - type A = Any - type C = Any - def apply(coll: Seq[Any]): SeqOps[Any, Iterable, Any] = coll - } - - implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = - seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] - - /** !!! Under cc, views are not Seqs and can't use SeqOps. - * So this should be renamed to seqViewIsIterable - */ - implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsIterable[CC0[A0]] { type A = A0; type C = View[A0] } = - new IsIterable[CC0[A0]] { - type A = A0 - type C = View[A] - def apply(coll: CC0[A0]): IterableOps[A0, View, View[A0]] = coll - } - - /** !!! Under cc, views are not Seqs and can't use SeqOps.
- * So this should be renamed to stringViewIsIterable - */ - implicit val stringViewIsSeq: IsIterable[StringView] { type A = Char; type C = View[Char] } = - new IsIterable[StringView] { - type A = Char - type C = View[Char] - def apply(coll: StringView): IterableOps[Char, View, View[Char]] = coll - } - - implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = - new IsSeq[String] { - type A = Char - type C = String - def apply(s: String): SeqOps[Char, immutable.IndexedSeq, String] = - new SeqOps[Char, immutable.ArraySeq, String] { - def length: Int = s.length - def apply(i: Int): Char = s.charAt(i) - def toIterable: Iterable[Char] = new immutable.WrappedString(s) - protected[this] def coll: String = s - protected[this] def fromSpecific(coll: IterableOnce[Char]^): String = coll.iterator.mkString - def iterableFactory: FreeSeqFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged - override def empty: String = "" - protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder - def iterator: Iterator[Char] = s.iterator - } - } - - implicit def arrayIsSeq[sealed A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = - new IsSeq[Array[A0]] { - type A = A0 - type C = Array[A0] - def apply(a: Array[A0]): SeqOps[A0, Seq, Array[A0]] = - new SeqOps[A, mutable.ArraySeq, Array[A]] { - def apply(i: Int): A = a(i) - def length: Int = a.length - def toIterable: Iterable[A] = mutable.ArraySeq.make[A @uncheckedCaptures](a) - protected def coll: Array[A] = a - protected def fromSpecific(coll: IterableOnce[A]^): Array[A] = Array.from(coll) - def iterableFactory: FreeSeqFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged - override def empty: Array[A] = Array.empty[A] - protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder - def iterator: Iterator[A] = a.iterator - } - } - - // `Range` can not be unified with the `CC0` parameter of the - // `seqOpsIsSeq` definition because it does not take a type parameter. - // Hence the need for a separate case: - implicit def rangeIsSeq[C0 <: Range]: IsSeq[C0] { type A = Int; type C = immutable.IndexedSeq[Int] } = - new IsSeq[C0] { - type A = Int - type C = immutable.IndexedSeq[Int] - def apply(coll: C0): SeqOps[Int, Seq, immutable.IndexedSeq[Int]] = coll - } - -} diff --git a/tests/pos-special/stdlib/collection/generic/Subtractable.scala b/tests/pos-special/stdlib/collection/generic/Subtractable.scala deleted file mode 100644 index 2c0967dbaf4b..000000000000 --- a/tests/pos-special/stdlib/collection/generic/Subtractable.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic -import language.experimental.captureChecking - -/** This trait represents collection-like objects that can be reduced - * using a '-' operator. It defines variants of `-` and `--` - * as convenience methods in terms of single-element removal `-`. - * - * @tparam A the type of the elements of the $coll. - * @tparam Repr the type of the $coll itself - * @define coll collection - * @define Coll Subtractable - */ -@deprecated("Subtractable is deprecated.
This is now implemented as part of SetOps, MapOps, etc.", "2.13.0") -trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self => - - /** The representation object of type `Repr` which contains the collection's elements - */ - protected def repr: Repr - - /** Creates a new $coll from this $coll with an element removed. - * @param elem the element to remove - * @return a new collection that contains all elements of the current $coll - * except one less occurrence of `elem`. - */ - def -(elem: A): Repr - - /** Creates a new $coll from this $coll with some elements removed. - * - * This method takes two or more elements to be removed. Another overloaded - * variant of this method handles the case where a single element is - * removed. - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return a new $coll that contains all elements of the current $coll - * except one less occurrence of each of the given elements. - */ - def -(elem1: A, elem2: A, elems: A*): Repr = - this - elem1 - elem2 -- elems - - /** Creates a new $coll from this $coll by removing all elements of another - * collection. - * - * @param xs the collection containing the removed elements. - * @return a new $coll that contains all elements of the current $coll - * except one less occurrence of each of the elements of `xs`. - */ - def --(xs: IterableOnce[A]): Repr = (repr /: xs.iterator) (_ - _) -} diff --git a/tests/pos-special/stdlib/collection/generic/package.scala b/tests/pos-special/stdlib/collection/generic/package.scala deleted file mode 100644 index 0ba67c1bf76e..000000000000 --- a/tests/pos-special/stdlib/collection/generic/package.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - - -package object generic { - @deprecated("Clearable was moved from collection.generic to collection.mutable", "2.13.0") - type Clearable = scala.collection.mutable.Clearable - - @deprecated("Use scala.collection.BuildFrom instead", "2.13.0") - type CanBuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C] - - @deprecated("Growable was moved from collection.generic to collection.mutable", "2.13.0") - type Growable[-A] = scala.collection.mutable.Growable[A] - - @deprecated("Shrinkable was moved from collection.generic to collection.mutable", "2.13.0") - type Shrinkable[-A] = scala.collection.mutable.Shrinkable[A] - - @deprecated("Use IsIterable instead", "2.13.0") - type IsTraversableLike[Repr] = IsIterable[Repr] - - @deprecated("Use IsIterableOnce instead", "2.13.0") - type IsTraversableOnce[Repr] = IsIterableOnce[Repr] -} diff --git a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala deleted file mode 100644 index 3a221fc76b6c..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala +++ /dev/null @@ -1,692 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0).
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import java.util.Arrays - -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.Stepper.EfficientSplit -import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ArraySeq => MutableArraySeq} -import scala.collection.convert.impl._ -import scala.reflect.ClassTag -import scala.runtime.ScalaRunTime -import scala.util.Sorting -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** - * An immutable array. - * - * Supports efficient indexed access and has a small memory footprint. - * - * @define coll immutable array - * @define Coll `ArraySeq` - */ -sealed abstract class ArraySeq[+A] - extends AbstractSeq[A] - with IndexedSeq[A] - with IndexedSeqOps[A, ArraySeq, ArraySeq[A]] - with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]] - with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag] - with Serializable - with Pure { - - /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive - * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype - * or subtype of the element type. */ - protected def elemTag: ClassTag[_] - - override def iterableFactory: SeqFactory[ArraySeq] = ArraySeq.untagged - - /** The wrapped mutable `Array` that backs this `ArraySeq`. Any changes to this array will break - * the expected immutability. Its element type does not have to be equal to the element type of this ArraySeq. - * A primitive ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an - * array of a supertype or subtype of the element type. */ - def unsafeArray: Array[_] - - def unsafeArrayAsAnyArray = unsafeArray.asInstanceOf[Array[Any]] - - protected def evidenceIterableFactory: ArraySeq.type = ArraySeq - protected def iterableEvidence: ClassTag[A @uncheckedVariance @uncheckedCaptures] = elemTag.asInstanceOf[ClassTag[A]] - - def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit - - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): A - - override def updated[B >: A](index: Int, elem: B): ArraySeq[B] = { - val dest = new Array[Any](length) - Array.copy(unsafeArray, 0, dest, 0, length) - dest(index) = elem - ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]] - } - - override def map[B](f: A => B): ArraySeq[B] = { - val a = new Array[Any](size) - var i = 0 - while (i < a.length){ - a(i) = f(apply(i)) - i += 1 - } - ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] - } - - override def prepended[B >: A](elem: B): ArraySeq[B] = - ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.prepended(elem)).asInstanceOf[ArraySeq[B]] - - override def appended[B >: A](elem: B): ArraySeq[B] = - ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]] - - /** Fast concatenation of two [[ArraySeq]]s. - * - * @return null if optimisation not possible. 
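 - * - * For example, `ArraySeq(1, 2) ++ ArraySeq(3)` concatenates the two backing primitive arrays - * directly, whereas appending an `ofRef` to an `ofInt` returns null here and falls back to - * the generic builder path in `appendedAll`.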
- */ - private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] = { - // Optimise concatenation of two ArraySeqs - // For ArraySeqs with sizes of [100, 1000, 10000] this is [3.5, 4.1, 5.2]x as fast - if (isEmpty) - that - else if (that.isEmpty) - this - else { - val thisIsObj = this.unsafeArray.isInstanceOf[Array[AnyRef]] - val thatIsObj = that.unsafeArray.isInstanceOf[Array[AnyRef]] - val mismatch = thisIsObj != thatIsObj - if (mismatch) - // Combining primitives and objects: abort - null - else if (thisIsObj) { - // A and B are objects - val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] - val ay = that.unsafeArray.asInstanceOf[Array[B @uncheckedCaptures]] - val len = ax.length + ay.length - val a = new Array[AnyRef](len) - System.arraycopy(ax, 0, a, 0, ax.length) - System.arraycopy(ay, 0, a, ax.length, ay.length) - ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] - } else { - // A is a primitive and B = A. Use this instance's protected ClassTag. - val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] - val ay = that.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] - val len = ax.length + ay.length - val a = iterableEvidence.newArray(len) - System.arraycopy(ax, 0, a, 0, ax.length) - System.arraycopy(ay, 0, a, ax.length, ay.length) - ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] - } - } - } - - override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): ArraySeq[B] = { - def genericResult = { - val k = suffix.knownSize - if (k == 0) this - else { - val b = ArrayBuilder.make[Any] - if(k >= 0) b.sizeHint(k + unsafeArray.length) - b.addAll(unsafeArray) - b.addAll(suffix) - ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] - } - } - - suffix match { - case that: ArraySeq[_] => - val result = appendedAllArraySeq(that.asInstanceOf[ArraySeq[B]]) - if (result == null) genericResult - else result - case _ => - genericResult - } - } - - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): ArraySeq[B] = { - def genericResult = { - val k = prefix.knownSize - if (k == 0) this - else { - val b = ArrayBuilder.make[Any] - if(k >= 0) b.sizeHint(k + unsafeArray.length) - b.addAll(prefix) - if(k < 0) b.sizeHint(b.length + unsafeArray.length) - b.addAll(unsafeArray) - ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] - } - } - - prefix match { - case that: ArraySeq[_] => - val result = that.asInstanceOf[ArraySeq[B]].appendedAllArraySeq(this) - if (result == null) genericResult - else result - case _ => - genericResult - } - } - - override def zip[B](that: collection.IterableOnce[B]^): ArraySeq[(A, B)] = - that match { - case bs: ArraySeq[B] => - ArraySeq.tabulate(length min bs.length) { i => - (apply(i), bs(i)) - } - case _ => - strictOptimizedZip[B, ArraySeq[(A, B)]](that, iterableFactory.newBuilder) - } - - private inline def ops[A](xs: Array[A @uncheckedCaptures]): ArrayOps[A] = new ArrayOps[A @uncheckedCaptures](xs) - - override def take(n: Int): ArraySeq[A] = - if (unsafeArray.length <= n) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).take(n)).asInstanceOf[ArraySeq[A]] - - override def takeRight(n: Int): ArraySeq[A] = - if (unsafeArray.length <= n) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).takeRight(n)).asInstanceOf[ArraySeq[A]] - - override def drop(n: Int): ArraySeq[A] = - if (n <= 0) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).drop(n)).asInstanceOf[ArraySeq[A]] - - override def dropRight(n: Int): ArraySeq[A] = -
if (n <= 0) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).dropRight(n)).asInstanceOf[ArraySeq[A]] - - override def slice(from: Int, until: Int): ArraySeq[A] = - if (from <= 0 && unsafeArray.length <= until) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).slice(from, until)).asInstanceOf[ArraySeq[A]] - - override def foldLeft[B](z: B)(f: (B, A) => B): B = { - // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast - // as the same while-loop over this instead of unsafeArray. - val array = unsafeArray - var b = z - var i = 0 - while (i < array.length) { - val a = array(i).asInstanceOf[A] - b = f(b, a) - i += 1 - } - b - } - - override def foldRight[B](z: B)(f: (A, B) => B): B = { - // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.6, 1.8, 2.7]x as fast - // as the same while-loop over this instead of unsafeArray. - val array = unsafeArray - var b = z - var i = array.length - while (i > 0) { - i -= 1 - val a = array(i).asInstanceOf[A] - b = f(a, b) - } - b - } - - override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).tail).asInstanceOf[ArraySeq[A]] - - override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).reverse).asInstanceOf[ArraySeq[A]] - - override protected[this] def className = "ArraySeq" - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) - if(copied > 0) { - Array.copy(unsafeArray, 0, xs, start, copied) - } - copied - } - - override protected final def applyPreferredMaxLength: Int = Int.MaxValue - - override def sorted[B >: A](implicit ord: Ordering[B]): ArraySeq[A] = - if(unsafeArray.length <= 1) this - else { - val a = Array.copyAs[AnyRef](unsafeArray, length)(ClassTag.AnyRef) - Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) - new ArraySeq.ofRef[AnyRef](a).asInstanceOf[ArraySeq[A]] - } -} - -/** - * $factoryInfo - * @define coll immutable array - * @define Coll `ArraySeq` - */ -@SerialVersionUID(3L) -object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => - val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) - - private[this] lazy val emptyImpl = new ArraySeq.ofRef[Nothing](new Array[Nothing](0)) - - def empty[A : ClassTag]: ArraySeq[A] = emptyImpl - - def from[A](it: scala.collection.IterableOnce[A]^)(implicit tag: ClassTag[A]): ArraySeq[A] = it match { - case as: ArraySeq[A] => as - case _ => unsafeWrapArray(Array.from[A](it)) - } - - def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = - ArrayBuffer.newBuilder[A @uncheckedCaptures].mapResult(b => unsafeWrapArray[A](b.toArray)) - - override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem) - - override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = { - val elements = Array.ofDim[A @uncheckedCaptures](scala.math.max(n, 0)) - var i = 0 - while (i < n) { - ScalaRunTime.array_update(elements, i, f(i)) - i = i + 1 - } - ArraySeq.unsafeWrapArray(elements) - } - - /** - * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type - * without copying. Any changes to wrapped array will break the expected immutability. - * - * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without - * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, - * containing `Integer`s. 
An `ArraySeq[Int]` can be obtained with a cast: - * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still - * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing - * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a - * `ClassCastException` at runtime. - */ - def unsafeWrapArray[T](x: Array[T @uncheckedCaptures]): ArraySeq[T] = ((x: @unchecked) match { - case null => null - case x: Array[AnyRef] => new ofRef[AnyRef](x) - case x: Array[Int] => new ofInt(x) - case x: Array[Double] => new ofDouble(x) - case x: Array[Long] => new ofLong(x) - case x: Array[Float] => new ofFloat(x) - case x: Array[Char] => new ofChar(x) - case x: Array[Byte] => new ofByte(x) - case x: Array[Short] => new ofShort(x) - case x: Array[Boolean] => new ofBoolean(x) - case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[ArraySeq[T]] - - @SerialVersionUID(3L) - final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] { - def elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): T = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any): Boolean = that match { - case that: ofRef[_] => - Array.equals( - this.unsafeArray.asInstanceOf[Array[AnyRef]], - that.unsafeArray.asInstanceOf[Array[AnyRef]]) - case _ => super.equals(that) - } - override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq.ofRef[T] = { - if(unsafeArray.length <= 1) this - else { - val a = unsafeArray.clone() - Arrays.sort(a, ord.asInstanceOf[Ordering[T]]) - new ArraySeq.ofRef(a) - } - } - override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length) - else shape.parUnbox(new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] { - protected def elemTag = ClassTag.Byte - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Byte = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Byte](implicit ord: Ordering[B]): ArraySeq[Byte] = - if(length <= 1) this - else if(ord eq Ordering.Byte) { - val a = unsafeArray.clone() - Arrays.sort(a) - new ArraySeq.ofByte(a) - } else super.sorted[B] - override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Byte](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Byte => new ArraySeq.ofByte(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def 
appended[B >: Byte](elem: B): ArraySeq[B] = - elem match { - case b: Byte => new ArraySeq.ofByte(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Byte](elem: B): ArraySeq[B] = - elem match { - case b: Byte => new ArraySeq.ofByte(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] { - protected def elemTag = ClassTag.Short - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Short = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Short](implicit ord: Ordering[B]): ArraySeq[Short] = - if(length <= 1) this - else if(ord eq Ordering.Short) { - val a = unsafeArray.clone() - Arrays.sort(a) - new ArraySeq.ofShort(a) - } else super.sorted[B] - override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Short](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Short => new ArraySeq.ofShort(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Short](elem: B): ArraySeq[B] = - elem match { - case b: Short => new ArraySeq.ofShort(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Short](elem: B): ArraySeq[B] = - elem match { - case b: Short => new ArraySeq.ofShort(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] { - protected def elemTag = ClassTag.Char - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Char = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Char](implicit ord: Ordering[B]): ArraySeq[Char] = - if(length <= 1) this - else if(ord eq Ordering.Char) { - val a = unsafeArray.clone() - Arrays.sort(a) - new ArraySeq.ofChar(a) - } else super.sorted[B] - override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Char](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Char => new ArraySeq.ofChar(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Char](elem: B): ArraySeq[B] = - elem match { - case b: Char => new 
ArraySeq.ofChar(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Char](elem: B): ArraySeq[B] = - elem match { - case b: Char => new ArraySeq.ofChar(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - - override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = - (new MutableArraySeq.ofChar(unsafeArray)).addString(sb, start, sep, end) - } - - @SerialVersionUID(3L) - final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] { - protected def elemTag = ClassTag.Int - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Int = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Int](implicit ord: Ordering[B]): ArraySeq[Int] = - if(length <= 1) this - else if(ord eq Ordering.Int) { - val a = unsafeArray.clone() - Arrays.sort(a) - new ArraySeq.ofInt(a) - } else super.sorted[B] - override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new IntArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new IntArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Int](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Int => new ArraySeq.ofInt(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Int](elem: B): ArraySeq[B] = - elem match { - case b: Int => new ArraySeq.ofInt(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Int](elem: B): ArraySeq[B] = - elem match { - case b: Int => new ArraySeq.ofInt(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] { - protected def elemTag = ClassTag.Long - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Long = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Long](implicit ord: Ordering[B]): ArraySeq[Long] = - if(length <= 1) this - else if(ord eq Ordering.Long) { - val a = unsafeArray.clone() - Arrays.sort(a) - new ArraySeq.ofLong(a) - } else super.sorted[B] - override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParLongStepper(new LongArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new LongArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Long](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Long => new ArraySeq.ofLong(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Long](elem: B): ArraySeq[B] = - elem match { - case b: Long => new 
ArraySeq.ofLong(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Long](elem: B): ArraySeq[B] = - elem match { - case b: Long => new ArraySeq.ofLong(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] { - protected def elemTag = ClassTag.Float - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Float = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Float](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Float => new ArraySeq.ofFloat(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Float](elem: B): ArraySeq[B] = - elem match { - case b: Float => new ArraySeq.ofFloat(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Float](elem: B): ArraySeq[B] = - elem match { - case b: Float => new ArraySeq.ofFloat(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] { - protected def elemTag = ClassTag.Double - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Double = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Double](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Double => new ArraySeq.ofDouble(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Double](elem: B): ArraySeq[B] = - elem match { - case b: Double => new ArraySeq.ofDouble(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Double](elem: B): ArraySeq[B] = - elem match { - case b: Double => new ArraySeq.ofDouble(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofBoolean(val unsafeArray: Array[Boolean]) extends ArraySeq[Boolean] { - protected def elemTag = ClassTag.Boolean - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Boolean = unsafeArray(i) 
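-    // Illustrative note: as with the other primitive subclasses above, holding
-    // the exact Array[Boolean] type lets length/apply read the array without
-    // boxing; only generic fallbacks, such as updated with a non-Boolean
-    // element, go through the boxed ArraySeq[B] path.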
- override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofBoolean => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Boolean](implicit ord: Ordering[B]): ArraySeq[Boolean] = - if(length <= 1) this - else if(ord eq Ordering.Boolean) { - val a = unsafeArray.clone() - Sorting.stableSort(a) - new ArraySeq.ofBoolean(a) - } else super.sorted[B] - override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = - new BoxedBooleanArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] - override def updated[B >: Boolean](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Boolean](elem: B): ArraySeq[B] = - elem match { - case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Boolean](elem: B): ArraySeq[B] = - elem match { - case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] { - protected def elemTag = ClassTag.Unit - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Unit = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofUnit => unsafeArray.length == that.unsafeArray.length - case _ => super.equals(that) - } - override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = - new ObjectArrayStepper[AnyRef](unsafeArray.asInstanceOf[Array[AnyRef]], 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala deleted file mode 100644 index 9c2bfdad54d0..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/BitSet.scala +++ /dev/null @@ -1,376 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import BitSetOps.{LogWL, updateArray} -import mutable.Builder -import scala.annotation.{implicitNotFound, nowarn} -import language.experimental.captureChecking - -/** A class for immutable bitsets. - * $bitsetinfo - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-bitsets "Scala's Collection Library overview"]] - * section on `Immutable BitSets` for more information. 
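- *
- * An illustrative usage sketch: elements are non-negative `Int`s kept as bits
- * inside `Long` words, and every update returns a new set.
- * {{{
- *   val bs  = BitSet(1, 3, 64)   // bit 64 lives in the second word
- *   val bs2 = bs.incl(2).excl(3) // BitSet(1, 2, 64); bs is unchanged
- *   bs2(64)                      // true
- * }}}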
- * - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -sealed abstract class BitSet - extends AbstractSet[Int] - with SortedSet[Int] - with SortedSetOps[Int, SortedSet, BitSet] - with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] - with collection.BitSet - with collection.BitSetOps[BitSet] - with Serializable { - - override def unsorted: Set[Int] = this - - override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder - override def empty: BitSet = bitSetFactory.empty - - def bitSetFactory = BitSet - - protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) - - def incl(elem: Int): BitSet = { - require(elem >= 0, "bitset element must be >= 0") - if (contains(elem)) this - else { - val idx = elem >> LogWL - updateWord(idx, word(idx) | (1L << elem)) - } - } - - def excl(elem: Int): BitSet = { - require(elem >= 0, "bitset element must be >= 0") - if (contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) & ~(1L << elem)) - } else this - } - - /** Update word at index `idx`; enlarge set if `idx` outside range of set. - */ - protected def updateWord(idx: Int, w: Long): BitSet - - override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) - override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].map(f) - - override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) - override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].flatMap(f) - - override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) - override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].collect(pf) - - // necessary for disambiguation - override def zip[B](that: scala.IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = - super.zip(that) - - protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) -} - -/** - * $factoryInfo - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -@nowarn("cat=deprecation&msg=Implementation classes of BitSet should not be accessed directly") -@SerialVersionUID(3L) -object BitSet extends SpecificIterableFactory[Int, BitSet] { - - def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = - it match { - case bs: BitSet => bs - case _ => (newBuilder ++= it).result() - } - - final val empty: BitSet = new BitSet1(0L) - - def newBuilder: Builder[Int, BitSet] = - mutable.BitSet.newBuilder.mapResult(bs => fromBitMaskNoCopy(bs.elems)) - - private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) - - /** A bitset containing all the bits in an array */ - def fromBitMask(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else if (len == 1) new BitSet1(elems(0)) - else if (len == 2) createSmall(elems(0), elems(1)) - else { - val a = java.util.Arrays.copyOf(elems, len) - new BitSetN(a) - } - } - - /** A bitset containing all the bits in 
an array, wrapping the existing - * array without copying. - */ - def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else if (len == 1) new BitSet1(elems(0)) - else if (len == 2) createSmall(elems(0), elems(1)) - else new BitSetN(elems) - } - - @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") - class BitSet1(val elems: Long) extends BitSet { - protected[collection] def nwords = 1 - protected[collection] def word(idx: Int) = if (idx == 0) elems else 0L - protected[collection] def updateWord(idx: Int, w: Long): BitSet = - if (idx == 0) new BitSet1(w) - else if (idx == 1) createSmall(elems, w) - else this.fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) - - - override def diff(other: collection.Set[Int]): BitSet = other match { - case bs: collection.BitSet => bs.nwords match { - case 0 => this - case _ => - val newElems = elems & ~bs.word(0) - if (newElems == 0L) this.empty else new BitSet1(newElems) - } - case _ => super.diff(other) - } - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - val _elems = BitSetOps.computeWordForFilter(pred, isFlipped, elems, 0) - if (_elems == 0L) this.empty else new BitSet1(_elems) - } - } - - @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") - class BitSet2(val elems0: Long, val elems1: Long) extends BitSet { - protected[collection] def nwords = 2 - protected[collection] def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L - protected[collection] def updateWord(idx: Int, w: Long): BitSet = - if (idx == 0) new BitSet2(w, elems1) - else if (idx == 1) createSmall(elems0, w) - else this.fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) - - - override def diff(other: collection.Set[Int]): BitSet = other match { - case bs: collection.BitSet => bs.nwords match { - case 0 => this - case 1 => - new BitSet2(elems0 & ~bs.word(0), elems1) - case _ => - val _elems0 = elems0 & ~bs.word(0) - val _elems1 = elems1 & ~bs.word(1) - - if (_elems1 == 0L) { - if (_elems0 == 0L) { - this.empty - } else { - new BitSet1(_elems0) - } - } else { - new BitSet2(_elems0, _elems1) - } - } - case _ => super.diff(other) - } - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - val _elems0 = BitSetOps.computeWordForFilter(pred, isFlipped, elems0, 0) - val _elems1 = BitSetOps.computeWordForFilter(pred, isFlipped, elems1, 1) - - if (_elems1 == 0L) { - if (_elems0 == 0L) { - this.empty - } - else new BitSet1(_elems0) - } - else new BitSet2(_elems0, _elems1) - } - } - - @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") - class BitSetN(val elems: Array[Long]) extends BitSet { - protected[collection] def nwords = elems.length - - protected[collection] def word(idx: Int) = if (idx < nwords) elems(idx) else 0L - - protected[collection] def updateWord(idx: Int, w: Long): BitSet = this.fromBitMaskNoCopy(updateArray(elems, idx, w)) - - override def diff(that: collection.Set[Int]): BitSet = that match { - case bs: collection.BitSet => - /* - * Algorithm: - * - * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with - * the fewer words. Two extra concerns for optimization are described below. 
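- *
- * A tiny illustrative case: for this = {1, 64} (words [0b10, 0b1]) and
- * bs = {64} (words [0b0, 0b1]), the backward scan zeroes word 1, sees word 0
- * unchanged, and the result shrinks to the one-word set {1}.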
- *
- * Array Shrinking:
- * If `this` is not longer than `bs`, then since we must iterate through the full array of words,
- * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new
- * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1`.
- *
- * Tracking Changes:
- * If the two sets are disjoint, then we can return `this`. Therefore, until at least one change is detected,
- * we check whether each word has changed from its corresponding word in `this`. Once a single change is
- * detected, we stop checking because the cost of the new Array must be paid anyway.
- */
-
-      val bsnwords = bs.nwords
-      val thisnwords = nwords
-      if (bsnwords >= thisnwords) {
-        // here, we may have opportunity to shrink the size of the array
-        // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length
-        var i = thisnwords - 1
-        var currentWord = 0L
-        // if there are never any changes, we can return `this` at the end
-        var anyChanges = false
-        while (i >= 0 && currentWord == 0L) {
-          val oldWord = word(i)
-          currentWord = oldWord & ~bs.word(i)
-          anyChanges ||= currentWord != oldWord
-          i -= 1
-        }
-        i match {
-          case -1 =>
-            if (anyChanges) {
-              if (currentWord == 0) {
-                this.empty
-              } else {
-                new BitSet1(currentWord)
-              }
-            } else {
-              this
-            }
-          case 0 =>
-            val oldFirstWord = word(0)
-            val firstWord = oldFirstWord & ~bs.word(0)
-            anyChanges ||= firstWord != oldFirstWord
-            if (anyChanges) {
-              new BitSet2(firstWord, currentWord)
-            } else {
-              this
-            }
-          case _ =>
-            val minimumNonZeroIndex: Int = i + 1
-            while (!anyChanges && i >= 0) {
-              val oldWord = word(i)
-              currentWord = oldWord & ~bs.word(i)
-              anyChanges ||= currentWord != oldWord
-              i -= 1
-            }
-            if (anyChanges) {
-              val newArray = elems.take(minimumNonZeroIndex + 1)
-              newArray(i + 1) = currentWord
-              while (i >= 0) {
-                newArray(i) = word(i) & ~bs.word(i)
-                i -= 1
-              }
-              new BitSetN(newArray)
-            } else {
-              this
-            }
-        }
-      } else {
-        var i = bsnwords - 1
-        var anyChanges = false
-        var currentWord = 0L
-        while (i >= 0 && !anyChanges) {
-          val oldWord = word(i)
-          currentWord = oldWord & ~bs.word(i)
-          anyChanges ||= currentWord != oldWord
-          i -= 1
-        }
-        if (anyChanges) {
-          val newElems = elems.clone()
-          newElems(i + 1) = currentWord
-          while (i >= 0) {
-            newElems(i) = word(i) & ~bs.word(i)
-            i -= 1
-          }
-          this.fromBitMaskNoCopy(newElems)
-        } else {
-          this
-        }
-      }
-      case _ => super.diff(that)
-    }
-
-
-    override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = {
-      // here, we may have opportunity to shrink the size of the array
-      // so, track the highest index which is non-zero.
That ( + 1 ) will be our new array length - var i = nwords - 1 - var currentWord = 0L - // if there are never any changes, we can return `this` at the end - var anyChanges = false - while (i >= 0 && currentWord == 0L) { - val oldWord = word(i) - currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - i match { - case -1 => - if (anyChanges) { - if (currentWord == 0) { - this.empty - } else { - new BitSet1(currentWord) - } - } else { - this - } - case 0 => - val oldFirstWord = word(0) - val firstWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldFirstWord, 0) - anyChanges ||= firstWord != oldFirstWord - if (anyChanges) { - new BitSet2(firstWord, currentWord) - } else { - this - } - case _ => - val minimumNonZeroIndex: Int = i + 1 - while (!anyChanges && i >= 0) { - val oldWord = word(i) - currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - if (anyChanges) { - val newArray = elems.take(minimumNonZeroIndex + 1) - newArray(i + 1) = currentWord - while (i >= 0) { - newArray(i) = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) - i -= 1 - } - new BitSetN(newArray) - } else { - this - } - } - } - - override def toBitMask: Array[Long] = elems.clone() - } - - @SerialVersionUID(3L) - private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { - protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala deleted file mode 100644 index fc9bcb022874..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */
-
-package scala.collection.immutable
-
-
-import java.lang.Integer.bitCount
-import java.lang.Math.ceil
-import java.lang.System.arraycopy
-import language.experimental.captureChecking
-
-private[collection] object Node {
-  final val HashCodeLength = 32
-
-  final val BitPartitionSize = 5
-
-  final val BitPartitionMask = (1 << BitPartitionSize) - 1
-
-  final val MaxDepth = ceil(HashCodeLength.toDouble / BitPartitionSize).toInt
-
-  final val BranchingFactor = 1 << BitPartitionSize
-
-  final def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask
-
-  final def bitposFrom(mask: Int): Int = 1 << mask
-
-  final def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1))
-
-  final def indexFrom(bitmap: Int, mask: Int, bitpos: Int): Int = if (bitmap == -1) mask else indexFrom(bitmap, bitpos)
-
-}
-
-private[collection] abstract class Node[T <: Node[T]] {
-
-  def hasNodes: Boolean
-
-  def nodeArity: Int
-
-  def getNode(index: Int): T
-
-  def hasPayload: Boolean
-
-  def payloadArity: Int
-
-  def getPayload(index: Int): Any
-
-  def getHash(index: Int): Int
-
-  def cachedJavaKeySetHashCode: Int
-
-  private final def arrayIndexOutOfBounds(as: Array[_], ix:Int): ArrayIndexOutOfBoundsException =
-    new ArrayIndexOutOfBoundsException(s"$ix is out of bounds (min 0, max ${as.length-1})")
-
-  protected final def removeElement(as: Array[Int], ix: Int): Array[Int] = {
-    if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
-    if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix)
-    val result = new Array[Int](as.length - 1)
-    arraycopy(as, 0, result, 0, ix)
-    arraycopy(as, ix + 1, result, ix, as.length - ix - 1)
-    result
-  }
-
-  protected final def removeAnyElement(as: Array[Any], ix: Int): Array[Any] = {
-    if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
-    if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix)
-    val result = new Array[Any](as.length - 1)
-    arraycopy(as, 0, result, 0, ix)
-    arraycopy(as, ix + 1, result, ix, as.length - ix - 1)
-    result
-  }
-
-  protected final def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
-    if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
-    if (ix > as.length) throw arrayIndexOutOfBounds(as, ix)
-    val result = new Array[Int](as.length + 1)
-    arraycopy(as, 0, result, 0, ix)
-    result(ix) = elem
-    arraycopy(as, ix, result, ix + 1, as.length - ix)
-    result
-  }
-  protected final def insertAnyElement(as: Array[Any], ix: Int, elem: Int): Array[Any] = {
-    if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
-    if (ix > as.length) throw arrayIndexOutOfBounds(as, ix)
-    val result = new Array[Any](as.length + 1)
-    arraycopy(as, 0, result, 0, ix)
-    result(ix) = elem
-    arraycopy(as, ix, result, ix + 1, as.length - ix)
-    result
-  }
-}
-
-/**
-  * Base class for fixed-stack iterators that traverse a hash-trie. The iterator performs a
-  * depth-first pre-order traversal, which yields first all payload elements of the current
-  * node before traversing sub-nodes (left to right).
-  *
-  * @tparam T the trie node type we are iterating over
-  */
-private[immutable] abstract class ChampBaseIterator[T <: Node[T]] {
-
-  import Node.MaxDepth
-
-  // Note: this code is duplicated to a large extent both in
-  // ChampBaseReverseIterator and in convert.impl.ChampStepperBase.
-  // If you change this code, check those also in case they also
-  // need to be modified.
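-
-  // Illustrative note: the fixed-size stacks below are safe because the trie
-  // depth is bounded by MaxDepth = ceil(HashCodeLength / BitPartitionSize)
-  // = ceil(32.0 / 5) = 7, so no dynamic resizing can ever be needed.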
- - protected var currentValueCursor: Int = 0 - protected var currentValueLength: Int = 0 - protected var currentValueNode: T = _ - - private[this] var currentStackLevel: Int = -1 - private[this] var nodeCursorsAndLengths: Array[Int] = _ - private[this] var nodes: Array[T] = _ - private def initNodes(): Unit = { - if (nodeCursorsAndLengths eq null) { - nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) - nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] - } - } - - def this(rootNode: T) = { - this() - if (rootNode.hasNodes) pushNode(rootNode) - if (rootNode.hasPayload) setupPayloadNode(rootNode) - } - - private final def setupPayloadNode(node: T): Unit = { - currentValueNode = node - currentValueCursor = 0 - currentValueLength = node.payloadArity - } - - private final def pushNode(node: T): Unit = { - initNodes() - currentStackLevel = currentStackLevel + 1 - - val cursorIndex = currentStackLevel * 2 - val lengthIndex = currentStackLevel * 2 + 1 - - nodes(currentStackLevel) = node - nodeCursorsAndLengths(cursorIndex) = 0 - nodeCursorsAndLengths(lengthIndex) = node.nodeArity - } - - private final def popNode(): Unit = { - currentStackLevel = currentStackLevel - 1 - } - - /** - * Searches for next node that contains payload values, - * and pushes encountered sub-nodes on a stack for depth-first traversal. - */ - private final def searchNextValueNode(): Boolean = { - while (currentStackLevel >= 0) { - val cursorIndex = currentStackLevel * 2 - val lengthIndex = currentStackLevel * 2 + 1 - - val nodeCursor = nodeCursorsAndLengths(cursorIndex) - val nodeLength = nodeCursorsAndLengths(lengthIndex) - - if (nodeCursor < nodeLength) { - nodeCursorsAndLengths(cursorIndex) += 1 - - val nextNode = nodes(currentStackLevel).getNode(nodeCursor) - - if (nextNode.hasNodes) { pushNode(nextNode) } - if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } - } else { - popNode() - } - } - - return false - } - - final def hasNext = (currentValueCursor < currentValueLength) || searchNextValueNode() - -} - -/** - * Base class for fixed-stack iterators that traverse a hash-trie in reverse order. The base - * iterator performs a depth-first post-order traversal, traversing sub-nodes (right to left). - * - * @tparam T the trie node type we are iterating over - */ -private[immutable] abstract class ChampBaseReverseIterator[T <: Node[T]] { - - import Node.MaxDepth - - protected var currentValueCursor: Int = -1 - protected var currentValueNode: T = _ - - private[this] var currentStackLevel: Int = -1 - private[this] val nodeIndex: Array[Int] = new Array[Int](MaxDepth + 1) - private[this] val nodeStack: Array[T] = new Array[Node[T]](MaxDepth + 1).asInstanceOf[Array[T]] - - def this(rootNode: T) = { - this() - pushNode(rootNode) - searchNextValueNode() - } - - private final def setupPayloadNode(node: T): Unit = { - currentValueNode = node - currentValueCursor = node.payloadArity - 1 - } - - private final def pushNode(node: T): Unit = { - currentStackLevel = currentStackLevel + 1 - - nodeStack(currentStackLevel) = node - nodeIndex(currentStackLevel) = node.nodeArity - 1 - } - - private final def popNode(): Unit = { - currentStackLevel = currentStackLevel - 1 - } - - /** - * Searches for rightmost node that contains payload values, - * and pushes encountered sub-nodes on a stack for depth-first traversal. 
- */
-  private final def searchNextValueNode(): Boolean = {
-    while (currentStackLevel >= 0) {
-      val nodeCursor = nodeIndex(currentStackLevel) ; nodeIndex(currentStackLevel) = nodeCursor - 1
-
-      if (nodeCursor >= 0) {
-        val nextNode = nodeStack(currentStackLevel).getNode(nodeCursor)
-        pushNode(nextNode)
-      } else {
-        val currNode = nodeStack(currentStackLevel)
-        popNode()
-
-        if (currNode.hasPayload) { setupPayloadNode(currNode) ; return true }
-      }
-    }
-
-    return false
-  }
-
-  final def hasNext = (currentValueCursor >= 0) || searchNextValueNode()
-
-}
diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala
deleted file mode 100644
index c364924db3a3..000000000000
--- a/tests/pos-special/stdlib/collection/immutable/HashMap.scala
+++ /dev/null
@@ -1,2425 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */
-
-package scala
-package collection.immutable
-
-import java.lang.Integer.bitCount
-import java.lang.System.arraycopy
-
-import scala.annotation.unchecked.{uncheckedVariance => uV}
-import scala.collection.Hashing.improve
-import scala.collection.Stepper.EfficientSplit
-import scala.collection.generic.DefaultSerializable
-import scala.collection.mutable, mutable.ReusableBuilder
-import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable}
-import scala.runtime.AbstractFunction2
-import scala.runtime.Statics.releaseFence
-import scala.util.hashing.MurmurHash3
-import language.experimental.captureChecking
-import scala.annotation.unchecked.uncheckedCaptures
-
-/** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree.
-  * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details.
-  *
-  * @tparam K the type of the keys contained in this hash map.
-  * @tparam V the type of the values associated with the keys in this hash map.
-  *
-  * @define Coll `immutable.HashMap`
-  * @define coll immutable champ hash map
-  */
-
-final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: BitmapIndexedMapNode[K, V])
-  extends AbstractMap[K, V]
-    with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]]
-    with MapFactoryDefaults[K, V, HashMap, Iterable]
-    with DefaultSerializable {
-
-  def this() = this(MapNode.empty)
-
-  // This release fence is present because rootNode may have previously been mutated during construction.
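-  // (Illustratively: releaseFence(), imported from scala.runtime.Statics, is a
-  // memory-ordering barrier; it publishes those prior writes so that no thread
-  // can observe a partially constructed trie through `rootNode`.)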
- releaseFence() - - override def mapFactory: MapFactory[HashMap] = HashMap - - override def knownSize: Int = rootNode.size - - override def size: Int = rootNode.size - - override def isEmpty: Boolean = rootNode.size == 0 - - override def keySet: Set[K] = if (size == 0) Set.empty else new HashKeySet - - private final class HashKeySet extends ImmutableKeySet { - - private[this] def newKeySetOrThis(newHashMap: HashMap[K, _]): Set[K] = - if (newHashMap eq HashMap.this) this else newHashMap.keySet - private[this] def newKeySetOrThis(newRootNode: BitmapIndexedMapNode[K, _]): Set[K] = - if (newRootNode eq rootNode) this else new HashMap(newRootNode).keySet - - override def incl(elem: K): Set[K] = { - val originalHash = elem.## - val improvedHash = improve(originalHash) - val newNode = rootNode.updated(elem, null.asInstanceOf[V], originalHash, improvedHash, 0, replaceValue = false) - newKeySetOrThis(newNode) - } - override def excl(elem: K): Set[K] = newKeySetOrThis(HashMap.this - elem) - override def filter(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filter(kv => pred(kv._1))) - override def filterNot(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filterNot(kv => pred(kv._1))) - } - - def iterator: Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else new MapKeyValueTupleIterator[K, V](rootNode) - } - - override def keysIterator: Iterator[K] = { - if (isEmpty) Iterator.empty - else new MapKeyIterator[K, V](rootNode) - } - override def valuesIterator: Iterator[V] = { - if (isEmpty) Iterator.empty - else new MapValueIterator[K, V](rootNode) - } - - protected[immutable] def reverseIterator: Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else new MapKeyValueTupleReverseIterator[K, V](rootNode) - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = - shape. 
-      parUnbox(collection.convert.impl.AnyChampStepper.from[(K, V), MapNode[K, V]](size, rootNode, (node, i) => node.getPayload(i)))
-
-  override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = {
-    import collection.convert.impl._
-    val s = shape.shape match {
-      case StepperShape.IntShape    => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Int])
-      case StepperShape.LongShape   => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Long])
-      case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Double])
-      case _ => shape.parUnbox(AnyChampStepper.from[K, MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i)))
-    }
-    s.asInstanceOf[S with EfficientSplit]
-  }
-
-  override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = {
-    import collection.convert.impl._
-    val s = shape.shape match {
-      case StepperShape.IntShape    => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Int])
-      case StepperShape.LongShape   => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Long])
-      case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Double])
-      case _ => shape.parUnbox(AnyChampStepper.from[V, MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i)))
-    }
-    s.asInstanceOf[S with EfficientSplit]
-  }
-
-  override final def contains(key: K): Boolean = {
-    val keyUnimprovedHash = key.##
-    val keyHash = improve(keyUnimprovedHash)
-    rootNode.containsKey(key, keyUnimprovedHash, keyHash, 0)
-  }
-
-  override def apply(key: K): V = {
-    val keyUnimprovedHash = key.##
-    val keyHash = improve(keyUnimprovedHash)
-    rootNode.apply(key, keyUnimprovedHash, keyHash, 0)
-  }
-
-  def get(key: K): Option[V] = {
-    val keyUnimprovedHash = key.##
-    val keyHash = improve(keyUnimprovedHash)
-    rootNode.get(key, keyUnimprovedHash, keyHash, 0)
-  }
-
-  override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
-    val keyUnimprovedHash = key.##
-    val keyHash = improve(keyUnimprovedHash)
-    rootNode.getOrElse(key, keyUnimprovedHash, keyHash, 0, default)
-  }
-
-  @`inline` private[this] def newHashMapOrThis[V1 >: V](newRootNode: BitmapIndexedMapNode[K, V1]): HashMap[K, V1] =
-    if (newRootNode eq rootNode) this else new HashMap(newRootNode)
-
-  def updated[V1 >: V](key: K, value: V1): HashMap[K, V1] = {
-    val keyUnimprovedHash = key.##
-    newHashMapOrThis(rootNode.updated(key, value, keyUnimprovedHash, improve(keyUnimprovedHash), 0, replaceValue = true))
-  }
-
-  // preemptively overridden in anticipation of performance optimizations
-  override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): HashMap[K, V1] =
-    super.updatedWith[V1](key)(remappingFunction)
-
-  def removed(key: K): HashMap[K, V] = {
-    val keyUnimprovedHash = key.##
-    newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0))
-  }
-
-  override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]^): HashMap[K, V1] = that match {
-    case hm: HashMap[K, V1] =>
-      if (isEmpty) hm
-      else {
-        val newNode = rootNode.concat(hm.rootNode, 0)
-        if (newNode eq hm.rootNode) hm
-        else newHashMapOrThis(newNode)
-      }
-    case hm: mutable.HashMap[K @unchecked, V @unchecked] =>
-      val iter = hm.nodeIterator
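-      // Illustrative note: the loop below starts out updating `current`
-      // immutably; as soon as it diverges from `rootNode` the fresh nodes are
-      // exclusively ours, so the inner loop switches to
-      // `updateWithShallowMutations` and patches them in place instead of
-      // re-allocating on every step.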
- var current = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = hm.unimproveHash(next.hash) - val improved = improve(originalHash) - current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) - - if (current ne rootNode) { - var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - - while (iter.hasNext) { - val next = iter.next() - val originalHash = hm.unimproveHash(next.hash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) - } - return new HashMap(current) - } - } - this - case lhm: mutable.LinkedHashMap[K @unchecked, V @unchecked] => - val iter = lhm.entryIterator - var current = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhm.unimproveHash(next.hash) - val improved = improve(originalHash) - current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) - - if (current ne rootNode) { - var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhm.unimproveHash(next.hash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) - } - return new HashMap(current) - } - } - this - case _ => - class accum extends AbstractFunction2[K, V1, Unit] with Function1[(K, V1), Unit] { - var changed = false - var shallowlyMutableNodeMap: Int = 0 - var current: BitmapIndexedMapNode[K, V1] = rootNode - def apply(kv: (K, V1)) = apply(kv._1, kv._2) - def apply(key: K, value: V1): Unit = { - val originalHash = key.## - val improved = improve(originalHash) - if (!changed) { - current = current.updated(key, value, originalHash, improved, 0, replaceValue = true) - if (current ne rootNode) { - // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that - // the first changed key ended up in a subnode beneath root, we mark that root right away as being - // shallowly mutable. - // - // since key->value has just been inserted, and certainly caused a new root node to be created, we can say with - // certainty that it either caused a new subnode to be created underneath `current`, in which case we should - // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is - // done by including its bit position in the shallowlyMutableNodeMap anyways. 
- changed = true - shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - } - } else { - shallowlyMutableNodeMap = current.updateWithShallowMutations(key, value, originalHash, improved, 0, shallowlyMutableNodeMap) - } - } - } - that match { - case thatMap: Map[K, V1] => - if (thatMap.isEmpty) this - else { - val accum = new accum - thatMap.foreachEntry(accum) - newHashMapOrThis(accum.current) - } - case _ => - val it = that.iterator - if (it.isEmpty) this - else { - val accum = new accum - it.foreach(accum) - newHashMapOrThis(accum.current) - } - } - } - - override def tail: HashMap[K, V] = this - head._1 - - override def init: HashMap[K, V] = this - last._1 - - override def head: (K, V) = iterator.next() - - override def last: (K, V) = reverseIterator.next() - - override def foreach[U](f: ((K, V)) => U): Unit = rootNode.foreach(f) - - override def foreachEntry[U](f: (K, V) => U): Unit = rootNode.foreachEntry(f) - - /** Applies a function to each key, value, and **original** hash value in this Map */ - @`inline` private[collection] def foreachWithHash(f: (K, V, Int) => Unit): Unit = rootNode.foreachWithHash(f) - - override def equals(that: Any): Boolean = - that match { - case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode) - case _ => super.equals(that) - } - - override def hashCode(): Int = { - if (isEmpty) MurmurHash3.emptyMapHash - else { - // Optimized to avoid recomputation of key hashcodes as these are cached in the nodes and can be assumed to be - // immutable. - val hashIterator = new MapKeyValueTupleHashIterator(rootNode) - val hash = MurmurHash3.unorderedHash(hashIterator, MurmurHash3.mapSeed) - // assert(hash == super.hashCode()) - hash - } - } - - override protected[this] def className = "HashMap" - - /** Merges this HashMap with an other HashMap by combining all key-value pairs of both maps, and delegating to a merge - * function to resolve any key collisions between the two HashMaps. - * - * @example {{{ - * val left = HashMap(1 -> 1, 2 -> 1) - * val right = HashMap(2 -> 2, 3 -> 2) - * - * val merged = left.merged(right){ case ((k0, v0), (k1, v1)) => (k0 + k1) -> (v0 + v1) } - * // HashMap(1 -> 1, 3 -> 2, 4 -> 3) - * - * }}} - * - * @param that the HashMap to merge this HashMap with - * @param mergef the merge function which resolves collisions between the two HashMaps. If `mergef` is null, then - * keys from `this` will overwrite keys from `that`, making the behaviour equivalent to - * `that.concat(this)` - * - * @note In cases where `mergef` returns keys which themselves collide with other keys returned by `merge`, or - * found in `this` or `that`, it is not defined which value will be chosen. 
For example:
- *
- * Colliding multiple results of merging:
- * {{{
- *   // key `3` collides between a result of merging keys `1` and `2`
- *   val left = HashMap(1 -> 1, 2 -> 2)
- *   val right = HashMap(1 -> 1, 2 -> 2)
- *
- *   val merged = left.merged(right){ case (_, (_, v1)) => 3 -> v1 }
- *   // HashMap(3 -> 2) is returned, but it could also have returned HashMap(3 -> 1)
- * }}}
- * Colliding results of merging with other keys:
- * {{{
- *   // key `2` collides between a result of merging `1`, and existing key `2`
- *   val left = HashMap(1 -> 1, 2 -> 1)
- *   val right = HashMap(1 -> 2)
- *
- *   val merged = left.merged(right)((_,_) => 2 -> 3)
- *   // HashMap(2 -> 1) is returned, but it could also have returned HashMap(2 -> 3)
- * }}}
- *
- */
-  def merged[V1 >: V](that: HashMap[K, V1])(mergef: ((K, V), (K, V1)) => (K, V1)): HashMap[K, V1] =
-    if (mergef == null) {
-      that ++ this
-    } else {
-      if (isEmpty) that
-      else if (that.isEmpty) this
-      else if (size == 1) {
-        val payload@(k, v) = rootNode.getPayload(0)
-        val originalHash = rootNode.getHash(0)
-        val improved = improve(originalHash)
-
-        if (that.rootNode.containsKey(k, originalHash, improved, 0)) {
-          val thatPayload = that.rootNode.getTuple(k, originalHash, improved, 0)
-          val (mergedK, mergedV) = mergef(payload, thatPayload)
-          val mergedOriginalHash = mergedK.##
-          val mergedImprovedHash = improve(mergedOriginalHash)
-          new HashMap(that.rootNode.removed(thatPayload._1, originalHash, improved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true))
-        } else {
-          new HashMap(that.rootNode.updated(k, v, originalHash, improved, 0, replaceValue = true))
-        }
-      } else if (that.size == 1) {
-        val thatPayload@(k, v) = that.rootNode.getPayload(0)
-        val thatOriginalHash = that.rootNode.getHash(0)
-        val thatImproved = improve(thatOriginalHash)
-
-        if (rootNode.containsKey(k, thatOriginalHash, thatImproved, 0)) {
-          val payload = rootNode.getTuple(k, thatOriginalHash, thatImproved, 0)
-          val (mergedK, mergedV) = mergef(payload, thatPayload)
-          val mergedOriginalHash = mergedK.##
-          val mergedImprovedHash = improve(mergedOriginalHash)
-          new HashMap(rootNode.updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true))
-        } else {
-          new HashMap(rootNode.updated(k, v, thatOriginalHash, thatImproved, 0, replaceValue = true))
-        }
-      } else {
-        val builder = new HashMapBuilder[K, V1]
-        rootNode.mergeInto(that.rootNode, builder, 0)(mergef)
-        builder.result()
-      }
-    }
-
-  override def transform[W](f: (K, V) => W): HashMap[K, W] =
-    newHashMapOrThis(rootNode.transform[Any](f)).asInstanceOf[HashMap[K, W]]
-
-  override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): HashMap[K, V] = {
-    val newRootNode = rootNode.filterImpl(pred, isFlipped)
-    if (newRootNode eq rootNode) this
-    else if (newRootNode.size == 0) HashMap.empty
-    else new HashMap(newRootNode)
-  }
-
-  override def removedAll(keys: IterableOnce[K]^): HashMap[K, V] = {
-    if (isEmpty) {
-      this
-    } else {
-      keys match {
-        case hashSet: HashSet[K] =>
-          if (hashSet.isEmpty) {
-            this
-          } else {
-            // TODO: Remove all keys from the hashSet in a sub-linear fashion by only visiting the nodes in the tree
-            // This can be a direct port of the implementation of `SetNode[A]#diff(SetNode[A])`
-            val newRootNode = new MapNodeRemoveAllSetNodeIterator(hashSet.rootNode).removeAll(rootNode)
-            if (newRootNode eq rootNode) this
-            else if (newRootNode.size <= 0) HashMap.empty
-            else new HashMap(newRootNode)
-          }
-        case hashSet: collection.mutable.HashSet[K] =>
-          if (hashSet.isEmpty) {
-            this
-          } else {
-            val iter = hashSet.nodeIterator
-            var curr = rootNode
-
-            while (iter.hasNext) {
-              val next = iter.next()
-              val originalHash = hashSet.unimproveHash(next.hash)
-              val improved = improve(originalHash)
-              curr = curr.removed(next.key, originalHash, improved, 0)
-              if (curr.size == 0) {
-                return HashMap.empty
-              }
-            }
-            newHashMapOrThis(curr)
-          }
-        case lhashSet: collection.mutable.LinkedHashSet[K] =>
-          if (lhashSet.isEmpty) {
-            this
-          } else {
-            val iter = lhashSet.entryIterator
-            var curr = rootNode
-
-            while (iter.hasNext) {
-              val next = iter.next()
-              val originalHash = lhashSet.unimproveHash(next.hash)
-              val improved = improve(originalHash)
-              curr = curr.removed(next.key, originalHash, improved, 0)
-              if (curr.size == 0) {
-                return HashMap.empty
-              }
-            }
-            newHashMapOrThis(curr)
-          }
-        case _ =>
-          val iter = keys.iterator
-          var curr = rootNode
-          while (iter.hasNext) {
-            val next = iter.next()
-            val originalHash = next.##
-            val improved = improve(originalHash)
-            curr = curr.removed(next, originalHash, improved, 0)
-            if (curr.size == 0) {
-              return HashMap.empty
-            }
-          }
-          newHashMapOrThis(curr)
-      }
-    }
-  }
-
-  override def partition(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = {
-    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
-    // in a minor release without breaking binary compatibility.
-    //
-    // In particular, `partition` could be optimized to traverse the trie node-by-node, splitting each node into two,
-    // based on the result of applying `p` to its elements and subnodes.
-    super.partition(p)
-  }
-
-  override def take(n: Int): HashMap[K, V] = {
-    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
-    // in a minor release without breaking binary compatibility.
-    //
-    // In particular, `take` could be optimized to construct a new trie structure by visiting each node, and including
-    // those nodes in the resulting trie, until `n` total elements have been included.
-    super.take(n)
-  }
-
-  override def takeRight(n: Int): HashMap[K, V] = {
-    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
-    // in a minor release without breaking binary compatibility.
-    //
-    // In particular, `takeRight` could be optimized to construct a new trie structure by visiting each node in reverse,
-    // and including those nodes in the resulting trie, until `n` total elements have been included.
-    super.takeRight(n)
-  }
-
-  override def takeWhile(p: ((K, V)) => Boolean): HashMap[K, V] = {
-    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
-    // in a minor release without breaking binary compatibility.
-    //
-    // In particular, `takeWhile` could be optimized to construct a new trie structure by visiting each node, and
-    // including those nodes in the resulting trie, until `p` returns `false`
-    super.takeWhile(p)
-  }
-
-  override def dropWhile(p: ((K, V)) => Boolean): HashMap[K, V] = {
-    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
-    // in a minor release without breaking binary compatibility.
-    //
-    // In particular, `dropWhile` could be optimized to construct a new trie structure by visiting each node, and
-    // dropping those nodes in the resulting trie, until `p` returns `false`
-    super.dropWhile(p)
-  }
-
-  override def dropRight(n: Int): HashMap[K, V] = {
-    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
-    // in a minor release without breaking binary compatibility.
-    //
-    // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, in reverse
-    // order, and dropping all nodes until `n` elements have been dropped
-    super.dropRight(n)
-  }
-
-  override def drop(n: Int): HashMap[K, V] = {
-    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
-    // in a minor release without breaking binary compatibility.
-    //
-    // In particular, `drop` could be optimized to construct a new trie structure by visiting each node, and
-    // dropping all nodes until `n` elements have been dropped
-    super.drop(n)
-  }
-
-  override def span(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = {
-    // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
-    // in a minor release without breaking binary compatibility.
-    //
-    // In particular, `span` could be optimized to construct a new trie structure by visiting each node, and
-    // keeping each node and element until `p` returns false, then including the remaining nodes in the second result.
-    // This would avoid having to rebuild most of the trie, and would eliminate the need to perform hashing and equality
-    // checks.
-    super.span(p)
-  }
-
-}
-
-private[immutable] object MapNode {
-
-  private final val EmptyMapNode = new BitmapIndexedMapNode(0, 0, Array.empty, Array.empty, 0, 0)
-
-  def empty[K, V]: BitmapIndexedMapNode[K, V] = EmptyMapNode.asInstanceOf[BitmapIndexedMapNode[K, V]]
-
-  final val TupleLength = 2
-
-}
-
-
-private[immutable] sealed abstract class MapNode[K, +V] extends Node[MapNode[K, V @uV]] {
-  def apply(key: K, originalHash: Int, hash: Int, shift: Int): V
-
-  def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V]
-
-  def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1
-
-  def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean
-
-  /** Returns a MapNode with the passed key-value assignment added
-    *
-    * @param key the key to add to the MapNode
-    * @param value the value to associate with `key`
-    * @param originalHash the original hash of `key`
-    * @param hash the improved hash of `key`
-    * @param shift the shift of the node (distanceFromRoot * BitPartitionSize)
-    * @param replaceValue if true, then the value currently associated with `key` will be replaced with the passed value
-    *                     argument.
-    *                     if false, then the key will be inserted if not already present, however if the key is present
-    *                     then the passed value will not replace the current value. That is, if `false`, then this
-    *                     method has `update if not exists` semantics.
- */ - def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] - - def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] - - def hasNodes: Boolean - - def nodeArity: Int - - def getNode(index: Int): MapNode[K, V] - - def hasPayload: Boolean - - def payloadArity: Int - - def getKey(index: Int): K - - def getValue(index: Int): V - - def getPayload(index: Int): (K, V) - - def size: Int - - def foreach[U](f: ((K, V)) => U): Unit - - def foreachEntry[U](f: (K, V) => U): Unit - - def foreachWithHash(f: (K, V, Int) => Unit): Unit - - def transform[W](f: (K, V) => W): MapNode[K, W] - - def copy(): MapNode[K, V] - - def concat[V1 >: V](that: MapNode[K, V1], shift: Int): MapNode[K, V1] - - def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): MapNode[K, V] - - /** Merges this node with that node, adding each resulting tuple to `builder` - * - * `this` should be a node from `left` hashmap in `left.merged(right)(mergef)` - * - * @param that node from the "right" HashMap. Must also be at the same "path" or "position" within the right tree, - * as `this` is, within the left tree - */ - def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit - - /** Returns the exact (equal by reference) key, and value, associated to a given key. - * If the key is not bound to a value, then an exception is thrown - */ - def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) - - /** Adds all key-value pairs to a builder */ - def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit -} - -private final class BitmapIndexedMapNode[K, +V]( - var dataMap: Int, - var nodeMap: Int, - var content: Array[Any], - var originalHashes: Array[Int], - var size: Int, - var cachedJavaKeySetHashCode: Int) extends MapNode[K, V] { - - releaseFence() - - import MapNode._ - import Node._ - - /* - assert(checkInvariantContentIsWellTyped()) - assert(checkInvariantSubNodesAreCompacted()) - - private final def checkInvariantSubNodesAreCompacted(): Boolean = - new MapKeyValueTupleIterator[K, V](this).size - payloadArity >= 2 * nodeArity - - private final def checkInvariantContentIsWellTyped(): Boolean = { - val predicate1 = TupleLength * payloadArity + nodeArity == content.length - - val predicate2 = Range(0, TupleLength * payloadArity) - .forall(i => content(i).isInstanceOf[MapNode[_, _]] == false) - - val predicate3 = Range(TupleLength * payloadArity, content.length) - .forall(i => content(i).isInstanceOf[MapNode[_, _]] == true) - - predicate1 && predicate2 && predicate3 - } - */ - - def getKey(index: Int): K = content(TupleLength * index).asInstanceOf[K] - def getValue(index: Int): V = content(TupleLength * index + 1).asInstanceOf[V] - - def getPayload(index: Int) = Tuple2( - content(TupleLength * index).asInstanceOf[K], - content(TupleLength * index + 1).asInstanceOf[V]) - - override def getHash(index: Int): Int = originalHashes(index) - - def getNode(index: Int): MapNode[K, V] = - content(content.length - 1 - index).asInstanceOf[MapNode[K, V]] - - def apply(key: K, originalHash: Int, keyHash: Int, shift: Int): V = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - if (key == getKey(index)) getValue(index) else throw new NoSuchElementException(s"key not found: $key") - } else if ((nodeMap & bitpos) != 0) { - getNode(indexFrom(nodeMap, mask, 
bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) - } else { - throw new NoSuchElementException(s"key not found: $key") - } - } - - def get(key: K, originalHash: Int, keyHash: Int, shift: Int): Option[V] = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val key0 = this.getKey(index) - if (key == key0) Some(this.getValue(index)) else None - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - this.getNode(index).get(key, originalHash, keyHash, shift + BitPartitionSize) - } else { - None - } - } - - override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { - val mask = maskFrom(hash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val payload = getPayload(index) - if (key == payload._1) payload else throw new NoSuchElementException - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - getNode(index).getTuple(key, originalHash, hash, shift + BitPartitionSize) - } else { - throw new NoSuchElementException - } - } - - def getOrElse[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int, f: => V1): V1 = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val key0 = this.getKey(index) - if (key == key0) getValue(index) else f - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - this.getNode(index).getOrElse(key, originalHash, keyHash, shift + BitPartitionSize, f) - } else { - f - } - } - - override def containsKey(key: K, originalHash: Int, keyHash: Int, shift: Int): Boolean = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - // assert(hashes(index) == computeHash(this.getKey(index)), (hashes.toSeq, content.toSeq, index, key, keyHash, shift)) - (originalHashes(index) == originalHash) && key == getKey(index) - } else if ((nodeMap & bitpos) != 0) { - getNode(indexFrom(nodeMap, mask, bitpos)).containsKey(key, originalHash, keyHash, shift + BitPartitionSize) - } else { - false - } - } - - - def updated[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, replaceValue: Boolean): BitmapIndexedMapNode[K, V1] = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val key0 = getKey(index) - val key0UnimprovedHash = getHash(index) - if (key0UnimprovedHash == originalHash && key0 == key) { - if (replaceValue) { - val value0 = this.getValue(index) - if ((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef])) - this - else copyAndSetValue(bitpos, key, value) - } else this - } else { - val value0 = this.getValue(index) - val key0Hash = improve(key0UnimprovedHash) - val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) - - copyAndMigrateFromInlineToNode(bitpos, key0Hash, subNodeNew) - } - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - val subNodeNew = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, 
replaceValue) - - if (subNodeNew eq subNode) this else copyAndSetNode(bitpos, subNode, subNodeNew) - } else copyAndInsertValue(bitpos, key, originalHash, keyHash, value) - } - - /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately - * descendant child nodes (only one level beneath `this`) - * - * The caller should pass a bitmap of child nodes of this node, which this method may mutate. - * If this method may mutate a child node, then if the updated key-value belongs in that child node, it will - * be shallowly mutated (its children will not be mutated). - * - * If instead this method may not mutate the child node in which the to-be-updated key-value pair belongs, then - * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. - * - * @param key the key to update - * @param value the value to set `key` to - * @param originalHash key.## - * @param keyHash the improved hash - * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated - * during the call to this method - * - * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be - * available for mutations in subsequent calls. - */ - def updateWithShallowMutations[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val key0 = getKey(index) - val key0UnimprovedHash = getHash(index) - if (key0UnimprovedHash == originalHash && key0 == key) { - val value0 = this.getValue(index) - if (!((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]))) { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - content(idx + 1) = value - } - shallowlyMutableNodeMap - } else { - val value0 = this.getValue(index) - val key0Hash = improve(key0UnimprovedHash) - - val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) - migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) - shallowlyMutableNodeMap | bitpos - } - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - val subNodeSize = subNode.size - val subNodeHashCode = subNode.cachedJavaKeySetHashCode - - var returnMutableNodeMap = shallowlyMutableNodeMap - - val subNodeNew: MapNode[K, V1] = subNode match { - case subNodeBm: BitmapIndexedMapNode[K, V] if (bitpos & shallowlyMutableNodeMap) != 0 => - subNodeBm.updateWithShallowMutations(key, value, originalHash, keyHash, shift + BitPartitionSize, 0) - subNodeBm - case _ => - val result = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue = true) - if (result ne subNode) { - returnMutableNodeMap |= bitpos - } - result - } - - this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew - this.size = this.size - subNodeSize + subNodeNew.size - this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeHashCode + subNodeNew.cachedJavaKeySetHashCode - returnMutableNodeMap - } else { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length + TupleLength) - - // copy 'src' and insert 2 
element(s) at position 'idx'
- arraycopy(src, 0, dst, 0, idx)
- dst(idx) = key
- dst(idx + 1) = value
- arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
-
- this.dataMap |= bitpos
- this.content = dst
- this.originalHashes = insertElement(originalHashes, dataIx, originalHash)
- this.size += 1
- this.cachedJavaKeySetHashCode += keyHash
- shallowlyMutableNodeMap
- }
- }
-
- def removed[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int): BitmapIndexedMapNode[K, V1] = {
- val mask = maskFrom(keyHash, shift)
- val bitpos = bitposFrom(mask)
-
- if ((dataMap & bitpos) != 0) {
- val index = indexFrom(dataMap, mask, bitpos)
- val key0 = this.getKey(index)
-
- if (key0 == key) {
- if (this.payloadArity == 2 && this.nodeArity == 0) {
- /*
- * Create new node with remaining pair. The new node will either a) become the new root
- * returned, or b) be unwrapped and inlined during returning.
- */
- val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(keyHash, 0))
- if (index == 0)
- new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(1), getValue(1)), Array(originalHashes(1)), 1, improve(getHash(1)))
- else
- new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(0), getValue(0)), Array(originalHashes(0)), 1, improve(getHash(0)))
- } else copyAndRemoveValue(bitpos, keyHash)
- } else this
- } else if ((nodeMap & bitpos) != 0) {
- val index = indexFrom(nodeMap, mask, bitpos)
- val subNode = this.getNode(index)
-
- val subNodeNew = subNode.removed(key, originalHash, keyHash, shift + BitPartitionSize)
- // assert(subNodeNew.size != 0, "Sub-node must have at least one element.")
-
- if (subNodeNew eq subNode) return this
-
- // cache just in case subNodeNew is a hashCollision node, in which case a little arithmetic is avoided
- // in Vector#length
- val subNodeNewSize = subNodeNew.size
-
- if (subNodeNewSize == 1) {
- if (this.size == subNode.size) {
- // subNode is the only child (no other data or node children of `this` exist)
- // escalate (singleton or empty) result
- subNodeNew.asInstanceOf[BitmapIndexedMapNode[K, V]]
- } else {
- // inline value (move to front)
- copyAndMigrateFromNodeToInline(bitpos, subNode, subNodeNew)
- }
- } else if (subNodeNewSize > 1) {
- // modify current node (set replacement node)
- copyAndSetNode(bitpos, subNode, subNodeNew)
- } else this
- } else this
- }
-
- def mergeTwoKeyValPairs[V1 >: V](key0: K, value0: V1, originalHash0: Int, keyHash0: Int, key1: K, value1: V1, originalHash1: Int, keyHash1: Int, shift: Int): MapNode[K, V1] = {
- // assert(key0 != key1)
-
- if (shift >= HashCodeLength) {
- new HashCollisionMapNode[K, V1](originalHash0, keyHash0, Vector((key0, value0), (key1, value1)))
- } else {
- val mask0 = maskFrom(keyHash0, shift)
- val mask1 = maskFrom(keyHash1, shift)
- val newCachedHash = keyHash0 + keyHash1
-
- if (mask0 != mask1) {
- // unique prefixes, payload fits on same level
- val dataMap = bitposFrom(mask0) | bitposFrom(mask1)
-
- if (mask0 < mask1) {
- new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key0, value0, key1, value1), Array(originalHash0, originalHash1), 2, newCachedHash)
- } else {
- new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key1, value1, key0, value0), Array(originalHash1, originalHash0), 2, newCachedHash)
- }
- } else {
- // identical prefixes, payload must be disambiguated deeper in the trie
- val nodeMap = bitposFrom(mask0)
- val node = mergeTwoKeyValPairs(key0, value0, originalHash0, keyHash0, key1, value1, originalHash1, keyHash1, shift + BitPartitionSize)
- new 
BitmapIndexedMapNode[K, V1](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) - } - } - } - - def hasNodes: Boolean = nodeMap != 0 - - def nodeArity: Int = bitCount(nodeMap) - - def hasPayload: Boolean = dataMap != 0 - - def payloadArity: Int = bitCount(dataMap) - - def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) - - def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) - - def copyAndSetValue[V1 >: V](bitpos: Int, newKey: K, newValue: V1): BitmapIndexedMapNode[K, V1] = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length) - - // copy 'src' and set 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, src.length) - //dst(idx) = newKey - dst(idx + 1) = newValue - new BitmapIndexedMapNode[K, V1](dataMap, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) - } - - def copyAndSetNode[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], newNode: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { - val idx = this.content.length - 1 - this.nodeIndex(bitpos) - - val src = this.content - val dst = new Array[Any](src.length) - - // copy 'src' and set 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, src.length) - dst(idx) = newNode - new BitmapIndexedMapNode[K, V1]( - dataMap, - nodeMap, - dst, - originalHashes, - size - oldNode.size + newNode.size, - cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode - ) - } - - def copyAndInsertValue[V1 >: V](bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): BitmapIndexedMapNode[K, V1] = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length + TupleLength) - - // copy 'src' and insert 2 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - dst(idx) = key - dst(idx + 1) = value - arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) - - val dstHashes = insertElement(originalHashes, dataIx, originalHash) - - new BitmapIndexedMapNode[K, V1](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + keyHash) - } - - def copyAndRemoveValue(bitpos: Int, keyHash: Int): BitmapIndexedMapNode[K, V] = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length - TupleLength) - - // copy 'src' and remove 2 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) - - val dstHashes = removeElement(originalHashes, dataIx) - - new BitmapIndexedMapNode[K, V](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - keyHash) - } - - /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. 
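- *
- * For orientation (a note derived from `getKey`/`getNode` above): the `content` array stores
- * inlined payloads as a prefix and sub-nodes in reverse order as a suffix,
- * {{{
- * [k0, v0, k1, v1, ..., kN, vN, nodeM, ..., node1, node0]
- * }}}
- * so this migration removes two payload slots from the prefix and inserts one node slot into
- * the suffix, in place.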
- * - * @param bitpos the bit position of the data to migrate to node - * @param keyHash the improved hash of the key currently at `bitpos` - * @param node the node to place at `bitpos` beneath `this` - */ - def migrateFromInlineToNodeInPlace[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): this.type = { - val dataIx = dataIndex(bitpos) - val idxOld = TupleLength * dataIx - val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) - - val src = this.content - val dst = new Array[Any](src.length - TupleLength + 1) - - // copy 'src' and remove 2 element(s) at position 'idxOld' and - // insert 1 element(s) at position 'idxNew' - // assert(idxOld <= idxNew) - arraycopy(src, 0, dst, 0, idxOld) - arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) - dst(idxNew) = node - arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) - - val dstHashes = removeElement(originalHashes, dataIx) - - this.dataMap = dataMap ^ bitpos - this.nodeMap = nodeMap | bitpos - this.content = dst - this.originalHashes = dstHashes - this.size = size - 1 + node.size - this.cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode - this - } - - def copyAndMigrateFromInlineToNode[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { - val dataIx = dataIndex(bitpos) - val idxOld = TupleLength * dataIx - val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) - - val src = this.content - val dst = new Array[Any](src.length - TupleLength + 1) - - // copy 'src' and remove 2 element(s) at position 'idxOld' and - // insert 1 element(s) at position 'idxNew' - // assert(idxOld <= idxNew) - arraycopy(src, 0, dst, 0, idxOld) - arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) - dst(idxNew) = node - arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) - - val dstHashes = removeElement(originalHashes, dataIx) - - new BitmapIndexedMapNode[K, V1]( - dataMap = dataMap ^ bitpos, - nodeMap = nodeMap | bitpos, - content = dst, - originalHashes = dstHashes, - size = size - 1 + node.size, - cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode - ) - } - - def copyAndMigrateFromNodeToInline[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { - val idxOld = this.content.length - 1 - nodeIndex(bitpos) - val dataIxNew = dataIndex(bitpos) - val idxNew = TupleLength * dataIxNew - - val key = node.getKey(0) - val value = node.getValue(0) - val src = this.content - val dst = new Array[Any](src.length - 1 + TupleLength) - - // copy 'src' and remove 1 element(s) at position 'idxOld' and - // insert 2 element(s) at position 'idxNew' - // assert(idxOld >= idxNew) - arraycopy(src, 0, dst, 0, idxNew) - dst(idxNew) = key - dst(idxNew + 1) = value - arraycopy(src, idxNew, dst, idxNew + TupleLength, idxOld - idxNew) - arraycopy(src, idxOld + 1, dst, idxOld + TupleLength, src.length - idxOld - 1) - val hash = node.getHash(0) - val dstHashes = insertElement(originalHashes, dataIxNew, hash) - new BitmapIndexedMapNode[K, V1]( - dataMap = dataMap | bitpos, - nodeMap = nodeMap ^ bitpos, - content = dst, - originalHashes = dstHashes, - size = size - oldNode.size + 1, - cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode - ) - } - - override def foreach[U](f: ((K, V)) => U): Unit = { - val iN = payloadArity // 
arity doesn't change during this operation - var i = 0 - while (i < iN) { - f(getPayload(i)) - i += 1 - } - - val jN = nodeArity // arity doesn't change during this operation - var j = 0 - while (j < jN) { - getNode(j).foreach(f) - j += 1 - } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - val iN = payloadArity // arity doesn't change during this operation - var i = 0 - while (i < iN) { - f(getKey(i), getValue(i)) - i += 1 - } - - val jN = nodeArity // arity doesn't change during this operation - var j = 0 - while (j < jN) { - getNode(j).foreachEntry(f) - j += 1 - } - } - - override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { - var i = 0 - val iN = payloadArity // arity doesn't change during this operation - while (i < iN) { - f(getKey(i), getValue(i), getHash(i)) - i += 1 - } - - val jN = nodeArity // arity doesn't change during this operation - var j = 0 - while (j < jN) { - getNode(j).foreachWithHash(f) - j += 1 - } - } - override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { - var i = 0 - val iN = payloadArity - val jN = nodeArity - while (i < iN) { - builder.addOne(getKey(i), getValue(i), getHash(i)) - i += 1 - } - - var j = 0 - while (j < jN) { - getNode(j).buildTo(builder) - j += 1 - } - } - - override def transform[W](f: (K, V) => W): BitmapIndexedMapNode[K, W] = { - var newContent: Array[Any] = null - val iN = payloadArity // arity doesn't change during this operation - val jN = nodeArity // arity doesn't change during this operation - val newContentLength = content.length - var i = 0 - while (i < iN) { - val key = getKey(i) - val value = getValue(i) - val newValue = f(key, value) - if (newContent eq null) { - if (newValue.asInstanceOf[AnyRef] ne value.asInstanceOf[AnyRef]) { - newContent = content.clone() - newContent(TupleLength * i + 1) = newValue - } - } else { - newContent(TupleLength * i + 1) = newValue - } - i += 1 - } - - var j = 0 - while (j < jN) { - val node = getNode(j) - val newNode = node.transform(f) - if (newContent eq null) { - if (newNode ne node) { - newContent = content.clone() - newContent(newContentLength - j - 1) = newNode - } - } else - newContent(newContentLength - j - 1) = newNode - j += 1 - } - if (newContent eq null) this.asInstanceOf[BitmapIndexedMapNode[K, W]] - else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent, originalHashes, size, cachedJavaKeySetHashCode) - } - - override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { - case bm: BitmapIndexedMapNode[K, V] @unchecked => - if (size == 0) { - that.buildTo(builder) - return - } else if (bm.size == 0) { - buildTo(builder) - return - } - - val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap - - val minIndex: Int = Integer.numberOfTrailingZeros(allMap) - val maxIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - - { - var index = minIndex - var leftIdx = 0 - var rightIdx = 0 - - while (index < maxIndex) { - val bitpos = bitposFrom(index) - - if ((bitpos & dataMap) != 0) { - val leftKey = getKey(leftIdx) - val leftValue = getValue(leftIdx) - val leftOriginalHash = getHash(leftIdx) - if ((bitpos & bm.dataMap) != 0) { - // left data and right data - val rightKey = bm.getKey(rightIdx) - val rightValue = bm.getValue(rightIdx) - val rightOriginalHash = bm.getHash(rightIdx) - if (leftOriginalHash == rightOriginalHash && leftKey == rightKey) { - builder.addOne(mergef((leftKey, leftValue), (rightKey, rightValue))) - } else { - 
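// the keys (or their unimproved hashes) differ, so there is no entry to merge:
- // both payloads are added to the builder unchanged
- 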
builder.addOne(leftKey, leftValue, leftOriginalHash) - builder.addOne(rightKey, rightValue, rightOriginalHash) - } - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - // left data and right node - val subNode = bm.getNode(bm.nodeIndex(bitpos)) - val leftImprovedHash = improve(leftOriginalHash) - val removed = subNode.removed(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize) - if (removed eq subNode) { - // no overlap in leftData and rightNode, just build both children to builder - subNode.buildTo(builder) - builder.addOne(leftKey, leftValue, leftOriginalHash, leftImprovedHash) - } else { - // there is collision, so special treatment for that key - removed.buildTo(builder) - builder.addOne(mergef((leftKey, leftValue), subNode.getTuple(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize))) - } - } else { - // left data and nothing on right - builder.addOne(leftKey, leftValue, leftOriginalHash) - } - leftIdx += 1 - } else if ((bitpos & nodeMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - // left node and right data - val rightKey = bm.getKey(rightIdx) - val rightValue = bm.getValue(rightIdx) - val rightOriginalHash = bm.getHash(rightIdx) - val rightImprovedHash = improve(rightOriginalHash) - - val subNode = getNode(nodeIndex(bitpos)) - val removed = subNode.removed(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize) - if (removed eq subNode) { - // no overlap in leftNode and rightData, just build both children to builder - subNode.buildTo(builder) - builder.addOne(rightKey, rightValue, rightOriginalHash, rightImprovedHash) - } else { - // there is collision, so special treatment for that key - removed.buildTo(builder) - builder.addOne(mergef(subNode.getTuple(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize), (rightKey, rightValue))) - } - rightIdx += 1 - - } else if ((bitpos & bm.nodeMap) != 0) { - // left node and right node - getNode(nodeIndex(bitpos)).mergeInto(bm.getNode(bm.nodeIndex(bitpos)), builder, shift + BitPartitionSize)(mergef) - } else { - // left node and nothing on right - getNode(nodeIndex(bitpos)).buildTo(builder) - } - } else if ((bitpos & bm.dataMap) != 0) { - // nothing on left, right data - val dataIndex = bm.dataIndex(bitpos) - builder.addOne(bm.getKey(dataIndex),bm.getValue(dataIndex), bm.getHash(dataIndex)) - rightIdx += 1 - - } else if ((bitpos & bm.nodeMap) != 0) { - // nothing on left, right node - bm.getNode(bm.nodeIndex(bitpos)).buildTo(builder) - } - - index += 1 - } - } - case _: HashCollisionMapNode[_, _] => - throw new Exception("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode") - } - - override def equals(that: Any): Boolean = - that match { - case node: BitmapIndexedMapNode[_, _] => - (this eq node) || - (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && - (this.nodeMap == node.nodeMap) && - (this.dataMap == node.dataMap) && - (this.size == node.size) && - java.util.Arrays.equals(this.originalHashes, node.originalHashes) && - deepContentEquality(this.content, node.content, content.length) - case _ => false - } - - @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { - if (a1 eq a2) - true - else { - var isEqual = true - var i = 0 - - while (isEqual && i < length) { - isEqual = a1(i) == a2(i) - i += 1 - } - - isEqual - } - } - - override def hashCode(): Int = - throw new UnsupportedOperationException("Trie nodes do not support hashing.") - - override def concat[V1 >: V](that: MapNode[K, 
V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match { - case bm: BitmapIndexedMapNode[K, V] @unchecked => - if (size == 0) return bm - else if (bm.size == 0 || (bm eq this)) return this - else if (bm.size == 1) { - val originalHash = bm.getHash(0) - return this.updated(bm.getKey(0), bm.getValue(0), originalHash, improve(originalHash), shift, replaceValue = true) - } - // if we go through the merge and the result does not differ from `bm`, we can just return `bm`, to improve sharing - // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the - // currently-being-computed result, and `bm` - var anyChangesMadeSoFar = false - - val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap - - // minimumIndex is inclusive -- it is the first index for which there is data or nodes - val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) - // maximumIndex is inclusive -- it is the last index for which there is data or nodes - // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound - // of int bitposition representation - val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) - - var leftNodeRightNode = 0 - var leftDataRightNode = 0 - var leftNodeRightData = 0 - var leftDataOnly = 0 - var rightDataOnly = 0 - var leftNodeOnly = 0 - var rightNodeOnly = 0 - var leftDataRightDataMigrateToNode = 0 - var leftDataRightDataRightOverwrites = 0 - - var dataToNodeMigrationTargets = 0 - - { - var bitpos = minimumBitPos - var leftIdx = 0 - var rightIdx = 0 - var finished = false - - while (!finished) { - - if ((bitpos & dataMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - val leftOriginalHash = getHash(leftIdx) - if (leftOriginalHash == bm.getHash(rightIdx) && getKey(leftIdx) == bm.getKey(rightIdx)) { - leftDataRightDataRightOverwrites |= bitpos - } else { - leftDataRightDataMigrateToNode |= bitpos - dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(leftOriginalHash), shift)) - } - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - leftDataRightNode |= bitpos - } else { - leftDataOnly |= bitpos - } - leftIdx += 1 - } else if ((bitpos & nodeMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - leftNodeRightData |= bitpos - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - leftNodeRightNode |= bitpos - } else { - leftNodeOnly |= bitpos - } - } else if ((bitpos & bm.dataMap) != 0) { - rightDataOnly |= bitpos - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - rightNodeOnly |= bitpos - } - - if (bitpos == maximumBitPos) { - finished = true - } else { - bitpos = bitpos << 1 - } - } - } - - - val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataRightOverwrites - - val newNodeMap = - leftNodeRightNode | - leftDataRightNode | - leftNodeRightData | - leftNodeOnly | - rightNodeOnly | - dataToNodeMigrationTargets - - - if ((newDataMap == (rightDataOnly | leftDataRightDataRightOverwrites)) && (newNodeMap == rightNodeOnly)) { - // nothing from `this` will make it into the result -- return early - return bm - } - - val newDataSize = bitCount(newDataMap) - val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) - - val newContent = new Array[Any](newContentSize) - val newOriginalHashes = new Array[Int](newDataSize) - var newSize = 0 - var newCachedHashCode = 0 - - { - var leftDataIdx = 0 - var rightDataIdx = 0 - var leftNodeIdx = 0 - var rightNodeIdx = 0 - - val nextShift = 
shift + Node.BitPartitionSize - - var compressedDataIdx = 0 - var compressedNodeIdx = 0 - - var bitpos = minimumBitPos - var finished = false - - while (!finished) { - - if ((bitpos & leftNodeRightNode) != 0) { - val rightNode = bm.getNode(rightNodeIdx) - val newNode = getNode(leftNodeIdx).concat(rightNode, nextShift) - if (rightNode ne newNode) { - anyChangesMadeSoFar = true - } - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - leftNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - - } else if ((bitpos & leftDataRightNode) != 0) { - val newNode = { - val n = bm.getNode(rightNodeIdx) - val leftKey = getKey(leftDataIdx) - val leftValue = getValue(leftDataIdx) - val leftOriginalHash = getHash(leftDataIdx) - val leftImproved = improve(leftOriginalHash) - - val updated = n.updated(leftKey, leftValue, leftOriginalHash, leftImproved, nextShift, replaceValue = false) - - if (updated ne n) { - anyChangesMadeSoFar = true - } - - updated - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - leftDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } - else if ((bitpos & leftNodeRightData) != 0) { - anyChangesMadeSoFar = true - val newNode = { - val rightOriginalHash = bm.getHash(rightDataIdx) - getNode(leftNodeIdx).updated( - key = bm.getKey(rightDataIdx), - value = bm.getValue(rightDataIdx), - originalHash = bm.getHash(rightDataIdx), - hash = improve(rightOriginalHash), - shift = nextShift, - replaceValue = true - ) - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftNodeIdx += 1 - rightDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - - } else if ((bitpos & leftDataOnly) != 0) { - anyChangesMadeSoFar = true - val originalHash = originalHashes(leftDataIdx) - newContent(MapNode.TupleLength * compressedDataIdx) = getKey(leftDataIdx).asInstanceOf[AnyRef] - newContent(MapNode.TupleLength * compressedDataIdx + 1) = getValue(leftDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - leftDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - } else if ((bitpos & rightDataOnly) != 0) { - val originalHash = bm.originalHashes(rightDataIdx) - newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] - newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - rightDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - } else if ((bitpos & leftNodeOnly) != 0) { - anyChangesMadeSoFar = true - val newNode = getNode(leftNodeIdx) - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & rightNodeOnly) != 0) { - val newNode = bm.getNode(rightNodeIdx) - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { - anyChangesMadeSoFar = true - val newNode = { - val leftOriginalHash = getHash(leftDataIdx) - val 
rightOriginalHash = bm.getHash(rightDataIdx)
-
- bm.mergeTwoKeyValPairs(
- getKey(leftDataIdx), getValue(leftDataIdx), leftOriginalHash, improve(leftOriginalHash),
- bm.getKey(rightDataIdx), bm.getValue(rightDataIdx), rightOriginalHash, improve(rightOriginalHash),
- nextShift
- )
- }
-
- newContent(newContentSize - compressedNodeIdx - 1) = newNode
- compressedNodeIdx += 1
- leftDataIdx += 1
- rightDataIdx += 1
- newSize += newNode.size
- newCachedHashCode += newNode.cachedJavaKeySetHashCode
- } else if ((bitpos & leftDataRightDataRightOverwrites) != 0) {
- val originalHash = bm.originalHashes(rightDataIdx)
- newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef]
- newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef]
- newOriginalHashes(compressedDataIdx) = originalHash
-
- compressedDataIdx += 1
- rightDataIdx += 1
- newSize += 1
- newCachedHashCode += improve(originalHash)
- leftDataIdx += 1
- }
-
- if (bitpos == maximumBitPos) {
- finished = true
- } else {
- bitpos = bitpos << 1
- }
- }
- }
-
- if (anyChangesMadeSoFar)
- new BitmapIndexedMapNode(
- dataMap = newDataMap,
- nodeMap = newNodeMap,
- content = newContent,
- originalHashes = newOriginalHashes,
- size = newSize,
- cachedJavaKeySetHashCode = newCachedHashCode
- )
- else bm
-
- case _ =>
- // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes
- throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode")
- }
-
- override def copy(): BitmapIndexedMapNode[K, V] = {
- val contentClone = content.clone()
- val contentLength = contentClone.length
- var i = bitCount(dataMap) * TupleLength
- while (i < contentLength) {
- contentClone(i) = contentClone(i).asInstanceOf[MapNode[K, V]].copy()
- i += 1
- }
- new BitmapIndexedMapNode[K, V](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode)
- }
-
- override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): BitmapIndexedMapNode[K, V] = {
- if (size == 0) this
- else if (size == 1) {
- if (pred(getPayload(0)) != flipped) this else MapNode.empty
- } else if (nodeMap == 0) {
- // Performance optimization for nodes of depth 1:
- //
- // this node has no "node" children, all children are inlined data elems, therefore logic is significantly simpler
- // approach:
- // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter
- // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations
- // * traverse the content array once more, placing each passing element (according to `newDataMap`) in the new content and originalHashes arrays
- //
- // note:
- // * this optimization significantly improves performance of not only small trees, but also larger trees, since
- // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as
- // descendants
- //
- val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap)
- val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap)
-
- var newDataMap = 0
- var newCachedHashCode = 0
- var dataIndex = 0
-
- var i = minimumIndex
-
- while(i < maximumIndex) {
- val bitpos = bitposFrom(i)
-
- if ((bitpos & dataMap) != 0) {
- val payload = getPayload(dataIndex)
- val passed = pred(payload) != flipped
-
- if (passed) {
- newDataMap |= bitpos
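- // accumulate the improved hash of each surviving key, so the filtered node's
- // cachedJavaKeySetHashCode is maintained incrementally rather than recomputed
- 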
newCachedHashCode += improve(getHash(dataIndex)) - } - - dataIndex += 1 - } - - i += 1 - } - - if (newDataMap == 0) { - MapNode.empty - } else if (newDataMap == dataMap) { - this - } else { - val newSize = Integer.bitCount(newDataMap) - val newContent = new Array[Any](newSize * TupleLength) - val newOriginalHashCodes = new Array[Int](newSize) - val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) - - var j = Integer.numberOfTrailingZeros(newDataMap) - - var newDataIndex = 0 - - - while (j < newMaximumIndex) { - val bitpos = bitposFrom(j) - if ((bitpos & newDataMap) != 0) { - val oldIndex = indexFrom(dataMap, bitpos) - newContent(newDataIndex * TupleLength) = content(oldIndex * TupleLength) - newContent(newDataIndex * TupleLength + 1) = content(oldIndex * TupleLength + 1) - newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) - newDataIndex += 1 - } - j += 1 - } - - new BitmapIndexedMapNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) - } - - - } else { - val allMap = dataMap | nodeMap - val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) - val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - - var oldDataPassThrough = 0 - - // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data - var nodeMigrateToDataTargetMap = 0 - // the queue of single-element, post-filter nodes - var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] = null - - // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node - var nodesToPassThroughMap = 0 - - // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself - // These are stored for later inclusion into the final `content` array - // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) - var mapOfNewNodes = 0 - // each bit in `mapOfNewNodes` corresponds to one element in this queue - var newNodes: mutable.Queue[MapNode[K, V]] = null - - var newDataMap = 0 - var newNodeMap = 0 - var newSize = 0 - var newCachedHashCode = 0 - - var dataIndex = 0 - var nodeIndex = 0 - - var i = minimumIndex - while (i < maximumIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & dataMap) != 0) { - val payload = getPayload(dataIndex) - val passed = pred(payload) != flipped - - if (passed) { - newDataMap |= bitpos - oldDataPassThrough |= bitpos - newSize += 1 - newCachedHashCode += improve(getHash(dataIndex)) - } - - dataIndex += 1 - } else if ((bitpos & nodeMap) != 0) { - val oldSubNode = getNode(nodeIndex) - val newSubNode = oldSubNode.filterImpl(pred, flipped) - - newSize += newSubNode.size - newCachedHashCode += newSubNode.cachedJavaKeySetHashCode - - // if (newSubNode.size == 0) do nothing (drop it) - if (newSubNode.size > 1) { - newNodeMap |= bitpos - if (oldSubNode eq newSubNode) { - nodesToPassThroughMap |= bitpos - } else { - mapOfNewNodes |= bitpos - if (newNodes eq null) { - newNodes = mutable.Queue.empty[MapNode[K, V] @uncheckedCaptures] - } - newNodes += newSubNode - } - } else if (newSubNode.size == 1) { - newDataMap |= bitpos - nodeMigrateToDataTargetMap |= bitpos - if (nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue() - } - nodesToMigrateToData += newSubNode - } - - nodeIndex += 1 - } - - i += 1 - } - - if (newSize == 0) { - MapNode.empty - } else if (newSize == size) { - this - } else { - val newDataSize = bitCount(newDataMap) - val 
newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap)
- val newContent = new Array[Any](newContentSize)
- val newOriginalHashes = new Array[Int](newDataSize)
-
- val newAllMap = newDataMap | newNodeMap
- val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap)
-
- // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will
- // not be incremented properly. If that were not the case, we could have started at Integer.numberOfTrailingZeros(newAllMap)
- var i = minimumIndex
-
- var oldDataIndex = 0
- var oldNodeIndex = 0
-
- var newDataIndex = 0
- var newNodeIndex = 0
-
- while (i < maxIndex) {
- val bitpos = bitposFrom(i)
-
- if ((bitpos & oldDataPassThrough) != 0) {
- newContent(newDataIndex * TupleLength) = getKey(oldDataIndex)
- newContent(newDataIndex * TupleLength + 1) = getValue(oldDataIndex)
- newOriginalHashes(newDataIndex) = getHash(oldDataIndex)
- newDataIndex += 1
- oldDataIndex += 1
- } else if ((bitpos & nodesToPassThroughMap) != 0) {
- newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex)
- newNodeIndex += 1
- oldNodeIndex += 1
- } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) {
- // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesToMigrateToData must not be null
- val node = nodesToMigrateToData.dequeue()
- newContent(TupleLength * newDataIndex) = node.getKey(0)
- newContent(TupleLength * newDataIndex + 1) = node.getValue(0)
- newOriginalHashes(newDataIndex) = node.getHash(0)
- newDataIndex += 1
- oldNodeIndex += 1
- } else if ((bitpos & mapOfNewNodes) != 0) {
- newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue()
- newNodeIndex += 1
- oldNodeIndex += 1
- } else if ((bitpos & dataMap) != 0) {
- oldDataIndex += 1
- } else if ((bitpos & nodeMap) != 0) {
- oldNodeIndex += 1
- }
-
- i += 1
- }
-
- new BitmapIndexedMapNode[K, V](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode)
- }
- }
- }
-}
-
-private final class HashCollisionMapNode[K, +V](
- val originalHash: Int,
- val hash: Int,
- var content: Vector[(K, V @uV) @uncheckedCaptures]
- ) extends MapNode[K, V] {
-
- import Node._
-
- require(content.length >= 2)
-
- releaseFence()
-
- private[immutable] def indexOf(key: Any): Int = {
- val iter = content.iterator
- var i = 0
- while (iter.hasNext) {
- if (iter.next()._1 == key) return i
- i += 1
- }
- -1
- }
-
- def size: Int = content.length
-
- def apply(key: K, originalHash: Int, hash: Int, shift: Int): V = get(key, originalHash, hash, shift).getOrElse(throw new NoSuchElementException)
-
- def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] =
- if (this.hash == hash) {
- val index = indexOf(key)
- if (index >= 0) Some(content(index)._2) else None
- } else None
-
- override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = {
- val index = indexOf(key)
- if (index >= 0) content(index) else throw new NoSuchElementException
- }
-
- def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 = {
- if (this.hash == hash) {
- indexOf(key) match {
- case -1 => f
- case other => content(other)._2
- }
- } else f
- }
-
- override def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean =
- this.hash == hash && indexOf(key) >= 0
-
- def contains[V1 >: V](key: K, value: V1, hash: Int, shift: Int): Boolean =
- this.hash == hash && {
- val index = indexOf(key)
- index >= 0 && (content(index)._2.asInstanceOf[AnyRef] eq 
value.asInstanceOf[AnyRef]) - } - - def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] = { - val index = indexOf(key) - if (index >= 0) { - if (replaceValue) { - if (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) { - this - } else { - new HashCollisionMapNode[K, V1](originalHash, hash, content.updated[(K, V1)](index, (key, value))) - } - } else { - this - } - } else { - new HashCollisionMapNode[K, V1](originalHash, hash, content.appended[(K, V1)]((key, value))) - } - } - - def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] = { - if (!this.containsKey(key, originalHash, hash, shift)) { - this - } else { - val updatedContent = content.filterNot(keyValuePair => keyValuePair._1 == key) - // assert(updatedContent.size == content.size - 1) - - updatedContent.size match { - case 1 => - val (k, v) = updatedContent(0) - new BitmapIndexedMapNode[K, V1](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) - case _ => new HashCollisionMapNode[K, V1](originalHash, hash, updatedContent) - } - } - } - - def hasNodes: Boolean = false - - def nodeArity: Int = 0 - - def getNode(index: Int): MapNode[K, V] = - throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") - - def hasPayload: Boolean = true - - def payloadArity: Int = content.length - - def getKey(index: Int): K = getPayload(index)._1 - def getValue(index: Int): V = getPayload(index)._2 - - def getPayload(index: Int): (K, V) = content(index) - - override def getHash(index: Int): Int = originalHash - - def foreach[U](f: ((K, V)) => U): Unit = content.foreach(f) - - def foreachEntry[U](f: (K, V) => U): Unit = content.foreach { case (k, v) => f(k, v)} - - override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { - val iter = content.iterator - while (iter.hasNext) { - val next = iter.next() - f(next._1, next._2, originalHash) - } - } - - override def transform[W](f: (K, V) => W): HashCollisionMapNode[K, W] = { - val newContent = Vector.newBuilder[(K, W)] - val contentIter = content.iterator - // true if any values have been transformed to a different value via `f` - var anyChanges = false - while(contentIter.hasNext) { - val (k, v) = contentIter.next() - val newValue = f(k, v) - newContent.addOne((k, newValue)) - anyChanges ||= (v.asInstanceOf[AnyRef] ne newValue.asInstanceOf[AnyRef]) - } - if (anyChanges) new HashCollisionMapNode(originalHash, hash, newContent.result()) - else this.asInstanceOf[HashCollisionMapNode[K, W]] - } - - override def equals(that: Any): Boolean = - that match { - case node: HashCollisionMapNode[_, _] => - (this eq node) || - (this.hash == node.hash) && - (this.content.length == node.content.length) && { - val iter = content.iterator - while (iter.hasNext) { - val (key, value) = iter.next() - val index = node.indexOf(key) - if (index < 0 || value != node.content(index)._2) { - return false - } - } - true - } - case _ => false - } - - override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): HashCollisionMapNode[K, V1] = that match { - case hc: HashCollisionMapNode[K, V1] => - if (hc eq this) { - this - } else { - var newContent: VectorBuilder[(K, V1)] = null - val iter = content.iterator - while (iter.hasNext) { - val nextPayload = iter.next() - if (hc.indexOf(nextPayload._1) < 0) { - if (newContent eq null) { - newContent = new VectorBuilder[(K, V1)]() - newContent.addAll(hc.content) - } - newContent.addOne(nextPayload) - } - } - if 
(newContent eq null) hc else new HashCollisionMapNode(originalHash, hash, newContent.result()) - } - case _: BitmapIndexedMapNode[K, V1] => - // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes - throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") - } - - - override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { - case hc: HashCollisionMapNode[K, V1] => - val iter = content.iterator - val rightArray = hc.content.toArray[AnyRef] // really Array[(K, V1)] - - def rightIndexOf(key: K): Int = { - var i = 0 - while (i < rightArray.length) { - val elem = rightArray(i) - if ((elem ne null) && (elem.asInstanceOf[(K, V1)])._1 == key) return i - i += 1 - } - -1 - } - - while (iter.hasNext) { - val nextPayload = iter.next() - val index = rightIndexOf(nextPayload._1) - - if (index == -1) { - builder.addOne(nextPayload) - } else { - val rightPayload = rightArray(index).asInstanceOf[(K, V1)] - rightArray(index) = null - - builder.addOne(mergef(nextPayload, rightPayload)) - } - } - - var i = 0 - while (i < rightArray.length) { - val elem = rightArray(i) - if (elem ne null) builder.addOne(elem.asInstanceOf[(K, V1)]) - i += 1 - } - case _: BitmapIndexedMapNode[K, V1] => - throw new Exception("Cannot merge HashCollisionMapNode with BitmapIndexedMapNode") - - } - - override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { - val iter = content.iterator - while (iter.hasNext) { - val (k, v) = iter.next() - builder.addOne(k, v, originalHash, hash) - } - } - - override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): MapNode[K, V] = { - val newContent = content.filterImpl(pred, flipped) - val newContentLength = newContent.length - if (newContentLength == 0) { - MapNode.empty - } else if (newContentLength == 1) { - val (k, v) = newContent.head - new BitmapIndexedMapNode[K, V](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) - } else if (newContentLength == content.length) this - else new HashCollisionMapNode(originalHash, hash, newContent) - } - - override def copy(): HashCollisionMapNode[K, V] = new HashCollisionMapNode[K, V](originalHash, hash, content) - - override def hashCode(): Int = - throw new UnsupportedOperationException("Trie nodes do not support hashing.") - - override def cachedJavaKeySetHashCode: Int = size * hash - -} - -private final class MapKeyIterator[K, V](rootNode: MapNode[K, V]) - extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[K] { - - def next() = { - if (!hasNext) - throw new NoSuchElementException - - val key = currentValueNode.getKey(currentValueCursor) - currentValueCursor += 1 - - key - } - -} - -private final class MapValueIterator[K, V](rootNode: MapNode[K, V]) - extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[V] { - - def next() = { - if (!hasNext) - throw new NoSuchElementException - - val value = currentValueNode.getValue(currentValueCursor) - currentValueCursor += 1 - - value - } -} - -private final class MapKeyValueTupleIterator[K, V](rootNode: MapNode[K, V]) - extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { - - def next() = { - if (!hasNext) - throw new NoSuchElementException - - val payload = currentValueNode.getPayload(currentValueCursor) - currentValueCursor += 1 - - payload - } - -} - -private final class MapKeyValueTupleReverseIterator[K, V](rootNode: 
MapNode[K, V]) - extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { - - def next() = { - if (!hasNext) - throw new NoSuchElementException - - val payload = currentValueNode.getPayload(currentValueCursor) - currentValueCursor -= 1 - - payload - } -} - -private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V]) - extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[Any] { - private[this] var hash = 0 - private[this] var value: V @uncheckedCaptures = _ - override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed) - def next() = { - if (!hasNext) - throw new NoSuchElementException - - hash = currentValueNode.getHash(currentValueCursor) - value = currentValueNode.getValue(currentValueCursor) - currentValueCursor -= 1 - this - } -} - -/** Used in HashMap[K, V]#removeAll(HashSet[K]) */ -private final class MapNodeRemoveAllSetNodeIterator[K](rootSetNode: SetNode[K]) extends ChampBaseIterator(rootSetNode) { - /** Returns the result of immutably removing all keys in `rootSetNode` from `rootMapNode` */ - def removeAll[V](rootMapNode: BitmapIndexedMapNode[K, V]): BitmapIndexedMapNode[K, V] = { - var curr = rootMapNode - while (curr.size > 0 && hasNext) { - val originalHash = currentValueNode.getHash(currentValueCursor) - curr = curr.removed( - key = currentValueNode.getPayload(currentValueCursor), - keyHash = improve(originalHash), - originalHash = originalHash, - shift = 0 - ) - currentValueCursor += 1 - } - curr - } -} - -/** - * $factoryInfo - * - * @define Coll `immutable.HashMap` - * @define coll immutable champ hash map - */ -@SerialVersionUID(3L) -object HashMap extends MapFactory[HashMap] { - - @transient - private final val EmptyMap = new HashMap(MapNode.empty) - - def empty[K, V]: HashMap[K, V] = - EmptyMap.asInstanceOf[HashMap[K, V]] - - def from[K, V](source: collection.IterableOnce[(K, V)]^): HashMap[K, V] = - source match { - case hs: HashMap[K, V] => hs - case _ => (newBuilder[K, V] ++= source).result() - } - - /** Create a new Builder which can be reused after calling `result()` without an - * intermediate call to `clear()` in order to build multiple related results. - */ - def newBuilder[K, V]: ReusableBuilder[(K, V), HashMap[K, V]] = new HashMapBuilder[K, V] -} - - -/** A Builder for a HashMap. - * $multipleResults - */ -private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, V), HashMap[K, V]] { - import MapNode._ - import Node._ - - private def newEmptyRootNode = new BitmapIndexedMapNode[K, V](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) - - /** The last given out HashMap as a return value of `result()`, if any, otherwise null. - * Indicates that on next add, the elements should be copied to an identical structure, before continuing - * mutations. 
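*
- * A minimal usage sketch of this aliasing protocol (illustrative values only):
- * {{{
- * val b = HashMap.newBuilder[String, Int]
- * b.addOne("a" -> 1)
- * val m1 = b.result() // rootNode is now aliased by m1
- * b.addOne("b" -> 2) // copies first, so m1 keeps exactly one entry
- * val m2 = b.result() // m2 has two entries
- * }}}
- 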
*/
- private var aliased: HashMap[K, V] @uncheckedCaptures = _
-
- private def isAliased: Boolean = aliased != null
-
- /** The root node of the partially built hashmap */
- private var rootNode: BitmapIndexedMapNode[K, V] @uncheckedCaptures = newEmptyRootNode
-
- private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 =
- if (rootNode.size == 0) value
- else {
- val originalHash = key.##
- rootNode.getOrElse(key, originalHash, improve(originalHash), 0, value)
- }
-
- /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */
- private[this] def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
- if (ix < 0) throw new ArrayIndexOutOfBoundsException
- if (ix > as.length) throw new ArrayIndexOutOfBoundsException
- val result = new Array[Int](as.length + 1)
- arraycopy(as, 0, result, 0, ix)
- result(ix) = elem
- arraycopy(as, ix, result, ix + 1, as.length - ix)
- result
- }
-
- /** Inserts key-value into the bitmapIndexMapNode. Requires that this is a new key-value pair */
- private[this] def insertValue[V1 >: V](bm: BitmapIndexedMapNode[K, V], bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): Unit = {
- val dataIx = bm.dataIndex(bitpos)
- val idx = TupleLength * dataIx
-
- val src = bm.content
- val dst = new Array[Any](src.length + TupleLength)
-
- // copy 'src' and insert 2 element(s) at position 'idx'
- arraycopy(src, 0, dst, 0, idx)
- dst(idx) = key
- dst(idx + 1) = value
- arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
-
- val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash)
-
- bm.dataMap |= bitpos
- bm.content = dst
- bm.originalHashes = dstHashes
- bm.size += 1
- bm.cachedJavaKeySetHashCode += keyHash
- }
-
- /** Upserts a key/value pair into mapNode, mutably */
- private[immutable] def update(mapNode: MapNode[K, V], key: K, value: V, originalHash: Int, keyHash: Int, shift: Int): Unit = {
- mapNode match {
- case bm: BitmapIndexedMapNode[K, V] =>
- val mask = maskFrom(keyHash, shift)
- val bitpos = bitposFrom(mask)
- if ((bm.dataMap & bitpos) != 0) {
- val index = indexFrom(bm.dataMap, mask, bitpos)
- val key0 = bm.getKey(index)
- val key0UnimprovedHash = bm.getHash(index)
-
- if (key0UnimprovedHash == originalHash && key0 == key) {
- bm.content(TupleLength * index + 1) = value
- } else {
- val value0 = bm.getValue(index)
- val key0Hash = improve(key0UnimprovedHash)
-
- val subNodeNew: MapNode[K, V] =
- bm.mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize)
-
- bm.migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew)
- }
-
- } else if ((bm.nodeMap & bitpos) != 0) {
- val index = indexFrom(bm.nodeMap, mask, bitpos)
- val subNode = bm.getNode(index)
- val beforeSize = subNode.size
- val beforeHash = subNode.cachedJavaKeySetHashCode
- update(subNode, key, value, originalHash, keyHash, shift + BitPartitionSize)
- bm.size += subNode.size - beforeSize
- bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHash
- } else {
- insertValue(bm, bitpos, key, originalHash, keyHash, value)
- }
- case hc: HashCollisionMapNode[K, V] =>
- val index = hc.indexOf(key)
- if (index < 0) {
- hc.content = hc.content.appended((key, value))
- } else {
- hc.content = hc.content.updated(index, (key, value))
- }
- }
- }
-
- /** If currently referencing aliased structure, copy elements to new mutable structure */
- private[this] def ensureUnaliased() = {
- if (isAliased) copyElems()
- aliased = null
- }
-
- 
/** Copy elements to new mutable structure */ - private[this] def copyElems(): Unit = { - rootNode = rootNode.copy() - } - - override def result(): HashMap[K, V] = - if (rootNode.size == 0) { - HashMap.empty - } else if (aliased != null) { - aliased - } else { - aliased = new HashMap(rootNode) - releaseFence() - aliased - } - - override def addOne(elem: (K, V)): this.type = { - ensureUnaliased() - val h = elem._1.## - val im = improve(h) - update(rootNode, elem._1, elem._2, h, im, 0) - this - } - - def addOne(key: K, value: V): this.type = { - ensureUnaliased() - val originalHash = key.## - update(rootNode, key, value, originalHash, improve(originalHash), 0) - this - } - def addOne(key: K, value: V, originalHash: Int): this.type = { - ensureUnaliased() - update(rootNode, key, value, originalHash, improve(originalHash), 0) - this - } - def addOne(key: K, value: V, originalHash: Int, hash: Int): this.type = { - ensureUnaliased() - update(rootNode, key, value, originalHash, hash, 0) - this - } - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = { - ensureUnaliased() - xs match { - case hm: HashMap[K, V] => - new ChampBaseIterator[MapNode[K, V]](hm.rootNode) { - while(hasNext) { - val originalHash = currentValueNode.getHash(currentValueCursor) - update( - mapNode = rootNode, - key = currentValueNode.getKey(currentValueCursor), - value = currentValueNode.getValue(currentValueCursor), - originalHash = originalHash, - keyHash = improve(originalHash), - shift = 0 - ) - currentValueCursor += 1 - } - }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position - case hm: collection.mutable.HashMap[K, V] => - val iter = hm.nodeIterator - while (iter.hasNext) { - val next = iter.next() - val originalHash = hm.unimproveHash(next.hash) - val hash = improve(originalHash) - update(rootNode, next.key, next.value, originalHash, hash, 0) - } - case lhm: collection.mutable.LinkedHashMap[K, V] => - val iter = lhm.entryIterator - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhm.unimproveHash(next.hash) - val hash = improve(originalHash) - update(rootNode, next.key, next.value, originalHash, hash, 0) - } - case thatMap: Map[K, V] => - thatMap.foreachEntry((key, value) => addOne(key, value)) - case other => - val it = other.iterator - while(it.hasNext) addOne(it.next()) - } - - this - } - - override def clear(): Unit = { - aliased = null - if (rootNode.size > 0) { - rootNode = newEmptyRootNode - } - } - - private[collection] def size: Int = rootNode.size - - override def knownSize: Int = rootNode.size -} diff --git a/tests/pos-special/stdlib/collection/immutable/HashSet.scala b/tests/pos-special/stdlib/collection/immutable/HashSet.scala deleted file mode 100644 index 38f394a7005f..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/HashSet.scala +++ /dev/null @@ -1,2125 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable - -import java.lang.Integer.{bitCount, numberOfTrailingZeros} -import java.lang.System.arraycopy - -import scala.collection.Hashing.improve -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.ReusableBuilder -import scala.runtime.Statics.releaseFence -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree. - * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. - * - * @tparam A the type of the elements contained in this hash set. - * @define Coll `immutable.HashSet` - * @define coll immutable champ hash set - */ -final class HashSet[A] private[immutable](private[immutable] val rootNode: BitmapIndexedSetNode[A]) - extends AbstractSet[A] - with StrictOptimizedSetOps[A, HashSet, HashSet[A]] - with IterableFactoryDefaults[A, HashSet] - with DefaultSerializable { - - def this() = this(SetNode.empty) - - // This release fence is present because rootNode may have previously been mutated during construction. - releaseFence() - - private[this] def newHashSetOrThis(newRootNode: BitmapIndexedSetNode[A]): HashSet[A] = - if (rootNode eq newRootNode) this else new HashSet(newRootNode) - - override def iterableFactory: IterableFactory[HashSet] = HashSet - - override def knownSize: Int = rootNode.size - - override def size: Int = rootNode.size - - override def isEmpty: Boolean = rootNode.size == 0 - - def iterator: Iterator[A] = { - if (isEmpty) Iterator.empty - else new SetIterator[A](rootNode) - } - - protected[immutable] def reverseIterator: Iterator[A] = new SetReverseIterator[A](rootNode) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => IntChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Int]) - case StepperShape.LongShape => LongChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleChampStepper.from[SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Double]) - case _ => shape.parUnbox(AnyChampStepper.from[A, SetNode[A]](size, rootNode, (node, i) => node.getPayload(i))) - } - s.asInstanceOf[S with EfficientSplit] - } - - def contains(element: A): Boolean = { - val elementUnimprovedHash = element.## - val elementHash = improve(elementUnimprovedHash) - rootNode.contains(element, elementUnimprovedHash, elementHash, 0) - } - - def incl(element: A): HashSet[A] = { - val elementUnimprovedHash = element.## - val elementHash = improve(elementUnimprovedHash) - val newRootNode = rootNode.updated(element, elementUnimprovedHash, elementHash, 0) - newHashSetOrThis(newRootNode) - } - - def excl(element: A): HashSet[A] = { - val elementUnimprovedHash = element.## - val elementHash = improve(elementUnimprovedHash) - val newRootNode = rootNode.removed(element, elementUnimprovedHash, elementHash, 0) - newHashSetOrThis(newRootNode) - } - - override def concat(that: IterableOnce[A]): HashSet[A] = - that match { - case hs: HashSet[A] => - if (isEmpty) hs - else { - val newNode = rootNode.concat(hs.rootNode, 0) - if (newNode eq hs.rootNode) hs - else newHashSetOrThis(newNode) - } - case 
hs: collection.mutable.HashSet[A] => - val iter = hs.nodeIterator - var current = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = hs.unimproveHash(next.hash) - val improved = improve(originalHash) - current = current.updated(next.key, originalHash, improved, 0) - - if (current ne rootNode) { - var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - while (iter.hasNext) { - val next = iter.next() - val originalHash = hs.unimproveHash(next.hash) - val improved = improve(originalHash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) - } - return new HashSet(current) - } - } - this - case lhs: collection.mutable.LinkedHashSet[A] => - val iter = lhs.entryIterator - var current = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhs.unimproveHash(next.hash) - val improved = improve(originalHash) - current = current.updated(next.key, originalHash, improved, 0) - - if (current ne rootNode) { - var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhs.unimproveHash(next.hash) - val improved = improve(originalHash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) - } - return new HashSet(current) - } - } - this - case _ => - val iter = that.iterator - var current = rootNode - while (iter.hasNext) { - val element = iter.next() - val originalHash = element.## - val improved = improve(originalHash) - current = current.updated(element, originalHash, improved, 0) - - if (current ne rootNode) { - // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that - // the first changed key ended up in a subnode beneath root, we mark that root right away as being - // shallowly mutable. - // - // since `element` has just been inserted, and certainly caused a new root node to be created, we can say with - // certainty that it either caused a new subnode to be created underneath `current`, in which case we should - // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is - // done by including its bit position in the shallowlyMutableNodeMap anyways. 
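-          // For example (hypothetical numbers): if improve(element.##) has mask 5 at shift 0, then
-          // bit 5 is set below, and any later element routed through that same root child can be
-          // inserted via updateWithShallowMutations, mutating the child in place instead of copying it.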
- var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - while (iter.hasNext) { - val element = iter.next() - val originalHash = element.## - val improved = improve(originalHash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(element, originalHash, improved, 0, shallowlyMutableNodeMap) - } - return new HashSet(current) - } - } - this - } - - override def tail: HashSet[A] = this - head - - override def init: HashSet[A] = this - last - - override def head: A = iterator.next() - - override def last: A = reverseIterator.next() - - override def foreach[U](f: A => U): Unit = rootNode.foreach(f) - - /** Applies a function f to each element, and its corresponding **original** hash, in this Set */ - @`inline` private[collection] def foreachWithHash(f: (A, Int) => Unit): Unit = rootNode.foreachWithHash(f) - - /** Applies a function f to each element, and its corresponding **original** hash, in this Set - * Stops iterating the first time that f returns `false`.*/ - @`inline` private[collection] def foreachWithHashWhile(f: (A, Int) => Boolean): Unit = rootNode.foreachWithHashWhile(f) - - def subsetOf(that: Set[A]): Boolean = if (that.isEmpty) true else that match { - case set: HashSet[A] => rootNode.subsetOf(set.rootNode, 0) - case _ => super.subsetOf(that) - } - - override def equals(that: Any): Boolean = - that match { - case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode) - case _ => super.equals(that) - } - - override protected[this] def className = "HashSet" - - override def hashCode(): Int = { - val it = new SetHashIterator(rootNode) - val hash = MurmurHash3.unorderedHash(it, MurmurHash3.setSeed) - //assert(hash == super.hashCode()) - hash - } - - override def diff(that: collection.Set[A]): HashSet[A] = { - if (isEmpty) { - this - } else { - that match { - case hashSet: HashSet[A] => - if (hashSet.isEmpty) this else { - val newRootNode = rootNode.diff(hashSet.rootNode, 0) - if (newRootNode.size == 0) HashSet.empty else newHashSetOrThis(rootNode.diff(hashSet.rootNode, 0)) - } - case hashSet: collection.mutable.HashSet[A] => - val iter = hashSet.nodeIterator - var curr = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = hashSet.unimproveHash(next.hash) - val improved = improve(originalHash) - curr = curr.removed(next.key, originalHash, improved, 0) - if (curr ne rootNode) { - if (curr.size == 0) { - return HashSet.empty - } - while (iter.hasNext) { - val next = iter.next() - val originalHash = hashSet.unimproveHash(next.hash) - val improved = improve(originalHash) - - curr.removeWithShallowMutations(next.key, originalHash, improved) - - if (curr.size == 0) { - return HashSet.empty - } - } - return new HashSet(curr) - } - } - this - - case other => - val thatKnownSize = other.knownSize - - if (thatKnownSize == 0) { - this - } else if (thatKnownSize <= size) { - /* this branch intentionally includes the case of thatKnownSize == -1. We know that HashSets are quite fast at look-up, so - we're likely to be the faster of the two at that. */ - removedAllWithShallowMutations(other) - } else { - // TODO: Develop more sophisticated heuristic for which branch to take - filterNot(other.contains) - } - } - - } - } - - /** Immutably removes all elements of `that` from this HashSet - * - * Mutation is used internally, but only on root SetNodes which this method itself creates. 
- * - * That is, this method is safe to call on published sets because it does not mutate `this` - */ - private[this] def removedAllWithShallowMutations(that: IterableOnce[A]): HashSet[A] = { - val iter = that.iterator - var curr = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = next.## - val improved = improve(originalHash) - curr = curr.removed(next, originalHash, improved, 0) - if (curr ne rootNode) { - if (curr.size == 0) { - return HashSet.empty - } - while (iter.hasNext) { - val next = iter.next() - val originalHash = next.## - val improved = improve(originalHash) - - curr.removeWithShallowMutations(next, originalHash, improved) - - if (curr.size == 0) { - return HashSet.empty - } - } - return new HashSet(curr) - } - } - this - } - - override def removedAll(that: IterableOnce[A]): HashSet[A] = that match { - case set: scala.collection.Set[A] => diff(set) - case range: Range if range.length > size => - filter { - case i: Int => !range.contains(i) - case _ => true - } - - case _ => - removedAllWithShallowMutations(that) - } - - override def partition(p: A => Boolean): (HashSet[A], HashSet[A]) = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.partition(p) - } - - override def span(p: A => Boolean): (HashSet[A], HashSet[A]) = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.span(p) - } - - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): HashSet[A] = { - val newRootNode = rootNode.filterImpl(pred, isFlipped) - if (newRootNode eq rootNode) this - else if (newRootNode.size == 0) HashSet.empty - else new HashSet(newRootNode) - } - - override def intersect(that: collection.Set[A]): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.intersect(that) - } - - override def take(n: Int): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.take(n) - } - - override def takeRight(n: Int): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.takeRight(n) - } - - override def takeWhile(p: A => Boolean): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.takeWhile(p) - } - - override def drop(n: Int): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.drop(n) - } - - override def dropRight(n: Int): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. 
- super.dropRight(n) - } - - override def dropWhile(p: A => Boolean): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.dropWhile(p) - } -} - -private[immutable] object SetNode { - - private final val EmptySetNode = new BitmapIndexedSetNode(0, 0, Array.empty, Array.empty, 0, 0) - - def empty[A]: BitmapIndexedSetNode[A] = EmptySetNode.asInstanceOf[BitmapIndexedSetNode[A]] - - final val TupleLength = 1 - -} - -private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] { - - def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean - - def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] - - def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] - - def hasNodes: Boolean - - def nodeArity: Int - - def getNode(index: Int): SetNode[A] - - def hasPayload: Boolean - - def payloadArity: Int - - def getPayload(index: Int): A - - def size: Int - - def foreach[U](f: A => U): Unit - - def subsetOf(that: SetNode[A], shift: Int): Boolean - - def copy(): SetNode[A] - - def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] - - def diff(that: SetNode[A], shift: Int): SetNode[A] - - def concat(that: SetNode[A], shift: Int): SetNode[A] - - def foreachWithHash(f: (A, Int) => Unit): Unit - - def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean -} - -private final class BitmapIndexedSetNode[A]( - var dataMap: Int, - var nodeMap: Int, - var content: Array[Any], - var originalHashes: Array[Int], - var size: Int, - var cachedJavaKeySetHashCode: Int) extends SetNode[A] { - - import Node._ - import SetNode._ - - /* - assert(checkInvariantContentIsWellTyped()) - assert(checkInvariantSubNodesAreCompacted()) - - private final def checkInvariantSubNodesAreCompacted(): Boolean = - new SetIterator[A](this).size - payloadArity >= 2 * nodeArity - - private final def checkInvariantContentIsWellTyped(): Boolean = { - val predicate1 = TupleLength * payloadArity + nodeArity == content.length - - val predicate2 = Range(0, TupleLength * payloadArity) - .forall(i => content(i).isInstanceOf[SetNode[_]] == false) - - val predicate3 = Range(TupleLength * payloadArity, content.length) - .forall(i => content(i).isInstanceOf[SetNode[_]] == true) - - predicate1 && predicate2 && predicate3 - } - */ - - def getPayload(index: Int): A = content(index).asInstanceOf[A] - - override def getHash(index: Int): Int = originalHashes(index) - - def getNode(index: Int): SetNode[A] = content(content.length - 1 - index).asInstanceOf[SetNode[A]] - - def contains(element: A, originalHash: Int, elementHash: Int, shift: Int): Boolean = { - val mask = maskFrom(elementHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - return originalHashes(index) == originalHash && element == this.getPayload(index) - } - - if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - return this.getNode(index).contains(element, originalHash, elementHash, shift + BitPartitionSize) - } - - false - } - - def updated(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { - val mask = maskFrom(elementHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val element0 = this.getPayload(index) - - if (element0.asInstanceOf[AnyRef] 
eq element.asInstanceOf[AnyRef]) {
-        return this
-      } else {
-        val element0UnimprovedHash = getHash(index)
-        val element0Hash = improve(element0UnimprovedHash)
-        if (originalHash == element0UnimprovedHash && element0 == element) {
-          return this
-        } else {
-          val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
-          return copyAndMigrateFromInlineToNode(bitpos, element0Hash, subNodeNew)
-        }
-      }
-    }
-    if ((nodeMap & bitpos) != 0) {
-      val index = indexFrom(nodeMap, mask, bitpos)
-      val subNode = this.getNode(index)
-
-      val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize)
-      if (subNode eq subNodeNew) {
-        return this
-      } else {
-        return copyAndSetNode(bitpos, subNode, subNodeNew)
-      }
-    }
-
-    copyAndInsertValue(bitpos, element, originalHash, elementHash)
-  }
-  /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately
-   * descendant child nodes (only one level beneath `this`).
-   *
-   * The caller should pass a bitmap of child nodes of this node, which this method may mutate.
-   * If this method may mutate a child node, then if the updated value is located in that child node, it will
-   * be shallowly mutated (its children will not be mutated).
-   *
-   * If instead this method may not mutate the child node in which the to-be-updated value is located, then
-   * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node.
-   *
-   * @param element the element to insert
-   * @param originalHash the original hash of `element` (`element.##`)
-   * @param elementHash the improved hash of `element`
-   * @param shift the shift of this node in the trie (0 for the root)
-   * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated
-   *                                during the call to this method
-   *
-   * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be
-   *         available for mutations in subsequent calls.
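-   *
-   * Illustrative use (a sketch; `root`, `a` and `b` are hypothetical, with `a` and `b` routed
-   * through the same root-level child):
-   * {{{
-   *   var mutableMap = 0
-   *   mutableMap = root.updateWithShallowMutations(a, a.##, improve(a.##), 0, mutableMap)
-   *   // the bit of any child created for `a` is now set in `mutableMap`, so the next call
-   *   mutableMap = root.updateWithShallowMutations(b, b.##, improve(b.##), 0, mutableMap)
-   *   // may mutate that child in place instead of copying it
-   * }}}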
- */ - def updateWithShallowMutations(element: A, originalHash: Int, elementHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { - val mask = maskFrom(elementHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val element0 = getPayload(index) - val element0UnimprovedHash = getHash(index) - if (element0UnimprovedHash == originalHash && element0 == element) { - shallowlyMutableNodeMap - } else { - val element0Hash = improve(element0UnimprovedHash) - val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) - migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew) - shallowlyMutableNodeMap | bitpos - } - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - val subNodeSize = subNode.size - val subNodeCachedJavaKeySetHashCode = subNode.cachedJavaKeySetHashCode - - var returnNodeMap = shallowlyMutableNodeMap - - val subNodeNew: SetNode[A] = subNode match { - case subNodeBm: BitmapIndexedSetNode[A] if (bitpos & shallowlyMutableNodeMap) != 0 => - subNodeBm.updateWithShallowMutations(element, originalHash, elementHash, shift + BitPartitionSize, 0) - subNodeBm - case _ => - val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) - if (subNodeNew ne subNode) { - returnNodeMap |= bitpos - } - subNodeNew - } - - this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew - this.size = this.size - subNodeSize + subNodeNew.size - this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeCachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode - returnNodeMap - } else { - val dataIx = dataIndex(bitpos) - val idx = dataIx - - val src = this.content - val dst = new Array[Any](src.length + TupleLength) - - // copy 'src' and insert 2 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - dst(idx) = element - arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) - - val dstHashes = insertElement(originalHashes, dataIx, originalHash) - - this.dataMap |= bitpos - this.content = dst - this.originalHashes = dstHashes - this.size += 1 - this.cachedJavaKeySetHashCode += elementHash - shallowlyMutableNodeMap - } - } - - - def removed(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { - val mask = maskFrom(elementHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val element0 = this.getPayload(index) - - if (element0 == element) { - if (this.payloadArity == 2 && this.nodeArity == 0) { - /* - * Create new node with remaining pair. The new node will a) either become the new root - * returned, or b) unwrapped and inlined during returning. 
-           */
-          val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(elementHash, 0))
-          if (index == 0)
-            return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(1)), Array(originalHashes(1)), size - 1, improve(originalHashes(1)))
-          else
-            return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(0)), Array(originalHashes(0)), size - 1, improve(originalHashes(0)))
-        }
-        else return copyAndRemoveValue(bitpos, elementHash)
-      } else return this
-    }
-
-    if ((nodeMap & bitpos) != 0) {
-      val index = indexFrom(nodeMap, mask, bitpos)
-      val subNode = this.getNode(index)
-
-      val subNodeNew = subNode.removed(element, originalHash, elementHash, shift + BitPartitionSize)
-
-      if (subNodeNew eq subNode) return this
-
-      // cache just in case subNodeNew is a hashCollision node, in which case a little arithmetic is avoided
-      // in Vector#length
-      val subNodeNewSize = subNodeNew.size
-
-      if (subNodeNewSize == 1) {
-        if (this.size == subNode.size) {
-          // subNode is the only child (no other data or node children of `this` exist)
-          // escalate (singleton or empty) result
-          return subNodeNew.asInstanceOf[BitmapIndexedSetNode[A]]
-        } else {
-          // inline value (move to front)
-          return copyAndMigrateFromNodeToInline(bitpos, elementHash, subNode, subNodeNew)
-        }
-      } else if (subNodeNewSize > 1) {
-        // modify current node (set replacement node)
-        return copyAndSetNode(bitpos, subNode, subNodeNew)
-      }
-    }
-
-    this
-  }
-  /** Variant of `removed` which will perform mutation only on the top-level node (`this`), rather than
-   * returning a new node.
-   *
-   * Should only be called on root nodes, because `shift` is assumed to be 0.
-   *
-   * @param element the element to remove
-   * @param originalHash the original hash of `element`
-   * @param elementHash the improved hash of `element`
-   */
-  def removeWithShallowMutations(element: A, originalHash: Int, elementHash: Int): this.type = {
-    val mask = maskFrom(elementHash, 0)
-    val bitpos = bitposFrom(mask)
-
-    if ((dataMap & bitpos) != 0) {
-      val index = indexFrom(dataMap, mask, bitpos)
-      val element0 = this.getPayload(index)
-
-      if (element0 == element) {
-        if (this.payloadArity == 2 && this.nodeArity == 0) {
-          val newDataMap = dataMap ^ bitpos
-          if (index == 0) {
-            val newContent = Array[Any](getPayload(1))
-            val newOriginalHashes = Array(originalHashes(1))
-            val newCachedJavaKeySetHashCode = improve(getHash(1))
-            this.content = newContent
-            this.originalHashes = newOriginalHashes
-            this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode
-          } else {
-            val newContent = Array[Any](getPayload(0))
-            val newOriginalHashes = Array(originalHashes(0))
-            val newCachedJavaKeySetHashCode = improve(getHash(0))
-            this.content = newContent
-            this.originalHashes = newOriginalHashes
-            this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode
-          }
-          this.dataMap = newDataMap
-          this.nodeMap = 0
-          this.size = 1
-          this
-        }
-        else {
-          val dataIx = dataIndex(bitpos)
-          val idx = TupleLength * dataIx
-
-          val src = this.content
-          val dst = new Array[Any](src.length - TupleLength)
-
-          arraycopy(src, 0, dst, 0, idx)
-          arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength)
-
-          val dstHashes = removeElement(originalHashes, dataIx)
-
-          this.dataMap = this.dataMap ^ bitpos
-          this.content = dst
-          this.originalHashes = dstHashes
-          this.size -= 1
-          this.cachedJavaKeySetHashCode -= elementHash
-          this
-        }
-      } else this
-    } else if ((nodeMap & bitpos) != 0) {
-      val index = indexFrom(nodeMap, mask, bitpos)
-      val subNode = this.getNode(index)
-
-      val
subNodeNew = subNode.removed(element, originalHash, elementHash, BitPartitionSize).asInstanceOf[BitmapIndexedSetNode[A]] - - if (subNodeNew eq subNode) return this - - if (subNodeNew.size == 1) { - if (this.payloadArity == 0 && this.nodeArity == 1) { - this.dataMap = subNodeNew.dataMap - this.nodeMap = subNodeNew.nodeMap - this.content = subNodeNew.content - this.originalHashes = subNodeNew.originalHashes - this.size = subNodeNew.size - this.cachedJavaKeySetHashCode = subNodeNew.cachedJavaKeySetHashCode - this - } else { - migrateFromNodeToInlineInPlace(bitpos, originalHash, elementHash, subNode, subNodeNew) - this - } - } else { - // size must be > 1 - this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew - this.size -= 1 - this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNode.cachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode - this - } - } else this - } - - def mergeTwoKeyValPairs(key0: A, originalKeyHash0: Int, keyHash0: Int, key1: A, originalKeyHash1: Int, keyHash1: Int, shift: Int): SetNode[A] = { - // assert(key0 != key1) - - if (shift >= HashCodeLength) { - new HashCollisionSetNode[A](originalKeyHash0, keyHash0, Vector(key0, key1)) - } else { - val mask0 = maskFrom(keyHash0, shift) - val mask1 = maskFrom(keyHash1, shift) - - if (mask0 != mask1) { - // unique prefixes, payload fits on same level - val dataMap = bitposFrom(mask0) | bitposFrom(mask1) - val newCachedHashCode = keyHash0 + keyHash1 - - if (mask0 < mask1) { - new BitmapIndexedSetNode[A](dataMap, 0, Array(key0, key1), Array(originalKeyHash0, originalKeyHash1), 2, newCachedHashCode) - } else { - new BitmapIndexedSetNode[A](dataMap, 0, Array(key1, key0), Array(originalKeyHash1, originalKeyHash0), 2, newCachedHashCode) - } - } else { - // identical prefixes, payload must be disambiguated deeper in the trie - val nodeMap = bitposFrom(mask0) - val node = mergeTwoKeyValPairs(key0, originalKeyHash0, keyHash0, key1, originalKeyHash1, keyHash1, shift + BitPartitionSize) - - new BitmapIndexedSetNode[A](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) - } - } - } - - def hasPayload: Boolean = dataMap != 0 - - def payloadArity: Int = bitCount(dataMap) - - def hasNodes: Boolean = nodeMap != 0 - - def nodeArity: Int = bitCount(nodeMap) - - def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) - - def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) - - def copyAndSetNode(bitpos: Int, oldNode: SetNode[A], newNode: SetNode[A]) = { - val idx = this.content.length - 1 - this.nodeIndex(bitpos) - - val src = this.content - val dst = new Array[Any](src.length) - - // copy 'src' and set 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, src.length) - dst(idx) = newNode - new BitmapIndexedSetNode[A]( - dataMap = dataMap, - nodeMap = nodeMap, - content = dst, - originalHashes = originalHashes, - size = size - oldNode.size + newNode.size, - cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode - ) - } - - def copyAndInsertValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length + 1) - - // copy 'src' and insert 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - dst(idx) = key - arraycopy(src, idx, dst, idx + 1, src.length - idx) - val dstHashes = insertElement(originalHashes, dataIx, originalHash) - - new 
BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + elementHash)
-  }
-
-  def copyAndSetValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = {
-    val dataIx = dataIndex(bitpos)
-    val idx = TupleLength * dataIx
-
-    val src = this.content
-    val dst = new Array[Any](src.length)
-
-    // copy 'src' and set 1 element(s) at position 'idx'
-    arraycopy(src, 0, dst, 0, src.length)
-    dst(idx) = key
-
-    new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode)
-  }
-
-  def copyAndRemoveValue(bitpos: Int, elementHash: Int) = {
-    val dataIx = dataIndex(bitpos)
-    val idx = TupleLength * dataIx
-
-    val src = this.content
-    val dst = new Array[Any](src.length - 1)
-
-    // copy 'src' and remove 1 element(s) at position 'idx'
-    arraycopy(src, 0, dst, 0, idx)
-    arraycopy(src, idx + 1, dst, idx, src.length - idx - 1)
-    val dstHashes = removeElement(originalHashes, dataIx)
-    new BitmapIndexedSetNode[A](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - elementHash)
-  }
-
-  def copyAndMigrateFromInlineToNode(bitpos: Int, elementHash: Int, node: SetNode[A]) = {
-    val dataIx = dataIndex(bitpos)
-    val idxOld = TupleLength * dataIx
-    val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
-
-    val src = this.content
-    val dst = new Array[Any](src.length - 1 + 1)
-
-    // copy 'src' and remove 1 element(s) at position 'idxOld' and
-    // insert 1 element(s) at position 'idxNew'
-    // assert(idxOld <= idxNew)
-    arraycopy(src, 0, dst, 0, idxOld)
-    arraycopy(src, idxOld + 1, dst, idxOld, idxNew - idxOld)
-    dst(idxNew) = node
-    arraycopy(src, idxNew + 1, dst, idxNew + 1, src.length - idxNew - 1)
-    val dstHashes = removeElement(originalHashes, dataIx)
-    new BitmapIndexedSetNode[A](
-      dataMap = dataMap ^ bitpos,
-      nodeMap = nodeMap | bitpos,
-      content = dst, originalHashes = dstHashes,
-      size = size - 1 + node.size,
-      cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - elementHash + node.cachedJavaKeySetHashCode
-    )
-  }
-  /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node.
-   *
-   * Note: This method will mutate `this`, and will mutate `this.content`
-   *
-   * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets,
-   * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place,
-   * we reuse this.content by shifting data/nodes around, rather than allocating a new array.
-   *
-   * @param bitpos the bit position of the data to migrate to node
-   * @param keyHash the improved hash of the element currently at `bitpos`
-   * @param node the node to place at `bitpos`
-   */
-  def migrateFromInlineToNodeInPlace(bitpos: Int, keyHash: Int, node: SetNode[A]): this.type = {
-    val dataIx = dataIndex(bitpos)
-    val idxOld = TupleLength * dataIx
-    val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
-
-    arraycopy(content, idxOld + TupleLength, content, idxOld, idxNew - idxOld)
-    content(idxNew) = node
-
-    this.dataMap = this.dataMap ^ bitpos
-    this.nodeMap = this.nodeMap | bitpos
-    this.originalHashes = removeElement(originalHashes, dataIx)
-    this.size = this.size - 1 + node.size
-    this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode
-    this
-  }
-
-  def copyAndMigrateFromNodeToInline(bitpos: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]) = {
-    val idxOld = this.content.length - 1 - nodeIndex(bitpos)
-    val dataIxNew = dataIndex(bitpos)
-    val idxNew = TupleLength * dataIxNew
-
-    val src = this.content
-    val dst = new Array[Any](src.length - 1 + 1)
-
-    // copy 'src' and remove 1 element(s) at position 'idxOld' and
-    // insert 1 element(s) at position 'idxNew'
-    // assert(idxOld >= idxNew)
-    arraycopy(src, 0, dst, 0, idxNew)
-    dst(idxNew) = node.getPayload(0)
-    arraycopy(src, idxNew, dst, idxNew + 1, idxOld - idxNew)
-    arraycopy(src, idxOld + 1, dst, idxOld + 1, src.length - idxOld - 1)
-    val hash = node.getHash(0)
-    val dstHashes = insertElement(originalHashes, dataIxNew, hash)
-    new BitmapIndexedSetNode[A](
-      dataMap = dataMap | bitpos,
-      nodeMap = nodeMap ^ bitpos,
-      content = dst,
-      originalHashes = dstHashes,
-      size = size - oldNode.size + 1,
-      cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode
-    )
-  }
-
-  /** Variant of `copyAndMigrateFromNodeToInline` which mutates `this` rather than returning a new node.
-   *
-   * Note: This method will mutate `this`, and will mutate `this.content`
-   *
-   * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets,
-   * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place,
-   * we reuse this.content by shifting data/nodes around, rather than allocating a new array.
- * - * @param bitpos the bit position of the node to migrate inline - * @param oldNode the node currently stored at position `bitpos` - * @param node the node containing the single element to migrate inline - */ - def migrateFromNodeToInlineInPlace(bitpos: Int, originalHash: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]): Unit = { - val idxOld = this.content.length - 1 - nodeIndex(bitpos) - val dataIxNew = dataIndex(bitpos) - val element = node.getPayload(0) - arraycopy(content, dataIxNew, content, dataIxNew + 1, idxOld - dataIxNew) - content(dataIxNew) = element - val hash = node.getHash(0) - val dstHashes = insertElement(originalHashes, dataIxNew, hash) - - this.dataMap = this.dataMap | bitpos - this.nodeMap = this.nodeMap ^ bitpos - this.originalHashes = dstHashes - this.size = this.size - oldNode.size + 1 - this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode - } - - def foreach[U](f: A => U): Unit = { - val thisPayloadArity = payloadArity - var i = 0 - while (i < thisPayloadArity) { - f(getPayload(i)) - i += 1 - } - - val thisNodeArity = nodeArity - var j = 0 - while (j < thisNodeArity) { - getNode(j).foreach(f) - j += 1 - } - } - - def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match { - case _: HashCollisionSetNode[A] => false - case node: BitmapIndexedSetNode[A] => - val thisBitmap = this.dataMap | this.nodeMap - val nodeBitmap = node.dataMap | node.nodeMap - - if ((thisBitmap | nodeBitmap) != nodeBitmap) - return false - - var bitmap = thisBitmap & nodeBitmap - var bitsToSkip = numberOfTrailingZeros(bitmap) - - var isValidSubset = true - while (isValidSubset && bitsToSkip < HashCodeLength) { - val bitpos = bitposFrom(bitsToSkip) - - isValidSubset = - if ((this.dataMap & bitpos) != 0) { - if ((node.dataMap & bitpos) != 0) { - // Data x Data - val payload0 = this.getPayload(indexFrom(this.dataMap, bitpos)) - val payload1 = node.getPayload(indexFrom(node.dataMap, bitpos)) - payload0 == payload1 - } else { - // Data x Node - val thisDataIndex = indexFrom(this.dataMap, bitpos) - val payload = this.getPayload(thisDataIndex) - val subNode = that.getNode(indexFrom(node.nodeMap, bitpos)) - val elementUnimprovedHash = getHash(thisDataIndex) - val elementHash = improve(elementUnimprovedHash) - subNode.contains(payload, elementUnimprovedHash, elementHash, shift + BitPartitionSize) - } - } else { - // Node x Node - val subNode0 = this.getNode(indexFrom(this.nodeMap, bitpos)) - val subNode1 = node.getNode(indexFrom(node.nodeMap, bitpos)) - subNode0.subsetOf(subNode1, shift + BitPartitionSize) - } - - val newBitmap = bitmap ^ bitpos - bitmap = newBitmap - bitsToSkip = numberOfTrailingZeros(newBitmap) - } - isValidSubset - } - - override def filterImpl(pred: A => Boolean, flipped: Boolean): BitmapIndexedSetNode[A] = { - if (size == 0) this - else if (size == 1) { - if (pred(getPayload(0)) != flipped) this else SetNode.empty - } else if (nodeMap == 0) { - // Performance optimization for nodes of depth 1: - // - // this node has no "node" children, all children are inlined data elems, therefor logic is significantly simpler - // approach: - // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter - // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations - // * traverse the content array once more, placing each passing element (according 
to `newDatamap`) in the new content and originalHashes arrays - // - // note: - // * this optimization significantly improves performance of not only small trees, but also larger trees, since - // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as - // descendants - // - val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) - val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap) - - var newDataMap = 0 - var newCachedHashCode = 0 - var dataIndex = 0 - - var i = minimumIndex - - while(i < maximumIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & dataMap) != 0) { - val payload = getPayload(dataIndex) - val passed = pred(payload) != flipped - - if (passed) { - newDataMap |= bitpos - newCachedHashCode += improve(getHash(dataIndex)) - } - - dataIndex += 1 - } - - i += 1 - } - - if (newDataMap == 0) { - SetNode.empty - } else if (newDataMap == dataMap) { - this - } else { - val newSize = Integer.bitCount(newDataMap) - val newContent = new Array[Any](newSize) - val newOriginalHashCodes = new Array[Int](newSize) - val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) - - var j = Integer.numberOfTrailingZeros(newDataMap) - - var newDataIndex = 0 - - while (j < newMaximumIndex) { - val bitpos = bitposFrom(j) - if ((bitpos & newDataMap) != 0) { - val oldIndex = indexFrom(dataMap, bitpos) - newContent(newDataIndex) = content(oldIndex) - newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) - newDataIndex += 1 - } - j += 1 - } - - new BitmapIndexedSetNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) - } - } else { - val allMap = dataMap | nodeMap - val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) - val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - - var oldDataPassThrough = 0 - - // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data - var nodeMigrateToDataTargetMap = 0 - - // TODO: When filtering results in a single-elem node, simply `(A, originalHash, improvedHash)` could be returned, - // rather than a singleton node (to avoid pointlessly allocating arrays, nodes, which would just be inlined in - // the parent anyways). This would probably involve changing the return type of filterImpl to `AnyRef` which may - // return at runtime a SetNode[A], or a tuple of (A, Int, Int) - - // the queue of single-element, post-filter nodes - var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null - - // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node - var nodesToPassThroughMap = 0 - - // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself - // These are stored for later inclusion into the final `content` array - // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) - var mapOfNewNodes = 0 - // each bit in `mapOfNewNodes` corresponds to one element in this queue - var newNodes: mutable.Queue[SetNode[A]] = null - - var newDataMap = 0 - var newNodeMap = 0 - var newSize = 0 - var newCachedHashCode = 0 - - var dataIndex = 0 - var nodeIndex = 0 - - var i = minimumIndex - while (i < maximumIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & dataMap) != 0) { - val payload = getPayload(dataIndex) - val passed = pred(payload) != flipped - - if (passed) { - newDataMap |= bitpos - oldDataPassThrough |= bitpos - newSize += 1 - newCachedHashCode += improve(getHash(dataIndex)) - } - - dataIndex += 1 - } else if ((bitpos & nodeMap) != 0) { - val oldSubNode = getNode(nodeIndex) - val newSubNode = oldSubNode.filterImpl(pred, flipped) - - newSize += newSubNode.size - newCachedHashCode += newSubNode.cachedJavaKeySetHashCode - - // if (newSubNode.size == 0) do nothing (drop it) - if (newSubNode.size > 1) { - newNodeMap |= bitpos - if (oldSubNode eq newSubNode) { - nodesToPassThroughMap |= bitpos - } else { - mapOfNewNodes |= bitpos - if (newNodes eq null) { - newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] - } - newNodes += newSubNode - } - } else if (newSubNode.size == 1) { - newDataMap |= bitpos - nodeMigrateToDataTargetMap |= bitpos - if (nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] - } - nodesToMigrateToData += newSubNode - } - - nodeIndex += 1 - } - - i += 1 - } - - this.newNodeFrom( - newSize = newSize, - newDataMap = newDataMap, - newNodeMap = newNodeMap, - minimumIndex = minimumIndex, - oldDataPassThrough = oldDataPassThrough, - nodesToPassThroughMap = nodesToPassThroughMap, - nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, - nodesToMigrateToData = nodesToMigrateToData, - mapOfNewNodes = mapOfNewNodes, - newNodes = newNodes, - newCachedHashCode = newCachedHashCode - ) - } - } - - override def diff(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { - case bm: BitmapIndexedSetNode[A] => - if (size == 0) this - else if (size == 1) { - val h = getHash(0) - if (that.contains(getPayload(0), h, improve(h), shift)) SetNode.empty else this - } else { - val allMap = dataMap | nodeMap - val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) - val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - - var oldDataPassThrough = 0 - - // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data - var nodeMigrateToDataTargetMap = 0 - // the queue of single-element, post-filter nodes - var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null - - // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node - var nodesToPassThroughMap = 0 - - // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself - // These are stored for later inclusion into the final `content` array - // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) - var mapOfNewNodes = 0 - // each bit in `mapOfNewNodes` corresponds to one element in this queue - var newNodes: mutable.Queue[SetNode[A]] = null - - var newDataMap = 0 - var newNodeMap = 0 - var newSize = 0 - var newCachedHashCode = 0 - - var dataIndex = 0 - var nodeIndex = 0 - - var i = minimumIndex - while (i < maximumIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & dataMap) != 0) { - val payload = getPayload(dataIndex) - val originalHash = getHash(dataIndex) - val hash = improve(originalHash) - - if (!bm.contains(payload, originalHash, hash, shift)) { - newDataMap |= bitpos - oldDataPassThrough |= bitpos - newSize += 1 - newCachedHashCode += hash - } - - dataIndex += 1 - } else if ((bitpos & nodeMap) != 0) { - val oldSubNode = getNode(nodeIndex) - - val newSubNode: SetNode[A] = - if ((bitpos & bm.dataMap) != 0) { - val thatDataIndex = indexFrom(bm.dataMap, bitpos) - val thatPayload = bm.getPayload(thatDataIndex) - val thatOriginalHash = bm.getHash(thatDataIndex) - val thatHash = improve(thatOriginalHash) - oldSubNode.removed(thatPayload, thatOriginalHash, thatHash, shift + BitPartitionSize) - } else if ((bitpos & bm.nodeMap) != 0) { - oldSubNode.diff(bm.getNode(indexFrom(bm.nodeMap, bitpos)), shift + BitPartitionSize) - } else { - oldSubNode - } - - newSize += newSubNode.size - newCachedHashCode += newSubNode.cachedJavaKeySetHashCode - - // if (newSubNode.size == 0) do nothing (drop it) - if (newSubNode.size > 1) { - newNodeMap |= bitpos - if (oldSubNode eq newSubNode) { - nodesToPassThroughMap |= bitpos - } else { - mapOfNewNodes |= bitpos - if (newNodes eq null) { - newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] - } - newNodes += newSubNode - } - } else if (newSubNode.size == 1) { - newDataMap |= bitpos - nodeMigrateToDataTargetMap |= bitpos - if (nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] - } - nodesToMigrateToData += newSubNode - } - - nodeIndex += 1 - } - - i += 1 - } - this.newNodeFrom( - newSize = newSize, - newDataMap = newDataMap, - newNodeMap = newNodeMap, - minimumIndex = minimumIndex, - oldDataPassThrough = oldDataPassThrough, - nodesToPassThroughMap = nodesToPassThroughMap, - nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, - nodesToMigrateToData = nodesToMigrateToData, - mapOfNewNodes = mapOfNewNodes, - newNodes = newNodes, - newCachedHashCode = newCachedHashCode - ) - } - case _: HashCollisionSetNode[A] => - // this branch should never happen, because HashCollisionSetNodes and BitMapIndexedSetNodes do not occur at the - // same depth - throw new RuntimeException("BitmapIndexedSetNode diff HashCollisionSetNode") - } - - /** Utility method only for use in `diff` and `filterImpl` - * - * @param newSize the size of the new SetNode - * @param newDataMap the dataMap of the new SetNode - * @param newNodeMap the nodeMap of the new SetNode - * @param minimumIndex the minimum index (in range of [0, 31]) for which there are sub-nodes or data beneath the new - * SetNode - * @param oldDataPassThrough bitmap representing all the data that are just passed from `this` to the new - * SetNode - * @param nodesToPassThroughMap bitmap representing all 
nodes that are just passed from `this` to the new SetNode - * @param nodeMigrateToDataTargetMap bitmap representing all positions which will now be data in the new SetNode, - * but which were nodes in `this` - * @param nodesToMigrateToData a queue (in order of child position) of single-element nodes, which will be migrated - * to data, in positions in the `nodeMigrateToDataTargetMap` - * @param mapOfNewNodes bitmap of positions of new nodes to include in the new SetNode - * @param newNodes queue in order of child position, of all new nodes to include in the new SetNode - * @param newCachedHashCode the cached java keyset hashcode of the new SetNode - */ - private[this] def newNodeFrom( - newSize: Int, - newDataMap: Int, - newNodeMap: Int, - minimumIndex: Int, - oldDataPassThrough: Int, - nodesToPassThroughMap: Int, - nodeMigrateToDataTargetMap: Int, - nodesToMigrateToData: mutable.Queue[SetNode[A]], - mapOfNewNodes: Int, - newNodes: mutable.Queue[SetNode[A]], - newCachedHashCode: Int): BitmapIndexedSetNode[A] = { - if (newSize == 0) { - SetNode.empty - } else if (newSize == size) { - this - } else { - val newDataSize = bitCount(newDataMap) - val newContentSize = newDataSize + bitCount(newNodeMap) - val newContent = new Array[Any](newContentSize) - val newOriginalHashes = new Array[Int](newDataSize) - - val newAllMap = newDataMap | newNodeMap - val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) - - // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will - // not be incremented properly. Otherwise we could have started at Integer.numberOfTrailingZeroes(newAllMap) - var i = minimumIndex - - var oldDataIndex = 0 - var oldNodeIndex = 0 - - var newDataIndex = 0 - var newNodeIndex = 0 - - while (i < maxIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & oldDataPassThrough) != 0) { - newContent(newDataIndex) = getPayload(oldDataIndex) - newOriginalHashes(newDataIndex) = getHash(oldDataIndex) - newDataIndex += 1 - oldDataIndex += 1 - } else if ((bitpos & nodesToPassThroughMap) != 0) { - newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) - newNodeIndex += 1 - oldNodeIndex += 1 - } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { - // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null - val node = nodesToMigrateToData.dequeue() - newContent(newDataIndex) = node.getPayload(0) - newOriginalHashes(newDataIndex) = node.getHash(0) - newDataIndex += 1 - oldNodeIndex += 1 - } else if ((bitpos & mapOfNewNodes) != 0) { - // we need not check for null here. 
If mapOfNewNodes != 0, then newNodes must not be null - newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() - newNodeIndex += 1 - oldNodeIndex += 1 - } else if ((bitpos & dataMap) != 0) { - oldDataIndex += 1 - } else if ((bitpos & nodeMap) != 0) { - oldNodeIndex += 1 - } - - i += 1 - } - - new BitmapIndexedSetNode[A](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) - } - } - - - override def equals(that: Any): Boolean = - that match { - case node: BitmapIndexedSetNode[_] => - (this eq node) || - (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && - (this.nodeMap == node.nodeMap) && - (this.dataMap == node.dataMap) && - (this.size == node.size) && - java.util.Arrays.equals(this.originalHashes, node.originalHashes) && - deepContentEquality(this.content, node.content, content.length) - case _ => false - } - - @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { - if (a1 eq a2) - true - else { - var isEqual = true - var i = 0 - - while (isEqual && i < length) { - isEqual = a1(i) == a2(i) - i += 1 - } - - isEqual - } - } - - override def hashCode(): Int = - throw new UnsupportedOperationException("Trie nodes do not support hashing.") - - override def copy(): BitmapIndexedSetNode[A] = { - val contentClone = content.clone() - val contentLength = contentClone.length - var i = bitCount(dataMap) - while (i < contentLength) { - contentClone(i) = contentClone(i).asInstanceOf[SetNode[A]].copy() - i += 1 - } - new BitmapIndexedSetNode[A](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) - } - - override def concat(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { - case bm: BitmapIndexedSetNode[A] => - if (size == 0) return bm - else if (bm.size == 0 || (bm eq this)) return this - else if (bm.size == 1) { - val originalHash = bm.getHash(0) - return this.updated(bm.getPayload(0), originalHash, improve(originalHash), shift) - } - - // if we go through the merge and the result does not differ from `this`, we can just return `this`, to improve sharing - // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the - // currently-being-computed result, and `this` - var anyChangesMadeSoFar = false - - // bitmap containing `1` in any position that has any descendant in either left or right, either data or node - val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap - - // minimumIndex is inclusive -- it is the first index for which there is data or nodes - val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) - // maximumIndex is inclusive -- it is the last index for which there is data or nodes - // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound - // of int bitposition representation - val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) - - var leftNodeRightNode = 0 - var leftDataRightNode = 0 - var leftNodeRightData = 0 - var leftDataOnly = 0 - var rightDataOnly = 0 - var leftNodeOnly = 0 - var rightNodeOnly = 0 - var leftDataRightDataMigrateToNode = 0 - var leftDataRightDataLeftOverwrites = 0 - - var dataToNodeMigrationTargets = 0 - - { - var bitpos = minimumBitPos - var leftIdx = 0 - var rightIdx = 0 - var finished = false - - while (!finished) { - - if ((bitpos & dataMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - if (getHash(leftIdx) == 
bm.getHash(rightIdx) && getPayload(leftIdx) == bm.getPayload(rightIdx)) { - leftDataRightDataLeftOverwrites |= bitpos - } else { - leftDataRightDataMigrateToNode |= bitpos - dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(getHash(leftIdx)), shift)) - } - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - leftDataRightNode |= bitpos - } else { - leftDataOnly |= bitpos - } - leftIdx += 1 - } else if ((bitpos & nodeMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - leftNodeRightData |= bitpos - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - leftNodeRightNode |= bitpos - } else { - leftNodeOnly |= bitpos - } - } else if ((bitpos & bm.dataMap) != 0) { - rightDataOnly |= bitpos - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - rightNodeOnly |= bitpos - } - - if (bitpos == maximumBitPos) { - finished = true - } else { - bitpos = bitpos << 1 - } - } - } - - - val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataLeftOverwrites - - val newNodeMap = - leftNodeRightNode | - leftDataRightNode | - leftNodeRightData | - leftNodeOnly | - rightNodeOnly | - dataToNodeMigrationTargets - - - if ((newDataMap == (leftDataOnly | leftDataRightDataLeftOverwrites)) && (newNodeMap == leftNodeOnly)) { - // nothing from `bm` will make it into the result -- return early - return this - } - - val newDataSize = bitCount(newDataMap) - val newContentSize = newDataSize + bitCount(newNodeMap) - - val newContent = new Array[Any](newContentSize) - val newOriginalHashes = new Array[Int](newDataSize) - var newSize = 0 - var newCachedHashCode = 0 - - { - var leftDataIdx = 0 - var rightDataIdx = 0 - var leftNodeIdx = 0 - var rightNodeIdx = 0 - - val nextShift = shift + Node.BitPartitionSize - - var compressedDataIdx = 0 - var compressedNodeIdx = 0 - - var bitpos = minimumBitPos - var finished = false - - while (!finished) { - - if ((bitpos & leftNodeRightNode) != 0) { - val leftNode = getNode(leftNodeIdx) - val newNode = leftNode.concat(bm.getNode(rightNodeIdx), nextShift) - if (leftNode ne newNode) { - anyChangesMadeSoFar = true - } - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - leftNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - - } else if ((bitpos & leftDataRightNode) != 0) { - anyChangesMadeSoFar = true - val newNode = { - val n = bm.getNode(rightNodeIdx) - val leftPayload = getPayload(leftDataIdx) - val leftOriginalHash = getHash(leftDataIdx) - val leftImproved = improve(leftOriginalHash) - n.updated(leftPayload, leftOriginalHash, leftImproved, nextShift) - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - leftDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } - else if ((bitpos & leftNodeRightData) != 0) { - val newNode = { - val rightOriginalHash = bm.getHash(rightDataIdx) - val leftNode = getNode(leftNodeIdx) - val updated = leftNode.updated( - element = bm.getPayload(rightDataIdx), - originalHash = bm.getHash(rightDataIdx), - hash = improve(rightOriginalHash), - shift = nextShift - ) - if (updated ne leftNode) { - anyChangesMadeSoFar = true - } - updated - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftNodeIdx += 1 - rightDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - - } else if ((bitpos & leftDataOnly) != 0) { - val originalHash = 
originalHashes(leftDataIdx) - newContent(compressedDataIdx) = getPayload(leftDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - leftDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - } else if ((bitpos & rightDataOnly) != 0) { - anyChangesMadeSoFar = true - val originalHash = bm.originalHashes(rightDataIdx) - newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - rightDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - } else if ((bitpos & leftNodeOnly) != 0) { - val newNode = getNode(leftNodeIdx) - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & rightNodeOnly) != 0) { - anyChangesMadeSoFar = true - val newNode = bm.getNode(rightNodeIdx) - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { - anyChangesMadeSoFar = true - val newNode = { - val leftOriginalHash = getHash(leftDataIdx) - val rightOriginalHash = bm.getHash(rightDataIdx) - - bm.mergeTwoKeyValPairs( - getPayload(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), - bm.getPayload(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), - nextShift - ) - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftDataIdx += 1 - rightDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & leftDataRightDataLeftOverwrites) != 0) { - val originalHash = bm.originalHashes(rightDataIdx) - newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - rightDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - leftDataIdx += 1 - } - - if (bitpos == maximumBitPos) { - finished = true - } else { - bitpos = bitpos << 1 - } - } - } - - if (anyChangesMadeSoFar) - new BitmapIndexedSetNode( - dataMap = newDataMap, - nodeMap = newNodeMap, - content = newContent, - originalHashes = newOriginalHashes, - size = newSize, - cachedJavaKeySetHashCode = newCachedHashCode - ) - else this - - case _ => - // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes - throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") - } - - override def foreachWithHash(f: (A, Int) => Unit): Unit = { - val iN = payloadArity // arity doesn't change during this operation - var i = 0 - while (i < iN) { - f(getPayload(i), getHash(i)) - i += 1 - } - - val jN = nodeArity // arity doesn't change during this operation - var j = 0 - while (j < jN) { - getNode(j).foreachWithHash(f) - j += 1 - } - } - - override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { - val thisPayloadArity = payloadArity - var pass = true - var i = 0 - while (i < thisPayloadArity && pass) { - pass &&= f(getPayload(i), getHash(i)) - i += 1 - } - - val thisNodeArity = nodeArity - var j = 0 - while (j < thisNodeArity && pass) { - pass &&= getNode(j).foreachWithHashWhile(f) - j += 1 - } - pass - } -} - 
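-// For example (hypothetical): two distinct elements with equal hash codes cannot be separated at any
-// trie level, so they end up together in the HashCollisionSetNode defined below:
-//   final case class K(s: String) { override def hashCode: Int = 42 }
-//   HashSet(K("a"), K("b"))  // the leaf holding both is a HashCollisionSetNode
-//                            // with content Vector(K("a"), K("b"))
-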
-private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A] @uncheckedCaptures) extends SetNode[A] { - - import Node._ - - require(content.length >= 2) - - def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean = - this.hash == hash && content.contains(element) - - def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = - if (this.contains(element, originalHash, hash, shift)) { - this - } else { - new HashCollisionSetNode[A](originalHash, hash, content.appended(element)) - } - - /** - * Remove an element from the hash collision node. - * - * When after deletion only one element remains, we return a bit-mapped indexed node with a - * singleton element and a hash-prefix for trie level 0. This node will be then a) either become - * the new root, or b) unwrapped and inlined deeper in the trie. - */ - def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = - if (!this.contains(element, originalHash, hash, shift)) { - this - } else { - val updatedContent = content.filterNot(element0 => element0 == element) - // assert(updatedContent.size == content.size - 1) - - updatedContent.size match { - case 1 => new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(updatedContent(0)), Array(originalHash), 1, hash) - case _ => new HashCollisionSetNode[A](originalHash, hash, updatedContent) - } - } - - def hasNodes: Boolean = false - - def nodeArity: Int = 0 - - def getNode(index: Int): SetNode[A] = - throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") - - def hasPayload: Boolean = true - - def payloadArity: Int = content.length - - def getPayload(index: Int): A = content(index) - - override def getHash(index: Int): Int = originalHash - - def size: Int = content.length - - def foreach[U](f: A => U): Unit = { - val iter = content.iterator - while (iter.hasNext) { - f(iter.next()) - } - } - - - override def cachedJavaKeySetHashCode: Int = size * hash - - def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match { - case node: HashCollisionSetNode[A] => - this.payloadArity <= node.payloadArity && this.content.forall(node.content.contains) - case _ => - false - } - - override def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] = { - val newContent = content.filterImpl(pred, flipped) - val newContentLength = newContent.length - if (newContentLength == 0) { - SetNode.empty - } else if (newContentLength == 1) { - new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(newContent.head), Array(originalHash), 1, hash) - } else if (newContent.length == content.length) this - else new HashCollisionSetNode(originalHash, hash, newContent) - } - - override def diff(that: SetNode[A], shift: Int): SetNode[A] = - filterImpl(that.contains(_, originalHash, hash, shift), true) - - override def equals(that: Any): Boolean = - that match { - case node: HashCollisionSetNode[_] => - (this eq node) || - (this.hash == node.hash) && - (this.content.size == node.content.size) && - this.content.forall(node.content.contains) - case _ => false - } - - override def hashCode(): Int = - throw new UnsupportedOperationException("Trie nodes do not support hashing.") - - override def copy() = new HashCollisionSetNode[A](originalHash, hash, content) - - override def concat(that: SetNode[A], shift: Int): SetNode[A] = that match { - case hc: HashCollisionSetNode[A] => - if (hc eq this) { - this - } else { - var newContent: 
VectorBuilder[A] = null - val iter = hc.content.iterator - while (iter.hasNext) { - val nextPayload = iter.next() - if (!content.contains(nextPayload)) { - if (newContent eq null) { - newContent = new VectorBuilder() - newContent.addAll(this.content) - } - newContent.addOne(nextPayload) - } - } - if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.result()) - } - case _: BitmapIndexedSetNode[A] => - // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes - throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") - } - - override def foreachWithHash(f: (A, Int) => Unit): Unit = { - val iter = content.iterator - while (iter.hasNext) { - val next = iter.next() - f(next.asInstanceOf[A], originalHash) - } - } - - override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { - var stillGoing = true - val iter = content.iterator - while (iter.hasNext && stillGoing) { - val next = iter.next() - stillGoing &&= f(next.asInstanceOf[A], originalHash) - } - stillGoing - } -} - -private final class SetIterator[A](rootNode: SetNode[A]) - extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[A] { - - def next() = { - if (!hasNext) - throw new NoSuchElementException - - val payload = currentValueNode.getPayload(currentValueCursor) - currentValueCursor += 1 - - payload - } - -} - -private final class SetReverseIterator[A](rootNode: SetNode[A]) - extends ChampBaseReverseIterator[SetNode[A]](rootNode) with Iterator[A] { - - def next(): A = { - if (!hasNext) - throw new NoSuchElementException - - val payload = currentValueNode.getPayload(currentValueCursor) - currentValueCursor -= 1 - - payload - } - -} - -private final class SetHashIterator[A](rootNode: SetNode[A]) - extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[AnyRef] { - private[this] var hash = 0 - override def hashCode(): Int = hash - - def next(): AnyRef = { - if (!hasNext) - throw new NoSuchElementException - - hash = currentValueNode.getHash(currentValueCursor) - currentValueCursor += 1 - this - } - -} - - -/** - * $factoryInfo - * - * @define Coll `immutable.HashSet` - * @define coll immutable champ hash set - */ -@SerialVersionUID(3L) -object HashSet extends IterableFactory[HashSet] { - - @transient - private final val EmptySet = new HashSet(SetNode.empty) - - def empty[A]: HashSet[A] = - EmptySet.asInstanceOf[HashSet[A]] - - def from[A](source: collection.IterableOnce[A]^): HashSet[A] = - source match { - case hs: HashSet[A] => hs - case _ if source.knownSize == 0 => empty[A] - case _ => (newBuilder[A] ++= source).result() - } - - /** Create a new Builder which can be reused after calling `result()` without an - * intermediate call to `clear()` in order to build multiple related results. - */ - def newBuilder[A]: ReusableBuilder[A, HashSet[A]] = new HashSetBuilder -} - -/** Builder for HashSet. - * $multipleResults - */ -private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, HashSet[A]] { - import Node._ - import SetNode._ - - private def newEmptyRootNode = new BitmapIndexedSetNode[A](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) - - /** The last given out HashSet as a return value of `result()`, if any, otherwise null. - * Indicates that on next add, the elements should be copied to an identical structure, before continuing - * mutations. 
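The reuse contract described here can be exercised directly. A small usage sketch (names illustrative) against the public `HashSet.newBuilder` API, relying on the `ReusableBuilder` behavior stated above: results handed out by `result()` are never disturbed because the next mutation copies the shared root first.

import scala.collection.immutable.HashSet

object BuilderReuseDemo {
  def main(args: Array[String]): Unit = {
    val b = HashSet.newBuilder[Int]
    b += 1
    b += 2
    val first = b.result() // the root node is now aliased by `first`
    b += 3                 // triggers the copy-on-write path
    assert(first == HashSet(1, 2))
    assert(b.result() == HashSet(1, 2, 3))
  }
}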
*/
-  private var aliased: HashSet[A] @uncheckedCaptures = _
-
-  private def isAliased: Boolean = aliased != null
-
-  /** The root node of the partially built hash set */
-  private var rootNode: BitmapIndexedSetNode[A] @uncheckedCaptures = newEmptyRootNode
-
-  /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */
-  private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
-    if (ix < 0) throw new ArrayIndexOutOfBoundsException
-    if (ix > as.length) throw new ArrayIndexOutOfBoundsException
-    val result = new Array[Int](as.length + 1)
-    arraycopy(as, 0, result, 0, ix)
-    result(ix) = elem
-    arraycopy(as, ix, result, ix + 1, as.length - ix)
-    result
-  }
-
-  /** Inserts the element into the BitmapIndexedSetNode. Requires that the element is not yet present */
-  private def insertValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, key: A, originalHash: Int, keyHash: Int): Unit = {
-    val dataIx = bm.dataIndex(bitpos)
-    val idx = TupleLength * dataIx
-
-    val src = bm.content
-    val dst = new Array[Any](src.length + TupleLength)
-
-    // copy 'src' and insert 1 element at position 'idx'
-    arraycopy(src, 0, dst, 0, idx)
-    dst(idx) = key
-    arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
-
-    val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash)
-
-    bm.dataMap = bm.dataMap | bitpos
-    bm.content = dst
-    bm.originalHashes = dstHashes
-    bm.size += 1
-    bm.cachedJavaKeySetHashCode += keyHash
-  }
-
-  /** Mutates `bm` to replace the inline element at bit position `bitpos` with the updated element */
-  private def setValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, elem: A): Unit = {
-    val dataIx = bm.dataIndex(bitpos)
-    val idx = TupleLength * dataIx
-    bm.content(idx) = elem
-  }
-
-  def update(setNode: SetNode[A], element: A, originalHash: Int, elementHash: Int, shift: Int): Unit =
-    setNode match {
-      case bm: BitmapIndexedSetNode[A] =>
-        val mask = maskFrom(elementHash, shift)
-        val bitpos = bitposFrom(mask)
-
-        if ((bm.dataMap & bitpos) != 0) {
-          val index = indexFrom(bm.dataMap, mask, bitpos)
-          val element0 = bm.getPayload(index)
-          val element0UnimprovedHash = bm.getHash(index)
-
-          if (element0UnimprovedHash == originalHash && element0 == element) {
-            setValue(bm, bitpos, element0)
-          } else {
-            val element0Hash = improve(element0UnimprovedHash)
-            val subNodeNew = bm.mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
-            bm.migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew)
-          }
-        } else if ((bm.nodeMap & bitpos) != 0) {
-          val index = indexFrom(bm.nodeMap, mask, bitpos)
-          val subNode = bm.getNode(index)
-          val beforeSize = subNode.size
-          val beforeHashCode = subNode.cachedJavaKeySetHashCode
-          update(subNode, element, originalHash, elementHash, shift + BitPartitionSize)
-          bm.size += subNode.size - beforeSize
-          bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHashCode
-        } else {
-          insertValue(bm, bitpos, element, originalHash, elementHash)
-        }
-      case hc: HashCollisionSetNode[A] =>
-        val index = hc.content.indexOf(element)
-        if (index < 0) {
-          hc.content = hc.content.appended(element)
-        } else {
-          hc.content = hc.content.updated(index, element)
-        }
-    }
-
-  /** If currently referencing aliased structure, copy elements to new mutable structure */
-  private def ensureUnaliased(): Unit = {
-    if (isAliased) copyElems()
-    aliased = null
-  }
-
-  /** Copy elements to new mutable structure */
-  private def copyElems(): Unit
= { - rootNode = rootNode.copy() - } - - override def result(): HashSet[A] = - if (rootNode.size == 0) { - HashSet.empty - } else if (aliased != null) { - aliased - } else { - aliased = new HashSet(rootNode) - releaseFence() - aliased - } - - override def addOne(elem: A): this.type = { - ensureUnaliased() - val h = elem.## - val im = improve(h) - update(rootNode, elem, h, im, 0) - this - } - - override def addAll(xs: IterableOnce[A]^) = { - ensureUnaliased() - xs match { - case hm: HashSet[A] => - new ChampBaseIterator[SetNode[A]](hm.rootNode) { - while(hasNext) { - val originalHash = currentValueNode.getHash(currentValueCursor) - update( - setNode = rootNode, - element = currentValueNode.getPayload(currentValueCursor), - originalHash = originalHash, - elementHash = improve(originalHash), - shift = 0 - ) - currentValueCursor += 1 - } - }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position - case other => - val it = other.iterator - while(it.hasNext) addOne(it.next()) - } - - this - } - - override def clear(): Unit = { - aliased = null - if (rootNode.size > 0) { - // if rootNode is empty, we will not have given it away anyways, we instead give out the reused Set.empty - rootNode = newEmptyRootNode - } - } - - private[collection] def size: Int = rootNode.size - - override def knownSize: Int = rootNode.size -} diff --git a/tests/pos-special/stdlib/collection/immutable/IntMap.scala b/tests/pos-special/stdlib/collection/immutable/IntMap.scala deleted file mode 100644 index d7077845b845..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/IntMap.scala +++ /dev/null @@ -1,504 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import scala.collection.generic.{BitOperations, DefaultSerializationProxy} -import scala.collection.mutable.{Builder, ImmutableBuilder} -import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance -import scala.language.implicitConversions -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** Utility class for integer maps. - */ -private[immutable] object IntMapUtils extends BitOperations.Int { - def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) - - def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = { - val m = branchMask(p1, p2) - val p = mask(p1, m) - if (zero(p1, m)) IntMap.Bin(p, m, t1, t2) - else IntMap.Bin(p, m, t2, t1) - } - - def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match { - case (left, IntMap.Nil) => left - case (IntMap.Nil, right) => right - case (left, right) => IntMap.Bin(prefix, mask, left, right) - } -} - -import IntMapUtils._ - -/** A companion object for integer maps. - * - * @define Coll `IntMap` - */ -object IntMap { - def empty[T] : IntMap[T] = IntMap.Nil - - def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value) - - def apply[T](elems: (Int, T)*): IntMap[T] = - elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - - def from[V](coll: IterableOnce[(Int, V)]^): IntMap[V] = - newBuilder[V].addAll(coll).result() - - private[immutable] case object Nil extends IntMap[Nothing] { - // Important! 
Without this equals method in place, an infinite
-    // loop from Map.equals => size => pattern-match-on-Nil => equals
-    // develops. Case objects and custom equality don't mix without
-    // careful handling.
-    override def equals(that: Any) = that match {
-      case _: this.type => true
-      case _: IntMap[_] => false // The only empty IntMaps are eq Nil
-      case _            => super.equals(that)
-    }
-  }
-
-  private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{
-    def withValue[S](s: S) =
-      if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]]
-      else IntMap.Tip(key, s)
-  }
-
-  private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] {
-    def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = {
-      if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]]
-      else IntMap.Bin[S](prefix, mask, left, right)
-    }
-  }
-
-  def newBuilder[V]: Builder[(Int, V), IntMap[V]] =
-    new ImmutableBuilder[(Int, V), IntMap[V]](empty) {
-      def addOne(elem: (Int, V)): this.type = { elems = elems + elem; this }
-    }
-
-  implicit def toFactory[V](dummy: IntMap.type): Factory[(Int, V), IntMap[V]] = ToFactory.asInstanceOf[Factory[(Int, V), IntMap[V]]]
-
-  @SerialVersionUID(3L)
-  private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable {
-    def fromSpecific(it: IterableOnce[(Int, AnyRef)]^): IntMap[AnyRef] = IntMap.from[AnyRef](it)
-    def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = IntMap.newBuilder[AnyRef]
-  }
-
-  implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]]
-  private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] {
-    def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]^) = IntMap.from(it)
-    def newBuilder(from: Any) = IntMap.newBuilder[AnyRef]
-  }
-
-  implicit def iterableFactory[V]: Factory[(Int, V), IntMap[V]] = toFactory(this)
-  implicit def buildFromIntMap[V]: BuildFrom[IntMap[_], (Int, V), IntMap[V]] = toBuildFrom(this)
-}
-
-// Iterator over a non-empty IntMap.
-private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] {
-
-  // Basically this uses a simple stack to emulate recursion over the tree. However,
-  // because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and
-  // one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
-  // depth is 33, so a fixed buffer of that size suffices.
-  var index = 0
-  var buffer = new Array[AnyRef](33)
-
-  def pop = {
-    index -= 1
-    buffer(index).asInstanceOf[IntMap[V]]
-  }
-
-  def push(x: IntMap[V]): Unit = {
-    buffer(index) = x.asInstanceOf[AnyRef]
-    index += 1
-  }
-  push(it)
-
-  /**
-   * What value do we assign to a tip?
-   */
-  def valueOf(tip: IntMap.Tip[V]): T
-
-  def hasNext = index != 0
-  @tailrec
-  final def next(): T =
-    pop match {
-      case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => {
-        push(right)
-        valueOf(t)
-      }
-      case IntMap.Bin(_, _, left, right) => {
-        push(right)
-        push(left)
-        next()
-      }
-      case t@IntMap.Tip(_, _) => valueOf(t)
-      // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap
-      // and don't return an IntMapIterator for IntMap.Nil.
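A standalone sketch of the branching logic that gives this trie its shape: two keys are split under the highest bit in which they differ, so any path crosses at most 32 `Bin` nodes plus one `Tip`, which is where the 33-slot buffer bound above comes from. `branchMask` and `zero` mirror the `IntMapUtils` helpers shown earlier; the sample values are arbitrary.

object BranchMaskSketch {
  def branchMask(p1: Int, p2: Int): Int = java.lang.Integer.highestOneBit(p1 ^ p2)
  def zero(key: Int, mask: Int): Boolean = (key & mask) == 0

  def main(args: Array[String]): Unit = {
    val m = branchMask(0xA, 0xE)          // 1010 vs 1110 differ in bit 2
    assert(m == 0x4)
    assert(zero(0xA, m) && !zero(0xE, m)) // 0xA goes left, 0xE goes right
  }
}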
- case IntMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") - } -} - -private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) { - def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value) -} - -private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) { - def valueOf(tip: IntMap.Tip[V]) = tip.value -} - -private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) { - def valueOf(tip: IntMap.Tip[V]) = tip.key -} - -import IntMap._ - -/** Specialised immutable map structure for integer keys, based on - * [[https://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]] - * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. - * - * '''Note:''' This class is as of 2.8 largely superseded by HashMap. - * - * @tparam T type of the values associated with integer keys. - * - * @define Coll `immutable.IntMap` - * @define coll immutable integer map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed abstract class IntMap[+T] extends AbstractMap[Int, T] - with StrictOptimizedMapOps[Int, T, Map, IntMap[T]] - with Serializable { - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]^): IntMap[T] = - intMapFrom[T](coll) - protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]^): IntMap[V2] = { - val b = IntMap.newBuilder[V2] - b.sizeHint(coll) - b.addAll(coll) - b.result() - } - override protected def newSpecificBuilder: Builder[(Int, T), IntMap[T]] @uncheckedVariance = - new ImmutableBuilder[(Int, T), IntMap[T]](empty) { - def addOne(elem: (Int, T)): this.type = { elems = elems + elem; this } - } - - override def empty: IntMap[T] = IntMap.Nil - - override def toList = { - val buffer = new scala.collection.mutable.ListBuffer[(Int, T) @uncheckedCaptures] - foreach(buffer += _) - buffer.toList - } - - /** - * Iterator over key, value pairs of the map in unsigned order of the keys. - * - * @return an iterator over pairs of integer keys and corresponding values. - */ - def iterator: Iterator[(Int, T)] = this match { - case IntMap.Nil => Iterator.empty - case _ => new IntMapEntryIterator(this) - } - - /** - * Loops over the key, value pairs of the map in unsigned order of the keys. - */ - override final def foreach[U](f: ((Int, T)) => U): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } - case IntMap.Tip(key, value) => f((key, value)) - case IntMap.Nil => - } - - override def foreachEntry[U](f: (IntMapUtils.Int, T) => U): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } - case IntMap.Tip(key, value) => f(key, value) - case IntMap.Nil => - } - - override def keysIterator: Iterator[Int] = this match { - case IntMap.Nil => Iterator.empty - case _ => new IntMapKeyIterator(this) - } - - /** - * Loop over the keys of the map. The same as `keys.foreach(f)`, but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachKey[U](f: Int => U): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } - case IntMap.Tip(key, _) => f(key) - case IntMap.Nil => - } - - override def valuesIterator: Iterator[T] = this match { - case IntMap.Nil => Iterator.empty - case _ => new IntMapValueIterator(this) - } - - /** - * Loop over the values of the map. 
The same as `values.foreach(f)`, but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachValue[U](f: T => U): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } - case IntMap.Tip(_, value) => f(value) - case IntMap.Nil => - } - - override protected[this] def className = "IntMap" - - override def isEmpty = this eq IntMap.Nil - override def knownSize: Int = if (isEmpty) 0 else super.knownSize - override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match { - case IntMap.Bin(prefix, mask, left, right) => { - val (newleft, newright) = (left.filter(f), right.filter(f)) - if ((left eq newleft) && (right eq newright)) this - else bin(prefix, mask, newleft, newright) - } - case IntMap.Tip(key, value) => - if (f((key, value))) this - else IntMap.Nil - case IntMap.Nil => IntMap.Nil - } - - override def transform[S](f: (Int, T) => S): IntMap[S] = this match { - case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) - case t@IntMap.Tip(key, value) => t.withValue(f(key, value)) - case IntMap.Nil => IntMap.Nil - } - - final override def size: Int = this match { - case IntMap.Nil => 0 - case IntMap.Tip(_, _) => 1 - case IntMap.Bin(_, _, left, right) => left.size + right.size - } - - @tailrec - final def get(key: Int): Option[T] = this match { - case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) - case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None - case IntMap.Nil => None - } - - @tailrec - final override def getOrElse[S >: T](key: Int, default: => S): S = this match { - case IntMap.Nil => default - case IntMap.Tip(key2, value) => if (key == key2) value else default - case IntMap.Bin(prefix, mask, left, right) => - if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) - } - - @tailrec - final override def apply(key: Int): T = this match { - case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) - case IntMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") - case IntMap.Nil => throw new IllegalArgumentException("key not found") - } - - override def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) - - override def updated[S >: T](key: Int, value: S): IntMap[S] = this match { - case IntMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) - else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right) - else IntMap.Bin(prefix, mask, left, right.updated(key, value)) - case IntMap.Tip(key2, value2) => - if (key == key2) IntMap.Tip(key, value) - else join(key, IntMap.Tip(key, value), key2, this) - case IntMap.Nil => IntMap.Tip(key, value) - } - - def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f)) - - def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) - - override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = - super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such - - override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = concat(that) - - def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] = - strictOptimizedCollect(IntMap.newBuilder[V2], pf) - - /** - * Updates the map, using the 
provided function to resolve conflicts if the key is already present.
-   *
-   * Equivalent to:
-   * {{{
-   *   this.get(key) match {
-   *     case None => this.update(key, value)
-   *     case Some(oldvalue) => this.update(key, f(oldvalue, value))
-   *   }
-   * }}}
-   *
-   * @tparam S     The supertype of values in this `IntMap`.
-   * @param key    The key to update.
-   * @param value  The value to use if there is no conflict.
-   * @param f      The function used to resolve conflicts.
-   * @return       The updated map.
-   */
-  def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match {
-    case IntMap.Bin(prefix, mask, left, right) =>
-      if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
-      else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
-      else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
-    case IntMap.Tip(key2, value2) =>
-      if (key == key2) IntMap.Tip(key, f(value2, value))
-      else join(key, IntMap.Tip(key, value), key2, this)
-    case IntMap.Nil => IntMap.Tip(key, value)
-  }
-
-  def removed(key: Int): IntMap[T] = this match {
-    case IntMap.Bin(prefix, mask, left, right) =>
-      if (!hasMatch(key, prefix, mask)) this
-      else if (zero(key, mask)) bin(prefix, mask, left - key, right)
-      else bin(prefix, mask, left, right - key)
-    case IntMap.Tip(key2, _) =>
-      if (key == key2) IntMap.Nil
-      else this
-    case IntMap.Nil => IntMap.Nil
-  }
-
-  /**
-   * A combined transform and filter function. Returns an `IntMap` such that
-   * for each `(key, value)` mapping in this map, if `f(key, value) == None`
-   * the map contains no mapping for `key`, and if `f(key, value) == Some(x)`
-   * the map contains the mapping `(key, x)`.
-   *
-   * @tparam S    The type of the values in the resulting `IntMap`.
-   * @param f     The transforming function.
-   * @return      The modified map.
-   */
-  def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match {
-    case IntMap.Bin(prefix, mask, left, right) =>
-      val newleft = left.modifyOrRemove(f)
-      val newright = right.modifyOrRemove(f)
-      if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]]
-      else bin(prefix, mask, newleft, newright)
-    case IntMap.Tip(key, value) => f(key, value) match {
-      case None =>
-        IntMap.Nil
-      case Some(value2) =>
-        //hack to preserve sharing
-        if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]]
-        else IntMap.Tip(key, value2)
-    }
-    case IntMap.Nil =>
-      IntMap.Nil
-  }
-
-  /**
-   * Forms a union map with that map, using the combining function to resolve conflicts.
-   *
-   * @tparam S      The type of values in `that`, a supertype of values in `this`.
-   * @param that    The map to form a union with.
-   * @param f       The function used to resolve conflicts between two mappings.
-   * @return        Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
- */
-  def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match {
-    case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) =>
-      if (shorter(m1, m2)) {
-        if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that)
-        else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1)
-        else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f))
-      } else if (shorter(m2, m1)) {
-        if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that)
-        else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2)
-        else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f))
-      }
-      else {
-        if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2, f), r1.unionWith(r2, f))
-        else join(p1, this, p2, that)
-      }
-    case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x))
-    case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
-    case (IntMap.Nil, x) => x
-    case (x, IntMap.Nil) => x
-  }
-
-  /**
-   * Forms the intersection of these two maps with a combining function. The
-   * resulting map is a map that has only keys present in both maps and has
-   * values produced from the original mappings by combining them with `f`.
-   *
-   * @tparam S      The type of values in `that`.
-   * @tparam R      The type of values in the resulting `IntMap`.
-   * @param that    The map to intersect with.
-   * @param f       The combining function.
-   * @return        Intersection of `this` and `that`, with values for identical keys produced by function `f`.
-   */
-  def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match {
-    case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) =>
-      if (shorter(m1, m2)) {
-        if (!hasMatch(p2, p1, m1)) IntMap.Nil
-        else if (zero(p2, m1)) l1.intersectionWith(that, f)
-        else r1.intersectionWith(that, f)
-      } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
-      else {
-        if (!hasMatch(p1, p2, m2)) IntMap.Nil
-        else if (zero(p1, m2)) this.intersectionWith(l2, f)
-        else this.intersectionWith(r2, f)
-      }
-    case (IntMap.Tip(key, value), that) => that.get(key) match {
-      case None => IntMap.Nil
-      case Some(value2) => IntMap.Tip(key, f(key, value, value2))
-    }
-    case (_, IntMap.Tip(key, value)) => this.get(key) match {
-      case None => IntMap.Nil
-      case Some(value2) => IntMap.Tip(key, f(key, value2, value))
-    }
-    case (_, _) => IntMap.Nil
-  }
-
-  /**
-   * Left biased intersection. Returns the map that has all the same mappings
-   * as this but only for keys which are present in the other map.
-   *
-   * @tparam R      The type of values in `that`.
-   * @param that    The map to intersect with.
-   * @return        A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
-   */
-  def intersection[R](that: IntMap[R]): IntMap[T] =
-    this.intersectionWith(that, (key: Int, value: T, value2: R) => value)
-
-  def ++[S >: T](that: IntMap[S]) =
-    this.unionWith[S](that, (key, x, y) => y)
-
-  /**
-   * The entry with the lowest key value considered in unsigned order.
-   */
-  @tailrec
-  final def firstKey: Int = this match {
-    case Bin(_, _, l, r) => l.firstKey
-    case Tip(k, v) => k
-    case IntMap.Nil => throw new IllegalStateException("Empty set")
-  }
-
-  /**
-   * The entry with the highest key value considered in unsigned order.
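A brief usage sketch of the merge operations documented above, written against the standard `scala.collection.immutable.IntMap`, which has these same signatures:

import scala.collection.immutable.IntMap

object IntMapMergeDemo {
  def main(args: Array[String]): Unit = {
    val a = IntMap(1 -> 10, 2 -> 20)
    val b = IntMap(2 -> 200, 3 -> 300)
    // unionWith keeps all keys and combines the values of shared keys
    assert(a.unionWith[Int](b, (_, l, r) => l + r) == IntMap(1 -> 10, 2 -> 220, 3 -> 300))
    // intersectionWith keeps only the shared keys
    assert(a.intersectionWith[Int, Int](b, (_, l, r) => l min r) == IntMap(2 -> 20))
    // ++ is right-biased, per its unionWith definition above
    assert((a ++ b) == IntMap(1 -> 10, 2 -> 200, 3 -> 300))
  }
}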
- */
-  @tailrec
-  final def lastKey: Int = this match {
-    case Bin(_, _, l, r) => r.lastKey
-    case Tip(k, v) => k
-    case IntMap.Nil => throw new IllegalStateException("Empty set")
-  }
-
-  protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(IntMap.toFactory[T](IntMap), this)
-}
diff --git a/tests/pos-special/stdlib/collection/immutable/Iterable.scala b/tests/pos-special/stdlib/collection/immutable/Iterable.scala
index c4f9900eea8b..44f13d0f2895 100644
--- a/tests/pos-special/stdlib/collection/immutable/Iterable.scala
+++ b/tests/pos-special/stdlib/collection/immutable/Iterable.scala
@@ -32,7 +32,7 @@ trait Iterable[+A] extends collection.Iterable[A]
 
 @SerialVersionUID(3L)
 object Iterable extends IterableFactory.Delegate[Iterable](List) {
-  override def from[E](it: IterableOnce[E]^): Iterable[E]^{it} = it match {
+  override def from[E](it: IterableOnce[E]): Iterable[E] = it match {
     case iterable: Iterable[E] => iterable
     case _ => super.from(it)
   }
diff --git a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala
deleted file mode 100644
index 5684130b6048..000000000000
--- a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala
+++ /dev/null
@@ -1,1376 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */
-
-package scala
-package collection
-package immutable
-
-import java.io.{ObjectInputStream, ObjectOutputStream}
-import java.lang.{StringBuilder => JStringBuilder}
-
-import scala.annotation.tailrec
-import scala.collection.generic.SerializeEnd
-import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder}
-import scala.language.implicitConversions
-import scala.runtime.Statics
-import language.experimental.captureChecking
-import annotation.unchecked.uncheckedCaptures
-
-/** This class implements an immutable linked list. We call it "lazy"
- *  because it computes its elements only when they are needed.
- *
- *  The class extends Iterable; it is a replacement for LazyList, which
- *  implemented Seq. The reason is that under capture checking, we
- *  assume that all Seqs are strict, and LazyList broke that assumption.
- *  As a consequence, we declare LazyList deprecated and unsafe for
- *  capture checking, and replace it with the current class, LazyListIterable.
- *
- *  Elements are memoized; that is, the value of each element is computed at most once.
- *
- *  Elements are computed in-order and are never skipped. In other words,
- *  accessing the tail causes the head to be computed first.
- *
- *  How lazy is a `LazyListIterable`? When you have a value of type `LazyListIterable`, you
- *  don't know yet whether the list is empty or not. If you learn that it is non-empty,
- *  then you also know that the head has been computed. But the tail is itself
- *  a `LazyListIterable`, whose emptiness-or-not might remain undetermined.
- *
- *  A `LazyListIterable` may be infinite. For example, `LazyListIterable.from(0)` contains
- *  all of the natural numbers 0, 1, 2, and so on. For infinite sequences,
- *  some methods (such as `count`, `sum`, `max` or `min`) will not terminate.
- * - * Here is an example: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * val fibs: LazyListIterable[BigInt] = - * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 } - * fibs.take(5).foreach(println) - * } - * - * // prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * }}} - * - * To illustrate, let's add some output to the definition `fibs`, so we - * see what's going on. - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * val fibs: LazyListIterable[BigInt] = - * BigInt(0) #:: BigInt(1) #:: - * fibs.zip(fibs.tail).map{ n => - * println(s"Adding \${n._1} and \${n._2}") - * n._1 + n._2 - * } - * fibs.take(5).foreach(println) - * fibs.take(6).foreach(println) - * } - * - * // prints - * // - * // 0 - * // 1 - * // Adding 0 and 1 - * // 1 - * // Adding 1 and 1 - * // 2 - * // Adding 1 and 2 - * // 3 - * - * // And then prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * // Adding 2 and 3 - * // 5 - * }}} - * - * Note that the definition of `fibs` uses `val` not `def`. The memoization of the - * `LazyListIterable` requires us to have somewhere to store the information and a `val` - * allows us to do that. - * - * Further remarks about the semantics of `LazyListIterable`: - * - * - Though the `LazyListIterable` changes as it is accessed, this does not - * contradict its immutability. Once the values are memoized they do - * not change. Values that have yet to be memoized still "exist", they - * simply haven't been computed yet. - * - * - One must be cautious of memoization; it can eat up memory if you're not - * careful. That's because memoization of the `LazyListIterable` creates a structure much like - * [[scala.collection.immutable.List]]. As long as something is holding on to - * the head, the head holds on to the tail, and so on recursively. - * If, on the other hand, there is nothing holding on to the head (e.g. if we used - * `def` to define the `LazyListIterable`) then once it is no longer being used directly, - * it disappears. - * - * - Note that some operations, including [[drop]], [[dropWhile]], - * [[flatMap]] or [[collect]] may process a large number of intermediate - * elements before returning. - * - * Here's another example. Let's start with the natural numbers and iterate - * over them. - * - * {{{ - * // We'll start with a silly iteration - * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { - * // Stop after 200,000 - * if (i < 200001) { - * if (i % 50000 == 0) println(s + i) - * loop(s, iter.next(), iter) - * } - * } - * - * // Our first LazyListIterable definition will be a val definition - * val lazylist1: LazyListIterable[Int] = { - * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1) - * loop(0) - * } - * - * // Because lazylist1 is a val, everything that the iterator produces is held - * // by virtue of the fact that the head of the LazyListIterable is held in lazylist1 - * val it1 = lazylist1.iterator - * loop("Iterator1: ", it1.next(), it1) - * - * // We can redefine this LazyListIterable such that all we have is the Iterator left - * // and allow the LazyListIterable to be garbage collected as required. 
Using a def
- *  // to provide the LazyListIterable ensures that no val is holding onto the head as
- *  // is the case with lazylist1
- *  def lazylist2: LazyListIterable[Int] = {
- *    def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1)
- *    loop(0)
- *  }
- *  val it2 = lazylist2.iterator
- *  loop("Iterator2: ", it2.next(), it2)
- *
- *  // And, of course, we don't actually need a LazyListIterable at all for such a simple
- *  // problem. There's no reason to use a LazyListIterable if you don't actually need
- *  // one.
- *  val it3 = new Iterator[Int] {
- *    var i = -1
- *    def hasNext = true
- *    def next(): Int = { i += 1; i }
- *  }
- *  loop("Iterator3: ", it3.next(), it3)
- *  }}}
- *
- *  - In the `fibs` example earlier, the fact that `tail` works at all is of interest.
- *    `fibs` has an initial `(0, 1, LazyListIterable(...))`, so `tail` is deterministic.
- *    If we defined `fibs` such that only `0` were concretely known, then the act
- *    of determining `tail` would require the evaluation of `tail`, so the
- *    computation would be unable to progress, as in this code:
- *    {{{
- *    // The first time we try to access the tail we're going to need more
- *    // information which will require us to recurse, which will require us to
- *    // recurse, which...
- *    lazy val sov: LazyListIterable[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
- *    }}}
- *
- *  The definition of `fibs` above creates a larger number of objects than
- *  necessary depending on how you might want to implement it. The following
- *  implementation is more "cost effective", due to the fact that it has a
- *  more direct route to the numbers themselves:
- *
- *  {{{
- *  lazy val fib: LazyListIterable[Int] = {
- *    def loop(h: Int, n: Int): LazyListIterable[Int] = h #:: loop(n, h + n)
- *    loop(1, 1)
- *  }
- *  }}}
- *
- *  The head, the tail and whether the list is empty or not can be initially unknown.
- *  Once any of those are evaluated, they are all known, though if the tail is
- *  built with `#::` or `#:::`, its content still isn't evaluated. Instead, evaluating
- *  the tail's content is deferred until the tail's empty status, head or tail is
- *  evaluated.
- *
- *  Delaying the evaluation of whether a LazyListIterable is empty or not until it's needed
- *  allows LazyListIterable to not eagerly evaluate any elements on a call to `filter`.
- *
- *  Only when it's further evaluated (which may be never!) do any of the elements get
- *  forced.
- *
- *  For example:
- *
- *  {{{
- *  def tailWithSideEffect: LazyListIterable[Nothing] = {
- *    println("getting empty LazyListIterable")
- *    LazyListIterable.empty
- *  }
- *
- *  val emptyTail = tailWithSideEffect // prints "getting empty LazyListIterable"
- *
- *  val suspended = 1 #:: tailWithSideEffect // doesn't print anything
- *  val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed
- *  val filtered = tail.filter(_ => false) // still nothing is printed
- *  filtered.isEmpty // prints "getting empty LazyListIterable"
- *  }}}
- *
- *  @tparam A    the type of the elements contained in this lazy list.
- *
- *  @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]]
- *  section on `LazyLists` for more information.
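The deferred-evaluation example above can be checked mechanically. The sketch below uses the standard `LazyList`, whose semantics `LazyListIterable` mirrors, and replaces the `println` side effect with a flag:

import scala.collection.immutable.LazyList

object FilterLazinessDemo {
  def main(args: Array[String]): Unit = {
    var gotEmpty = false
    def tailWithSideEffect: LazyList[Nothing] = { gotEmpty = true; LazyList.empty }
    val suspended = 1 #:: tailWithSideEffect // evaluates nothing
    val tail = suspended.tail                // tail cell obtained, still not computed
    val filtered = tail.filter(_ => false)   // still nothing computed
    assert(!gotEmpty)
    filtered.isEmpty                         // now the empty tail is computed
    assert(gotEmpty)
  }
}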
- * @define Coll `LazyListIterable` - * @define coll lazy list - * @define orderDependent - * @define orderDependentFold - * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`, - * `appendedAll`, `lazyAppendedAll`) without forcing any of the - * intermediate resulting lazy lists may overflow the stack when - * the final result is forced. - * @define preservesLaziness This method preserves laziness; elements are only evaluated - * individually as needed. - * @define initiallyLazy This method does not evaluate anything until an operation is performed - * on the result (e.g. calling `head` or `tail`, or checking if it is empty). - * @define evaluatesAllElements This method evaluates all elements of the collection. - */ -@SerialVersionUID(3L) -final class LazyListIterable[+A] private(private[this] var lazyState: () => LazyListIterable.State[A]^) - extends AbstractIterable[A] - with Iterable[A] - with IterableOps[A, LazyListIterable, LazyListIterable[A]] - with IterableFactoryDefaults[A, LazyListIterable] - with Serializable { - this: LazyListIterable[A]^ => - import LazyListIterable._ - - @volatile private[this] var stateEvaluated: Boolean = false - @inline private def stateDefined: Boolean = stateEvaluated - private[this] var midEvaluation = false - - private lazy val state: State[A]^ = { - // if it's already mid-evaluation, we're stuck in an infinite - // self-referential loop (also it's empty) - if (midEvaluation) { - throw new RuntimeException("self-referential LazyListIterable or a derivation thereof has no more elements") - } - midEvaluation = true - val res = try lazyState() finally midEvaluation = false - // if we set it to `true` before evaluating, we may infinite loop - // if something expects `state` to already be evaluated - stateEvaluated = true - lazyState = null // allow GC - res - } - - override def iterableFactory: IterableFactory[LazyListIterable] = LazyListIterable - - override def isEmpty: Boolean = state eq State.Empty - - /** @inheritdoc - * - * $preservesLaziness - */ - override def knownSize: Int = if (knownIsEmpty) 0 else -1 - - override def head: A = state.head - - override def tail: LazyListIterable[A]^{this} = state.tail - - @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) - @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) - - /** Evaluates all undefined elements of the lazy list. - * - * This method detects cycles in lazy lists, and terminates after all - * elements of the cycle are evaluated. For example: - * - * {{{ - * val ring: LazyListIterable[Int] = 1 #:: 2 #:: 3 #:: ring - * ring.force - * ring.toString - * - * // prints - * // - * // LazyListIterable(1, 2, 3, ...) - * }}} - * - * This method will *not* terminate for non-cyclic infinite-sized collections. - * - * @return this - */ - def force: this.type = { - // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: LazyListIterable[A]^{this} = this - if (!these.isEmpty) { - these = these.tail - } - while (those ne these) { - if (these.isEmpty) return this - these = these.tail - if (these.isEmpty) return this - these = these.tail - if (these eq those) return this - those = those.tail - } - this - } - - /** @inheritdoc - * - * The iterator returned by this method preserves laziness; elements are - * only evaluated individually as needed. 
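For reference, a standalone sketch of the 2x/1x pointer technique that `force` relies on, written over a hypothetical mutable `Node` type rather than the lazy-list representation itself: the fast pointer either falls off a finite chain or eventually meets the slow pointer inside a cycle.

object TwoPointerSketch {
  final class Node[A](val value: A, var next: Option[Node[A]] = None)

  def isCyclic[A](start: Node[A]): Boolean = {
    var slow = start
    var fast = start.next.orNull
    while ((fast ne null) && (fast ne slow)) {
      fast = fast.next.flatMap(_.next).orNull // fast pointer: two steps
      slow = slow.next.get                    // one step; fast was ahead, so next exists
    }
    fast ne null // non-null means it met the slow pointer inside a cycle
  }

  def main(args: Array[String]): Unit = {
    val a = new Node(1); val b = new Node(2); val c = new Node(3)
    a.next = Some(b); b.next = Some(c)
    assert(!isCyclic(a))
    c.next = Some(a) // close the ring
    assert(isCyclic(a))
  }
}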
- */ - override def iterator: Iterator[A]^{this} = - if (knownIsEmpty) Iterator.empty - else new LazyIterator(this) - - /** Apply the given function `f` to each element of this linear sequence - * (while respecting the order of the elements). - * - * @param f The treatment to apply to each element. - * @note Overridden here as final to trigger tail-call optimization, which - * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying LazyListIterable as elements - * are consumed. - * @note This function will force the realization of the entire LazyListIterable - * unless the `f` throws an exception. - */ - @tailrec - override def foreach[U](f: A => U): Unit = { - if (!isEmpty) { - f(head) - tail.foreach(f) - } - } - - /** LazyListIterable specialization of foldLeft which allows GC to collect along the - * way. - * - * @tparam B The type of value being accumulated. - * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `LazyListIterable`. - * @return The accumulated value from successive applications of `op`. - */ - @tailrec - override def foldLeft[B](z: B)(op: (B, A) => B): B = - if (isEmpty) z - else tail.foldLeft(op(z, head))(op) - - // State.Empty doesn't use the SerializationProxy - protected[this] def writeReplace(): AnyRef^{this} = - if (knownNonEmpty) new LazyListIterable.SerializationProxy[A](this) else this - - override protected[this] def className = "LazyListIterable" - - /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. - * - * $preservesLaziness - * - * $appendStackSafety - * - * @param suffix The collection that gets appended to this lazy list - * @return The lazy list containing elements of this lazy list and the iterable object. - */ - def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = - newLL { - if (isEmpty) suffix match { - case lazyList: LazyListIterable[B] => lazyList.state // don't recompute the LazyListIterable - case coll if coll.knownSize == 0 => State.Empty - case coll => stateFromIterator(coll.iterator) - } - else sCons(head, tail lazyAppendedAll suffix) - } - - /** @inheritdoc - * - * $preservesLaziness - * - * $appendStackSafety - */ - def appendedAll[B >: A](suffix: IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = - if (knownIsEmpty) LazyListIterable.from(suffix) - else lazyAppendedAll(suffix) - - /** @inheritdoc - * - * $preservesLaziness - * - * $appendStackSafety - */ - def appended[B >: A](elem: B): LazyListIterable[B]^{this} = - if (knownIsEmpty) newLL(sCons(elem, LazyListIterable.empty)) - else lazyAppendedAll(Iterator.single(elem)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def scanLeft[B](z: B)(op: (B, A) => B): LazyListIterable[B]^{this, op} = - if (knownIsEmpty) newLL(sCons(z, LazyListIterable.empty)) - else newLL(scanLeftState(z)(op)) - - private def scanLeftState[B](z: B)(op: (B, A) => B): State[B]^{this, op} = - sCons( - z, - newLL { - if (isEmpty) State.Empty - else tail.scanLeftState(op(z, head))(op) - } - ) - - /** LazyListIterable specialization of reduceLeft which allows GC to collect - * along the way. - * - * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `LazyListIterable`. - * @return The accumulated value from successive applications of `f`. 
- */ - override def reduceLeft[B >: A](f: (B, A) => B): B = { - if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") - else { - var reducedRes: B = this.head - var left: LazyListIterable[A]^{this} = this.tail - while (!left.isEmpty) { - reducedRes = f(reducedRes, left.head) - left = left.tail - } - reducedRes - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def partition(p: A => Boolean): (LazyListIterable[A]^{this, p}, LazyListIterable[A]^{this, p}) = (filter(p), filterNot(p)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyListIterable[A1]^{this, f}, LazyListIterable[A2]^{this, f}) = { - val (left, right) = map(f).partition(_.isLeft) - (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def filter(pred: A => Boolean): LazyListIterable[A]^{this, pred} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.filterImpl(this, pred, isFlipped = false) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def filterNot(pred: A => Boolean): LazyListIterable[A]^{this, pred} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.filterImpl(this, pred, isFlipped = true) - - /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. - * - * This method is not particularly useful for a lazy list, as [[filter]] already preserves - * laziness. - * - * The `collection.WithFilter` returned by this method preserves laziness; elements are - * only evaluated individually as needed. - */ - override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, p} = - new LazyListIterable.WithFilter(coll, p) - - /** @inheritdoc - * - * $preservesLaziness - */ - def prepended[B >: A](elem: B): LazyListIterable[B] = newLL(sCons(elem, this)) - - /** @inheritdoc - * - * $preservesLaziness - */ - def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): LazyListIterable[B]^{this, prefix} = - if (knownIsEmpty) LazyListIterable.from(prefix) - else if (prefix.knownSize == 0) this - else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def map[B](f: A => B): LazyListIterable[B]^{this, f} = - if (knownIsEmpty) LazyListIterable.empty - else (mapImpl(f): @inline) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def tapEach[U](f: A => U): LazyListIterable[A]^{this, f} = map { a => f(a); a } - - private def mapImpl[B](f: A => B): LazyListIterable[B]^{this, f} = - newLL { - if (isEmpty) State.Empty - else sCons(f(head), tail.mapImpl(f)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def collect[B](pf: PartialFunction[A, B]^): LazyListIterable[B]^{this, pf} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.collectImpl(this, pf) - - /** @inheritdoc - * - * This method does not evaluate any elements further than - * the first element for which the partial function is defined. 
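Since `partition` above is just a pair of independent lazy filters, the predicate may run twice per element, while nothing is forced up front. A small check using the standard `LazyList`, which takes the same approach:

import scala.collection.immutable.LazyList

object PartitionDemo {
  def main(args: Array[String]): Unit = {
    var calls = 0
    val (evens, odds) = LazyList.from(1).take(4).partition { x => calls += 1; x % 2 == 0 }
    assert(calls == 0)                        // both halves still unevaluated
    assert(evens.head == 2 && odds.head == 1) // forcing runs the predicate
    assert(calls > 0)
  }
}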
- */ - @tailrec - override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = - if (isEmpty) None - else { - val res = pf.applyOrElse(head, LazyListIterable.anyToMarker.asInstanceOf[A => B]) - if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) - else Some(res) - } - - /** @inheritdoc - * - * This method does not evaluate any elements further than - * the first element matching the predicate. - */ - @tailrec - override def find(p: A => Boolean): Option[A] = - if (isEmpty) None - else { - val elem = head - if (p(elem)) Some(elem) - else tail.find(p) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - // optimisations are not for speed, but for functionality - // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - override def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.flatMapImpl(this, f) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def flatten[B](implicit asIterable: A -> IterableOnce[B]): LazyListIterable[B]^{this} = flatMap(asIterable) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zip[B](that: collection.IterableOnce[B]^): LazyListIterable[(A, B)]^{this, that} = - if (this.knownIsEmpty || that.knownSize == 0) LazyListIterable.empty - else newLL(zipState(that.iterator)) - - private def zipState[B](it: Iterator[B]^): State[(A, B)]^{this, it} = - if (this.isEmpty || !it.hasNext) State.Empty - else sCons((head, it.next()), newLL { tail zipState it }) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zipWithIndex: LazyListIterable[(A, Int)]^{this} = this zip LazyListIterable.from(0) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zipAll[A1 >: A, B](that: collection.Iterable[B]^, thisElem: A1, thatElem: B): LazyListIterable[(A1, B)]^{this, that} = { - if (this.knownIsEmpty) { - if (that.knownSize == 0) LazyListIterable.empty - else LazyListIterable.continually(thisElem) zip that - } else { - if (that.knownSize == 0) zip(LazyListIterable.continually(thatElem)) - else newLL(zipAllState(that.iterator, thisElem, thatElem)) - } - } - - private def zipAllState[A1 >: A, B](it: Iterator[B]^, thisElem: A1, thatElem: B): State[(A1, B)]^{this, it} = { - if (it.hasNext) { - if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyListIterable.continually(thisElem) zipState it }) - else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) - } else { - if (this.isEmpty) State.Empty - else sCons((this.head, thatElem), this.tail zip LazyListIterable.continually(thatElem)) - } - } - - /** @inheritdoc - * - * This method is not particularly useful for a lazy list, as [[zip]] already preserves - * laziness. - * - * The `collection.LazyZip2` returned by this method preserves laziness; elements are - * only evaluated individually as needed. 
- */ - // just in case it can be meaningfully overridden at some point - override def lazyZip[B](that: collection.Iterable[B]^): LazyZip2[A, B, LazyListIterable.this.type]^{this, that} = - super.lazyZip(that) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}) = - (map(asPair(_)._1), map(asPair(_)._2)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}, LazyListIterable[A3]^{this}) = - (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all except the first `n` elements. - */ - override def drop(n: Int): LazyListIterable[A]^{this} = - if (n <= 0) this - else if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.dropImpl(this, n) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all elements after the predicate returns `false`. - */ - override def dropWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.dropWhileImpl(this, p) - - /** @inheritdoc - * - * $initiallyLazy - */ - override def dropRight(n: Int): LazyListIterable[A]^{this} = { - if (n <= 0) this - else if (knownIsEmpty) LazyListIterable.empty - else newLL { - var scout = this - var remaining = n - // advance scout n elements ahead (or until empty) - while (remaining > 0 && !scout.isEmpty) { - remaining -= 1 - scout = scout.tail - } - dropRightState(scout) - } - } - - private def dropRightState(scout: LazyListIterable[_]^): State[A]^{this, scout} = - if (scout.isEmpty) State.Empty - else sCons(head, newLL(tail.dropRightState(scout.tail))) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def take(n: Int): LazyListIterable[A] = - if (knownIsEmpty) LazyListIterable.empty - else (takeImpl(n): @inline) - - private def takeImpl(n: Int): LazyListIterable[A] = { - if (n <= 0) LazyListIterable.empty - else newLL { - if (isEmpty) State.Empty - else sCons(head, tail.takeImpl(n - 1)) - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def takeWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = - if (knownIsEmpty) LazyListIterable.empty - else (takeWhileImpl(p): @inline) - - private def takeWhileImpl(p: A => Boolean): LazyListIterable[A]^{this, p} = - newLL { - if (isEmpty || !p(head)) State.Empty - else sCons(head, tail.takeWhileImpl(p)) - } - - /** @inheritdoc - * - * $initiallyLazy - */ - override def takeRight(n: Int): LazyListIterable[A]^{this} = - if (n <= 0 || knownIsEmpty) LazyListIterable.empty - else LazyListIterable.takeRightImpl(this, n) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all but the first `from` elements. 
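The scout-based `dropRight` above needs an `n`-element lookahead but stays lazy beyond that window. A usage sketch with the standard `LazyList`; the exact forcing counts assume it uses the same scout strategy:

import scala.collection.immutable.LazyList

object DropRightDemo {
  def main(args: Array[String]): Unit = {
    var seen = 0
    val src = LazyList.from(1).map { x => seen += 1; x }
    val dropped = src.dropRight(2) // evaluates nothing yet
    assert(seen == 0)
    assert(dropped.head == 1)      // forces the head plus the 2-element lookahead
    assert(seen == 3)
  }
}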
- */ - override def slice(from: Int, until: Int): LazyListIterable[A]^{this} = take(until).drop(from) - - /** @inheritdoc - * - * $evaluatesAllElements - */ - def reverse: LazyListIterable[A] = reverseOnto(LazyListIterable.empty) - - // need contravariant type B to make the compiler happy - still returns LazyListIterable[A] - @tailrec - private def reverseOnto[B >: A](tl: LazyListIterable[B]): LazyListIterable[B] = - if (isEmpty) tl - else tail.reverseOnto(newLL(sCons(head, tl))) - - @tailrec - private def lengthGt(len: Int): Boolean = - if (len < 0) true - else if (isEmpty) false - else tail.lengthGt(len - 1) - - /** @inheritdoc - * - * The iterator returned by this method mostly preserves laziness; - * a single element ahead of the iterator is evaluated. - */ - override def grouped(size: Int): Iterator[LazyListIterable[A]] = { - require(size > 0, "size must be positive, but was " + size) - slidingImpl(size = size, step = size) - } - - /** @inheritdoc - * - * The iterator returned by this method mostly preserves laziness; - * `size - step max 1` elements ahead of the iterator are evaluated. - */ - override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]] = { - require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") - slidingImpl(size = size, step = step) - } - - @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]] = - if (knownIsEmpty) Iterator.empty - else new SlidingIterator[A](this, size = size, step = step) - - /** @inheritdoc - * - * $preservesLaziness - */ - def padTo[B >: A](len: Int, elem: B): LazyListIterable[B]^{this} = { - if (len <= 0) this - else newLL { - if (isEmpty) LazyListIterable.fill(len)(elem).state - else sCons(head, tail.padTo(len - 1, elem)) - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = - if (knownIsEmpty) LazyListIterable from other - else patchImpl(from, other, replaced) - - private def patchImpl[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = - newLL { - if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyListIterable.dropImpl(this, replaced).state) - else if (isEmpty) stateFromIterator(other.iterator) - else sCons(head, tail.patchImpl(from - 1, other, replaced)) - } - - /** @inheritdoc - * - * $evaluatesAllElements - */ - // overridden just in case a lazy implementation is developed at some point - override def transpose[B](implicit asIterable: A -> collection.Iterable[B]): LazyListIterable[LazyListIterable[B]]^{this} = super.transpose - - /** @inheritdoc - * - * $preservesLaziness - */ - def updated[B >: A](index: Int, elem: B): LazyListIterable[B]^{this} = - if (index < 0) throw new IndexOutOfBoundsException(s"$index") - else updatedImpl(index, elem, index) - - private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyListIterable[B]^{this} = { - newLL { - if (index <= 0) sCons(elem, tail) - else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) - else sCons(head, tail.updatedImpl(index - 1, elem, startIndex)) - } - } - - /** Appends all elements of this $coll to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string `end`. - * Inside, the string representations (w.r.t. the method `toString`) - * of all elements of this $coll are separated by the string `sep`. 
- *
- * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`.
- *
- * $evaluatesAllElements
- *
- * @param sb    the string builder to which elements are appended.
- * @param start the starting string.
- * @param sep   the separator string.
- * @param end   the ending string.
- * @return      the string builder `sb` to which elements were appended.
- */
- override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = {
-   force
-   addStringNoForce(sb.underlying, start, sep, end)
-   sb
- }
-
- private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = {
-   b.append(start)
-   if (!stateDefined) b.append("<not computed>")
-   else if (!isEmpty) {
-     b.append(head)
-     var cursor = this
-     inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head)
-     var scout = tail
-     inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty
-     if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) {
-       cursor = scout
-       if (scoutNonEmpty) {
-         scout = scout.tail
-         // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings
-         while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) {
-           appendCursorElement()
-           cursor = cursor.tail
-           scout = scout.tail
-           if (scoutNonEmpty) scout = scout.tail
-         }
-       }
-     }
-     if (!scoutNonEmpty) { // Not a cycle, scout hit an end
-       while (cursor ne scout) {
-         appendCursorElement()
-         cursor = cursor.tail
-       }
-       // if cursor (eq scout) has state defined, it is empty; else unknown state
-       if (!cursor.stateDefined) b.append(sep).append("<not computed>")
-     } else {
-       @inline def same(a: LazyListIterable[A]^, b: LazyListIterable[A]^): Boolean = (a eq b) || (a.state eq b.state)
-       // Cycle.
-       // If we have a prefix of length P followed by a cycle of length C,
-       // the scout will be at position (P%C) in the cycle when the cursor
-       // enters it at P. They'll then collide when the scout advances another
-       // C - (P%C) ahead of the cursor.
-       // If we run the scout P farther, then it will be at the start of
-       // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner
-       // starts at the beginning of the prefix, they'll collide exactly at
-       // the start of the loop.
-       var runner = this
-       var k = 0
-       while (!same(runner, scout)) {
-         runner = runner.tail
-         scout = scout.tail
-         k += 1
-       }
-       // Now runner and scout are at the beginning of the cycle. Advance
-       // cursor, adding to string, until it hits; then we'll have covered
-       // everything once. If cursor is already at beginning, we'd better
-       // advance one first unless runner didn't go anywhere (in which case
-       // we've already looped once).
-       if (same(cursor, scout) && (k > 0)) {
-         appendCursorElement()
-         cursor = cursor.tail
-       }
-       while (!same(cursor, scout)) {
-         appendCursorElement()
-         cursor = cursor.tail
-       }
-       b.append(sep).append("<cycle>")
-     }
-   }
-   b.append(end)
- }
-
- /** $preservesLaziness
-  *
-  * @return a string representation of this collection. An undefined state is
-  *         represented with `"<not computed>"` and cycles are represented with `"<cycle>"`.
-  *
-  *         Examples:
-  *
-  *           - `"LazyListIterable(4, <not computed>)"`, a non-empty lazy list ;
-  *           - `"LazyListIterable(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ;
-  *           - `"LazyListIterable(1, 2, 3, <cycle>)"`, an infinite lazy list that contains
-  *             a cycle at the fourth element.
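-  *
-  * A hedged sketch of how these renderings arise (`#::` and `force` are defined later in this file):
-  * {{{
-  *   lazy val ones: LazyListIterable[Int] = 1 #:: ones   // self-referential, hence cyclic
-  *   ones.toString   // "LazyListIterable(<not computed>)": nothing evaluated yet
-  *   ones.force      // safe: forcing detects the cycle and stops
-  *   ones.toString   // "LazyListIterable(1, <cycle>)"
-  * }}}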
- */ - override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString - - /** @inheritdoc - * - * $preservesLaziness - */ - @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") - override def hasDefiniteSize: Boolean = { - if (!stateDefined) false - else if (isEmpty) true - else { - // Two-iterator trick (2x & 1x speed) for cycle detection. - var those = this - var these = tail - while (those ne these) { - if (!these.stateDefined) return false - else if (these.isEmpty) return true - these = these.tail - if (!these.stateDefined) return false - else if (these.isEmpty) return true - these = these.tail - if (those eq these) return false - those = those.tail - } - false // Cycle detected - } - } -} - -/** - * $factoryInfo - * @define coll lazy list - * @define Coll `LazyListIterable` - */ -@SerialVersionUID(3L) -object LazyListIterable extends IterableFactory[LazyListIterable] { - // Eagerly evaluate cached empty instance - private[this] val _empty = newLL(State.Empty).force - - private sealed trait State[+A] extends Serializable { - this: State[A]^ => - def head: A - def tail: LazyListIterable[A]^ - } - - private object State { - @SerialVersionUID(3L) - object Empty extends State[Nothing] { - def head: Nothing = throw new NoSuchElementException("head of empty lazy list") - def tail: LazyListIterable[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") - } - - @SerialVersionUID(3L) - final class Cons[A](val head: A, val tail: LazyListIterable[A]^) extends State[A] - } - - /** Creates a new LazyListIterable. */ - @inline private def newLL[A](state: => State[A]^): LazyListIterable[A]^{state} = new LazyListIterable[A](() => state) - - /** Creates a new State.Cons. */ - @inline private def sCons[A](hd: A, tl: LazyListIterable[A]^): State[A]^{tl} = new State.Cons[A](hd, tl) - - private val anyToMarker: Any => Any = _ => Statics.pfMarker - - /* All of the following `Impl` methods are carefully written so as not to - * leak the beginning of the `LazyListIterable`. They copy the initial `LazyListIterable` (`ll`) into - * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently - * leaking the head of the `LazyListIterable`. Additionally, the methods are written so that, should - * an exception be thrown by the evaluation of the `LazyListIterable` or any supplied function, they - * can continue their execution where they left off. 
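- *
- * As a rough sketch of the idea (using a hypothetical helper, not from this file):
- *
- *   def firstMatching[A](ll: LazyListIterable[A]^, p: A => Boolean): Option[A] = {
- *     var rest = ll        // the only live reference to the list; advanced as we go
- *     while (!rest.isEmpty) {
- *       if (p(rest.head)) return Some(rest.head)
- *       rest = rest.tail   // the consumed prefix becomes unreachable and can be collected
- *     }
- *     None
- *   }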
- */ - - private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[filterImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - newLL { - var elem: A = null.asInstanceOf[A] - var found = false - var rest = restRef // var rest = restRef.elem - while (!found && !rest.isEmpty) { - elem = rest.head - found = p(elem) != isFlipped - rest = rest.tail - restRef = rest // restRef.elem = rest - } - if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty - } - } - - private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[collectImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - newLL { - val marker = Statics.pfMarker - val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased - - var res: B = marker.asInstanceOf[B] // safe because B is unbounded - var rest = restRef // var rest = restRef.elem - while((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { - res = pf.applyOrElse(rest.head, toMarker) - rest = rest.tail - restRef = rest // restRef.elem = rest - } - if (res.asInstanceOf[AnyRef] eq marker) State.Empty - else sCons(res, collectImpl(rest, pf)) - } - } - - private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[flatMapImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - newLL { - var it: Iterator[B @uncheckedCaptures]^{ll, f} = null - var itHasNext = false - var rest = restRef // var rest = restRef.elem - while (!itHasNext && !rest.isEmpty) { - it = f(rest.head).iterator - itHasNext = it.hasNext - if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw - rest = rest.tail - restRef = rest // restRef.elem = rest - } - } - if (itHasNext) { - val head = it.next() - rest = rest.tail - restRef = rest // restRef.elem = rest - sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) - } else State.Empty - } - } - - private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - var iRef = n // val iRef = new IntRef(n) - newLL { - var rest = restRef // var rest = restRef.elem - var i = iRef // var i = iRef.elem - while (i > 0 && !rest.isEmpty) { - rest = rest.tail - restRef = rest // restRef.elem = rest - i -= 1 - iRef = i // iRef.elem = i - } - rest.state - } - } - - private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropWhileImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - newLL { - var rest = restRef // var rest = restRef.elem - while (!rest.isEmpty && 
p(rest.head)) { - rest = rest.tail - restRef = rest // restRef.elem = rest - } - rest.state - } - } - - private def takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - var scoutRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // same situation - var remainingRef = n // val remainingRef = new IntRef(n) - newLL { - var scout = scoutRef // var scout = scoutRef.elem - var remaining = remainingRef // var remaining = remainingRef.elem - // advance `scout` `n` elements ahead (or until empty) - while (remaining > 0 && !scout.isEmpty) { - scout = scout.tail - scoutRef = scout // scoutRef.elem = scout - remaining -= 1 - remainingRef = remaining // remainingRef.elem = remaining - } - var rest = restRef // var rest = restRef.elem - // advance `rest` and `scout` in tandem until `scout` reaches the end - while(!scout.isEmpty) { - scout = scout.tail - scoutRef = scout // scoutRef.elem = scout - rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail - restRef = rest // restRef.elem = rest - } - // `rest` is the last `n` elements (or all of them) - rest.state - } - } - - /** An alternative way of building and matching lazy lists using LazyListIterable.cons(hd, tl). - */ - object cons { - /** A lazy list consisting of a given first element and remaining elements - * @param hd The first element of the result lazy list - * @param tl The remaining elements of the result lazy list - */ - def apply[A](hd: => A, tl: => LazyListIterable[A]^): LazyListIterable[A]^{hd, tl} = newLL(sCons(hd, newLL(tl.state))) - - /** Maps a lazy list to its head and tail */ - def unapply[A](xs: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{xs})] = #::.unapply(xs) - } - - extension [A](l: => LazyListIterable[A]) - /** Construct a LazyListIterable consisting of a given first element followed by elements - * from another LazyListIterable. - */ - def #:: [B >: A](elem: => B): LazyListIterable[B]^{elem, l} = newLL(sCons(elem, newLL(l.state))) - - /** Construct a LazyListIterable consisting of the concatenation of the given LazyListIterable and - * another LazyListIterable. - */ - def #:::[B >: A](prefix: LazyListIterable[B]^): LazyListIterable[B]^{prefix, l} = prefix lazyAppendedAll l - - object #:: { - def unapply[A](s: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{s})] = - if (!s.isEmpty) Some((s.head, s.tail)) else None - } - - def from[A](coll: collection.IterableOnce[A]^): LazyListIterable[A]^{coll} = coll match { - case lazyList: LazyListIterable[A] => lazyList - case _ if coll.knownSize == 0 => empty[A] - case _ => newLL(stateFromIterator(coll.iterator)) - } - - def empty[A]: LazyListIterable[A] = _empty - - /** Creates a State from an Iterator, with another State appended after the Iterator - * is empty. - */ - private def stateFromIteratorConcatSuffix[A](it: Iterator[A]^)(suffix: => State[A]^): State[A]^{it, suffix} = - if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) - else suffix - - /** Creates a State from an IterableOnce. 
*/ - private def stateFromIterator[A](it: Iterator[A]^): State[A]^{it} = - if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) - else State.Empty - - override def concat[A](xss: collection.Iterable[A]*): LazyListIterable[A] = - if (xss.knownSize == 0) empty - else newLL(concatIterator(xss.iterator)) - - private def concatIterator[A](it: Iterator[collection.Iterable[A]]^): State[A]^{it} = - if (!it.hasNext) State.Empty - else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) - - /** An infinite LazyListIterable that repeatedly applies a given function to a start value. - * - * @param start the start value of the LazyListIterable - * @param f the function that's repeatedly applied - * @return the LazyListIterable returning the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: => A)(f: A => A): LazyListIterable[A]^{start, f} = - newLL { - val head = start - sCons(head, iterate(f(head))(f)) - } - - /** - * Create an infinite LazyListIterable starting at `start` and incrementing by - * step `step`. - * - * @param start the start value of the LazyListIterable - * @param step the increment value of the LazyListIterable - * @return the LazyListIterable starting at value `start`. - */ - def from(start: Int, step: Int): LazyListIterable[Int] = - newLL(sCons(start, from(start + step, step))) - - /** - * Create an infinite LazyListIterable starting at `start` and incrementing by `1`. - * - * @param start the start value of the LazyListIterable - * @return the LazyListIterable starting at value `start`. - */ - def from(start: Int): LazyListIterable[Int] = from(start, 1) - - /** - * Create an infinite LazyListIterable containing the given element expression (which - * is computed for each occurrence). - * - * @param elem the element composing the resulting LazyListIterable - * @return the LazyListIterable containing an infinite number of elem - */ - def continually[A](elem: => A): LazyListIterable[A]^{elem} = newLL(sCons(elem, continually(elem))) - - override def fill[A](n: Int)(elem: => A): LazyListIterable[A]^{elem} = - if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty - - override def tabulate[A](n: Int)(f: Int => A): LazyListIterable[A]^{f} = { - def at(index: Int): LazyListIterable[A]^{f} = - if (index < n) newLL(sCons(f(index), at(index + 1))) else empty - - at(0) - } - - // significantly simpler than the iterator returned by Iterator.unfold - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyListIterable[A]^{f} = - newLL { - f(init) match { - case Some((elem, state)) => sCons(elem, unfold(state)(f)) - case None => State.Empty - } - } - - /** The builder returned by this method only evaluates elements - * of collections added to it as needed. - * - * @tparam A the type of the ${coll}’s elements - * @return A builder for $Coll objects. 
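- *
- * For example (a minimal sketch; an infinite list can be added without forcing it):
- * {{{
- *   val b = LazyListIterable.newBuilder[Int]
- *   b ++= LazyListIterable.from(1)   // not evaluated here
- *   b.result().take(3).toList        // List(1, 2, 3)
- * }}}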
- */ - def newBuilder[A]: Builder[A, LazyListIterable[A]] = new LazyBuilder[A] - - private class LazyIterator[+A](private[this] var lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { - override def hasNext: Boolean = !lazyList.isEmpty - - override def next(): A = - if (lazyList.isEmpty) Iterator.empty.next() - else { - val res = lazyList.head - lazyList = lazyList.tail - res - } - } - - private class SlidingIterator[A](private[this] var lazyList: LazyListIterable[A]^, size: Int, step: Int) - extends AbstractIterator[LazyListIterable[A]] { - this: SlidingIterator[A]^ => - private val minLen = size - step max 0 - private var first = true - - def hasNext: Boolean = - if (first) !lazyList.isEmpty - else lazyList.lengthGt(minLen) - - def next(): LazyListIterable[A] = { - if (!hasNext) Iterator.empty.next() - else { - first = false - val list = lazyList - lazyList = list.drop(step) - list.take(size) - } - } - } - - private final class WithFilter[A] private[LazyListIterable](lazyList: LazyListIterable[A]^, p: A => Boolean) - extends collection.WithFilter[A, LazyListIterable] { - this: WithFilter[A]^ => - private[this] val filtered = lazyList.filter(p) - def map[B](f: A => B): LazyListIterable[B]^{this, f} = filtered.map(f) - def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = filtered.flatMap(f) - def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, q} = new WithFilter(filtered, q) - } - - private final class LazyBuilder[A] extends ReusableBuilder[A, LazyListIterable[A]] { - import LazyBuilder._ - - private[this] var next: DeferredState[A @uncheckedCaptures] = _ - private[this] var list: LazyListIterable[A @uncheckedCaptures] = _ - - clear() - - override def clear(): Unit = { - val deferred = new DeferredState[A] - list = newLL(deferred.eval()) - next = deferred - } - - override def result(): LazyListIterable[A] = { - next init State.Empty - list - } - - override def addOne(elem: A): this.type = { - val deferred = new DeferredState[A] - next init sCons(elem, newLL(deferred.eval())) - next = deferred - this - } - - // lazy implementation which doesn't evaluate the collection being added - override def addAll(xs: IterableOnce[A]^): this.type = { - if (xs.knownSize != 0) { - val deferred = new DeferredState[A] - next.init(stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval())) - next = deferred - } - this - } - } - - private object LazyBuilder { - final class DeferredState[A] { - this: DeferredState[A]^ => - private[this] var _state: (() => State[A]^) @uncheckedCaptures = _ - - def eval(): State[A]^ = { - val state = _state - if (state == null) throw new IllegalStateException("uninitialized") - state() - } - - // racy - def init(state: => State[A]^): Unit = { - if (_state != null) throw new IllegalStateException("already initialized") - _state = () => state - } - } - } - - /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. - * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses - * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization - * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. 
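- * A hedged sketch of the round trip, assuming the proxy wiring in this file:
- * {{{
- *   import java.io._
- *   val out = new ByteArrayOutputStream
- *   new ObjectOutputStream(out).writeObject(LazyListIterable(1, 2, 3))
- *   val in  = new ObjectInputStream(new ByteArrayInputStream(out.toByteArray))
- *   in.readObject().asInstanceOf[LazyListIterable[Int]].toList   // List(1, 2, 3)
- * }}}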
- */ - @SerialVersionUID(3L) - final class SerializationProxy[A](@transient protected var coll: LazyListIterable[A]^) extends Serializable { - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - var these = coll - while (these.knownNonEmpty) { - out.writeObject(these.head) - these = these.tail - } - out.writeObject(SerializeEnd) - out.writeObject(these) - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - val init = new mutable.ListBuffer[A @uncheckedCaptures] - var initRead = false - while (!initRead) in.readObject match { - case SerializeEnd => initRead = true - case a => init += a.asInstanceOf[A] - } - val tail = in.readObject().asInstanceOf[LazyListIterable[A]] - // scala/scala#10118: caution that no code path can evaluate `tail.state` - // before the resulting LazyListIterable is returned - val it = init.toList.iterator - coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state)) - } - - private[this] def readResolve(): Any = coll - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/ListMap.scala b/tests/pos-special/stdlib/collection/immutable/ListMap.scala deleted file mode 100644 index c5000d785144..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/ListMap.scala +++ /dev/null @@ -1,373 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.annotation.tailrec -import scala.collection.mutable.ReusableBuilder -import scala.collection.generic.DefaultSerializable -import scala.runtime.Statics.releaseFence -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** - * This class implements immutable maps using a list-based data structure. List map iterators and - * traversal methods visit key-value pairs in the order they were first inserted. - * - * Entries are stored internally in reversed insertion order, which means the newest key is at the - * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init` - * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes - * this collection suitable only for a small number of elements. - * - * Instances of `ListMap` represent empty maps; they can be either created by calling the - * constructor directly, or by applying the function `ListMap.empty`. 
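- *
- * For example (a minimal sketch):
- * {{{
- *   val m = ListMap(1 -> "a", 2 -> "b") + (3 -> "c")
- *   m.toList   // List((1, "a"), (2, "b"), (3, "c")): insertion order
- *   m.last     // (3, "c"): the newest entry is at the head internally, so `last` is O(1)
- * }}}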
- *
- * @tparam K the type of the keys contained in this list map
- * @tparam V the type of the values associated with the keys
- *
- * @define Coll ListMap
- * @define coll list map
- * @define mayNotTerminateInf
- * @define willNotTerminateInf
- */
- sealed class ListMap[K, +V]
-   extends AbstractMap[K, V]
-     with SeqMap[K, V]
-     with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]]
-     with MapFactoryDefaults[K, V, ListMap, Iterable]
-     with DefaultSerializable {
-
-   override def mapFactory: MapFactory[ListMap] = ListMap
-
-   override def size: Int = 0
-
-   override def isEmpty: Boolean = true
-
-   override def knownSize: Int = 0
-   def get(key: K): Option[V] = None
-
-   def updated[V1 >: V](key: K, value: V1): ListMap[K, V1] = new ListMap.Node[K, V1](key, value, this)
-
-   def removed(key: K): ListMap[K, V] = this
-
-   def iterator: Iterator[(K, V)] = {
-     var curr: ListMap[K, V] = this
-     var res: List[(K, V)] = Nil
-     while (curr.nonEmpty) {
-       res = (curr.key, curr.value) :: res
-       curr = curr.next
-     }
-     res.iterator
-   }
-
-   override def keys: Iterable[K] = {
-     var curr: ListMap[K, V] = this
-     var res: List[K] = Nil
-     while (curr.nonEmpty) {
-       res = curr.key :: res
-       curr = curr.next
-     }
-     res
-   }
-
-   override def hashCode(): Int = {
-     if (isEmpty) MurmurHash3.emptyMapHash
-     else {
-       // Can't efficiently override foreachEntry directly in ListMap because it would need to preserve iteration
-       // order by reversing the list first. But mapHash is symmetric so the reversed order is fine here.
-       val _reversed = new immutable.AbstractMap[K, V] {
-         override def isEmpty: Boolean = ListMap.this.isEmpty
-         override def removed(key: K): Map[K, V] = ListMap.this.removed(key)
-         override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = ListMap.this.updated(key, value)
-         override def get(key: K): Option[V] = ListMap.this.get(key)
-         override def iterator: Iterator[(K, V)] = ListMap.this.iterator
-         override def foreachEntry[U](f: (K, V) => U): Unit = {
-           var curr: ListMap[K, V] = ListMap.this
-           while (curr.nonEmpty) {
-             f(curr.key, curr.value)
-             curr = curr.next
-           }
-         }
-       }
-       MurmurHash3.mapHash(_reversed)
-     }
-   }
-
-   private[immutable] def key: K = throw new NoSuchElementException("key of empty map")
-   private[immutable] def value: V = throw new NoSuchElementException("value of empty map")
-   private[immutable] def next: ListMap[K, V] = throw new NoSuchElementException("next of empty map")
-
-   override def foldRight[Z](z: Z)(op: ((K, V), Z) => Z): Z = ListMap.foldRightInternal(this, z, op)
-   override protected[this] def className = "ListMap"
-
- }
-
- /**
-  * $factoryInfo
-  *
-  * Note that each element insertion takes O(n) time, which means that creating a list map with
-  * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of
-  * elements.
-  *
-  * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]]
-  * section on `List Maps` for more information.
-  * @define Coll ListMap
-  * @define coll list map
-  */
- @SerialVersionUID(3L)
- object ListMap extends MapFactory[ListMap] {
-   /**
-    * Represents an entry in the `ListMap`.
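-    *
-    * A hedged sketch of how `updated` (below) reuses structure:
-    * {{{
-    *   val m = ListMap(1 -> "a", 2 -> "b")
-    *   m.updated(1, "a") eq m   // true: the value is reference-equal, so `m` is returned unchanged
-    *   m.updated(1, "x")        // copies only the entries newer than key 1; the rest is shared
-    * }}}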
-    */
-   private[immutable] final class Node[K, V](
-     override private[immutable] val key: K,
-     private[immutable] var _value: V @uncheckedCaptures,
-     private[immutable] var _init: ListMap[K, V] @uncheckedCaptures
-   ) extends ListMap[K, V] {
-     releaseFence()
-
-     override private[immutable] def value: V = _value
-
-     override def size: Int = sizeInternal(this, 0)
-
-     @tailrec private[this] def sizeInternal(cur: ListMap[K, V], acc: Int): Int =
-       if (cur.isEmpty) acc
-       else sizeInternal(cur.next, acc + 1)
-
-     override def isEmpty: Boolean = false
-
-     override def knownSize: Int = -1
-
-     @throws[NoSuchElementException]
-     override def apply(k: K): V = applyInternal(this, k)
-
-     @tailrec private[this] def applyInternal(cur: ListMap[K, V], k: K): V =
-       if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k)
-       else if (k == cur.key) cur.value
-       else applyInternal(cur.next, k)
-
-     override def get(k: K): Option[V] = getInternal(this, k)
-
-     @tailrec private[this] def getInternal(cur: ListMap[K, V], k: K): Option[V] =
-       if (cur.isEmpty) None
-       else if (k == cur.key) Some(cur.value)
-       else getInternal(cur.next, k)
-
-     override def contains(k: K): Boolean = containsInternal(this, k)
-
-     @tailrec private[this] def containsInternal(cur: ListMap[K, V], k: K): Boolean =
-       if (cur.isEmpty) false
-       else if (k == cur.key) true
-       else containsInternal(cur.next, k)
-
-     override def updated[V1 >: V](k: K, v: V1): ListMap[K, V1] = {
-
-       var index = -1 // the index (in reverse) where the key to update exists, if it is found
-       var found = false // true if the key is found in the map
-       var isDifferent = false // true if the key was found and the values are different
-
-       {
-         var curr: ListMap[K, V] = this
-
-         while (curr.nonEmpty && !found) {
-           if (k == curr.key) {
-             found = true
-             isDifferent = v.asInstanceOf[AnyRef] ne curr.value.asInstanceOf[AnyRef]
-           }
-           index += 1
-           curr = curr.init
-         }
-       }
-
-       if (found) {
-         if (isDifferent) {
-           var newHead: ListMap.Node[K, V1] = null
-           var prev: ListMap.Node[K, V1] = null
-           var curr: ListMap[K, V1] = this
-           var i = 0
-           while (i < index) {
-             val temp = new ListMap.Node(curr.key, curr.value, null)
-             if (prev ne null) {
-               prev._init = temp
-             }
-             prev = temp
-             curr = curr.init
-             if (newHead eq null) {
-               newHead = prev
-             }
-             i += 1
-           }
-           val newNode = new ListMap.Node(curr.key, v, curr.init)
-           if (prev ne null) {
-             prev._init = newNode
-           }
-           releaseFence()
-           if (newHead eq null) newNode else newHead
-         } else {
-           this
-         }
-       } else {
-         new ListMap.Node(k, v, this)
-       }
-     }
-
-     @tailrec private[this] def removeInternal(k: K, cur: ListMap[K, V], acc: List[ListMap[K, V]]): ListMap[K, V] =
-       if (cur.isEmpty) acc.last
-       else if (k == cur.key) acc.foldLeft(cur.next) { (t, h) => new Node(h.key, h.value, t) }
-       else removeInternal(k, cur.next, cur :: acc)
-
-     override def removed(k: K): ListMap[K, V] = removeInternal(k, this, Nil)
-
-     override private[immutable] def next: ListMap[K, V] = _init
-
-     override def last: (K, V) = (key, value)
-     override def init: ListMap[K, V] = next
-
-   }
-
-   def empty[K, V]: ListMap[K, V] = EmptyListMap.asInstanceOf[ListMap[K, V]]
-
-   private object EmptyListMap extends ListMap[Any, Nothing]
-
-   def from[K, V](it: collection.IterableOnce[(K, V)]^): ListMap[K, V] =
-     it match {
-       case lm: ListMap[K, V] => lm
-       case lhm: collection.mutable.LinkedHashMap[K, V] =>
-         // by directly iterating through LinkedHashMap entries, we save creating intermediate tuples for each
-         // key-value pair
-         var current: ListMap[K, V] = empty[K, V]
-         var firstEntry = lhm._firstEntry
-         while (firstEntry ne null) {
-           current = new Node(firstEntry.key, firstEntry.value, current)
-           firstEntry = firstEntry.later
-         }
-         current
-       case _: collection.Map[K, V] | _: collection.MapView[K, V] =>
-         // when creating from a map, we need not handle duplicate keys, so we can just append each key-value to the end
-         var current: ListMap[K, V] = empty[K, V]
-         val iter = it.iterator
-         while (iter.hasNext) {
-           val (k, v) = iter.next()
-           current = new Node(k, v, current)
-         }
-         current
-
-       case _ => (newBuilder[K, V] ++= it).result()
-     }
-
-   /** Returns a new ListMap builder
-    *
-    * The implementation safely handles additions after `result()` without calling `clear()`
-    *
-    * @tparam K the map key type
-    * @tparam V the map value type
-    */
-   def newBuilder[K, V]: ReusableBuilder[(K, V), ListMap[K, V]] = new ListMapBuilder[K, V]
-
-   @tailrec private def foldRightInternal[K, V, Z](map: ListMap[K, V], prevValue: Z, op: ((K, V), Z) => Z): Z = {
-     if (map.isEmpty) prevValue
-     else foldRightInternal(map.init, op(map.last, prevValue), op)
-   }
- }
-
- /** Builder for ListMap.
-  * $multipleResults
-  */
- private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] {
-   private[this] var isAliased: Boolean = false
-   private[this] var underlying: ListMap[K, V] @uncheckedCaptures = ListMap.empty
-
-   override def clear(): Unit = {
-     underlying = ListMap.empty
-     isAliased = false
-   }
-
-   override def result(): ListMap[K, V] = {
-     isAliased = true
-     releaseFence()
-     underlying
-   }
-
-   override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2)
-
-   @tailrec
-   private[this] def insertValueAtKeyReturnFound(m: ListMap[K, V], key: K, value: V): Boolean = m match {
-     case n: ListMap.Node[K, V] =>
-       if (n.key == key) {
-         n._value = value
-         true
-       } else {
-         insertValueAtKeyReturnFound(n.init, key, value)
-       }
-     case _ => false
-   }
-
-   def addOne(key: K, value: V): this.type = {
-     if (isAliased) {
-       underlying = underlying.updated(key, value)
-     } else {
-       if (!insertValueAtKeyReturnFound(underlying, key, value)) {
-         underlying = new ListMap.Node(key, value, underlying)
-       }
-     }
-     this
-   }
-   override def addAll(xs: IterableOnce[(K, V)]^): this.type = {
-     if (isAliased) {
-       super.addAll(xs)
-     } else if (underlying.nonEmpty) {
-       xs match {
-         case m: collection.Map[K, V] =>
-           // if it is a map, then its keys will not collide with themselves.
-           // therefore we only need to check the already-existing elements for collisions.
-           // No need to check the entire list
-
-           val iter = m.iterator
-           var newUnderlying = underlying
-           while (iter.hasNext) {
-             val next = iter.next()
-             if (!insertValueAtKeyReturnFound(underlying, next._1, next._2)) {
-               newUnderlying = new ListMap.Node[K, V](next._1, next._2, newUnderlying)
-             }
-           }
-           underlying = newUnderlying
-           this
-
-         case _ =>
-           super.addAll(xs)
-       }
-     } else xs match {
-       case lhm: collection.mutable.LinkedHashMap[K, V] =>
-         // special-casing LinkedHashMap avoids creating an Iterator and tuples for each key-value pair
-         var firstEntry = lhm._firstEntry
-         while (firstEntry ne null) {
-           underlying = new ListMap.Node(firstEntry.key, firstEntry.value, underlying)
-           firstEntry = firstEntry.later
-         }
-         this
-
-       case _: collection.Map[K, V] | _: collection.MapView[K, V] =>
-         val iter = xs.iterator
-         while (iter.hasNext) {
-           val (k, v) = iter.next()
-           underlying = new ListMap.Node(k, v, underlying)
-         }
-
-         this
-       case _ =>
-         super.addAll(xs)
-     }
-   }
- }
diff --git a/tests/pos-special/stdlib/collection/immutable/ListSet.scala b/tests/pos-special/stdlib/collection/immutable/ListSet.scala
deleted file mode 100644
index 719abd78e1e6..000000000000
--- a/tests/pos-special/stdlib/collection/immutable/ListSet.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */
-
- package scala
- package collection
- package immutable
-
- import mutable.{Builder, ImmutableBuilder}
- import scala.annotation.tailrec
- import scala.collection.generic.DefaultSerializable
- import language.experimental.captureChecking
- import scala.annotation.unchecked.uncheckedCaptures
-
- /**
-  * This class implements immutable sets using a list-based data structure. List set iterators and
-  * traversal methods visit elements in the order they were first inserted.
-  *
-  * Elements are stored internally in reversed insertion order, which means the newest element is at
-  * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and
-  * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which
-  * makes this collection suitable only for a small number of elements.
-  *
-  * Instances of `ListSet` represent empty sets; they can be either created by calling the
-  * constructor directly, or by applying the function `ListSet.empty`.
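-  *
-  * For example (a minimal sketch):
-  * {{{
-  *   val s = ListSet(1, 2, 3) + 2 + 4
-  *   s.toList   // List(1, 2, 3, 4): first-insertion order, duplicates ignored
-  * }}}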
- * - * @tparam A the type of the elements contained in this list set - * - * @define Coll ListSet - * @define coll list set - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed class ListSet[A] - extends AbstractSet[A] - with StrictOptimizedSetOps[A, ListSet, ListSet[A]] - with IterableFactoryDefaults[A, ListSet] - with DefaultSerializable { - - override protected[this] def className: String = "ListSet" - - override def size: Int = 0 - override def knownSize: Int = 0 - override def isEmpty: Boolean = true - - def contains(elem: A): Boolean = false - - def incl(elem: A): ListSet[A] = new Node(elem) - def excl(elem: A): ListSet[A] = this - - def iterator: scala.collection.Iterator[A] = { - var curr: ListSet[A] = this - var res: List[A] = Nil - while (!curr.isEmpty) { - res = curr.elem :: res - curr = curr.next - } - res.iterator - } - - protected def elem: A = throw new NoSuchElementException("elem of empty set") - protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set") - - override def iterableFactory: IterableFactory[ListSet] = ListSet - - /** - * Represents an entry in the `ListSet`. - */ - protected class Node(override protected val elem: A) extends ListSet[A] { - - override def size = sizeInternal(this, 0) - override def knownSize: Int = -1 - @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int = - if (n.isEmpty) acc - else sizeInternal(n.next, acc + 1) - - override def isEmpty: Boolean = false - - override def contains(e: A): Boolean = containsInternal(this, e) - - @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean = - !n.isEmpty && (n.elem == e || containsInternal(n.next, e)) - - override def incl(e: A): ListSet[A] = if (contains(e)) this else new Node(e) - - override def excl(e: A): ListSet[A] = removeInternal(e, this, Nil) - - @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = - if (cur.isEmpty) acc.last - else if (k == cur.elem) acc.foldLeft(cur.next)((t, h) => new t.Node(h.elem)) - else removeInternal(k, cur.next, cur :: acc) - - override protected def next: ListSet[A] = ListSet.this - - override def last: A = elem - - override def init: ListSet[A] = next - } -} - -/** - * $factoryInfo - * - * Note that each element insertion takes O(n) time, which means that creating a list set with - * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of - * elements. 
- * - * @define Coll ListSet - * @define coll list set - */ -@SerialVersionUID(3L) -object ListSet extends IterableFactory[ListSet] { - - def from[E](it: scala.collection.IterableOnce[E]^): ListSet[E] = - it match { - case ls: ListSet[E] => ls - case _ if it.knownSize == 0 => empty[E] - case _ => (newBuilder[E] ++= it).result() - } - - private object EmptyListSet extends ListSet[Any] { - override def knownSize: Int = 0 - } - private[collection] def emptyInstance: ListSet[Any] = EmptyListSet - - def empty[A]: ListSet[A] = EmptyListSet.asInstanceOf[ListSet[A]] - - def newBuilder[A]: Builder[A, ListSet[A]] = - new ImmutableBuilder[A, ListSet[A]](empty) { - def addOne(elem: A): this.type = { elems = elems + elem; this } - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/LongMap.scala b/tests/pos-special/stdlib/collection/immutable/LongMap.scala deleted file mode 100644 index 4abf433273f2..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/LongMap.scala +++ /dev/null @@ -1,492 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import java.lang.IllegalStateException - -import scala.collection.generic.{BitOperations, DefaultSerializationProxy} -import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer} -import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance -import scala.language.implicitConversions -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** Utility class for long maps. - */ -private[immutable] object LongMapUtils extends BitOperations.Long { - def branchMask(i: Long, j: Long) = highestOneBit(i ^ j) - - def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = { - val m = branchMask(p1, p2) - val p = mask(p1, m) - if (zero(p1, m)) LongMap.Bin(p, m, t1, t2) - else LongMap.Bin(p, m, t2, t1) - } - - def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match { - case (left, LongMap.Nil) => left - case (LongMap.Nil, right) => right - case (left, right) => LongMap.Bin(prefix, mask, left, right) - } -} - -import LongMapUtils._ - -/** A companion object for long maps. - * - * @define Coll `LongMap` - */ -object LongMap { - def empty[T]: LongMap[T] = LongMap.Nil - def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value) - def apply[T](elems: (Long, T)*): LongMap[T] = - elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - - def from[V](coll: IterableOnce[(Long, V)]^): LongMap[V] = - newBuilder[V].addAll(coll).result() - - def newBuilder[V]: Builder[(Long, V), LongMap[V]] = - new ImmutableBuilder[(Long, V), LongMap[V]](empty) { - def addOne(elem: (Long, V)): this.type = { elems = elems + elem; this } - } - - private[immutable] case object Nil extends LongMap[Nothing] { - // Important, don't remove this! See IntMap for explanation. 
-     override def equals(that : Any) = that match {
-       case _: this.type => true
-       case _: LongMap[_] => false // The only empty LongMaps are eq Nil
-       case _ => super.equals(that)
-     }
-   }
-
-   private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] {
-     def withValue[S](s: S) =
-       if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]]
-       else LongMap.Tip(key, s)
-   }
-
-   private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] {
-     def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = {
-       if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]]
-       else LongMap.Bin[S](prefix, mask, left, right)
-     }
-   }
-
-   implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]]
-
-   @SerialVersionUID(3L)
-   private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable {
-     def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it)
-     def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef]
-   }
-
-   implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]]
-   private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] {
-     def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it)
-     def newBuilder(from: Any) = LongMap.newBuilder[AnyRef]
-   }
-
-   implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this)
-   implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this)
- }
-
- // Iterator over a non-empty LongMap.
- private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
-
-   // Basically this uses a simple stack to emulate traversal over the tree. However
-   // because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and
-   // one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
-   // depth is 65
-   var index = 0
-   var buffer = new Array[AnyRef](65)
-
-   def pop() = {
-     index -= 1
-     buffer(index).asInstanceOf[LongMap[V]]
-   }
-
-   def push(x: LongMap[V]): Unit = {
-     buffer(index) = x.asInstanceOf[AnyRef]
-     index += 1
-   }
-   push(it)
-
-   /**
-    * What value do we assign to a tip?
-    */
-   def valueOf(tip: LongMap.Tip[V]): T
-
-   def hasNext = index != 0
-   @tailrec
-   final def next(): T =
-     pop() match {
-       case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => {
-         push(right)
-         valueOf(t)
-       }
-       case LongMap.Bin(_, _, left, right) => {
-         push(right)
-         push(left)
-         next()
-       }
-       case t@LongMap.Tip(_, _) => valueOf(t)
-       // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap
-       // and don't return a LongMapIterator for LongMap.Nil.
-       case LongMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees")
-     }
- }
-
- private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){
-   def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value)
- }
-
- private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){
-   def valueOf(tip: LongMap.Tip[V]) = tip.value
- }
-
- private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){
-   def valueOf(tip: LongMap.Tip[V]) = tip.key
- }
-
- /**
-  * Specialised immutable map structure for long keys, based on
-  * [[https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Integer Maps]]
-  * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
-  *
-  * Note: This class is as of 2.8 largely superseded by HashMap.
-  *
-  * @tparam T type of the values associated with the long keys.
-  *
-  * @define Coll `immutable.LongMap`
-  * @define coll immutable long integer map
-  * @define mayNotTerminateInf
-  * @define willNotTerminateInf
-  */
- sealed abstract class LongMap[+T] extends AbstractMap[Long, T]
-   with StrictOptimizedMapOps[Long, T, Map, LongMap[T]]
-   with Serializable {
-
-   override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T) @uncheckedVariance]^): LongMap[T] = {
-     //TODO should this be the default implementation of this method in StrictOptimizedIterableOps?
-     val b = newSpecificBuilder
-     b.sizeHint(coll)
-     b.addAll(coll)
-     b.result()
-   }
-   override protected def newSpecificBuilder: Builder[(Long, T), LongMap[T]] @uncheckedVariance =
-     new ImmutableBuilder[(Long, T), LongMap[T]](empty) {
-       def addOne(elem: (Long, T)): this.type = { elems = elems + elem; this }
-     }
-
-   override def empty: LongMap[T] = LongMap.Nil
-
-   override def toList = {
-     val buffer = new ListBuffer[(Long, T) @uncheckedCaptures]
-     foreach(buffer += _)
-     buffer.toList
-   }
-
-   /**
-    * Iterator over key, value pairs of the map in unsigned order of the keys.
-    *
-    * @return an iterator over pairs of long keys and corresponding values.
-    */
-   def iterator: Iterator[(Long, T)] = this match {
-     case LongMap.Nil => Iterator.empty
-     case _ => new LongMapEntryIterator(this)
-   }
-
-   /**
-    * Loops over the key, value pairs of the map in unsigned order of the keys.
-    */
-   override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
-     case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
-     case LongMap.Tip(key, value) => f((key, value))
-     case LongMap.Nil =>
-   }
-
-   override final def foreachEntry[U](f: (Long, T) => U): Unit = this match {
-     case LongMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) }
-     case LongMap.Tip(key, value) => f(key, value)
-     case LongMap.Nil =>
-   }
-
-   override def keysIterator: Iterator[Long] = this match {
-     case LongMap.Nil => Iterator.empty
-     case _ => new LongMapKeyIterator(this)
-   }
-
-   /**
-    * Loop over the keys of the map. The same as keys.foreach(f), but may
-    * be more efficient.
-    *
-    * @param f The loop body
-    */
-   final def foreachKey[U](f: Long => U): Unit = this match {
-     case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) }
-     case LongMap.Tip(key, _) => f(key)
-     case LongMap.Nil =>
-   }
-
-   override def valuesIterator: Iterator[T] = this match {
-     case LongMap.Nil => Iterator.empty
-     case _ => new LongMapValueIterator(this)
-   }
-
-   /**
-    * Loop over the values of the map. The same as values.foreach(f), but may
-    * be more efficient.
-    *
-    * @param f The loop body
-    */
-   final def foreachValue[U](f: T => U): Unit = this match {
-     case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
-     case LongMap.Tip(_, value) => f(value)
-     case LongMap.Nil =>
-   }
-
-   override protected[this] def className = "LongMap"
-
-   override def isEmpty = this eq LongMap.Nil
-   override def knownSize: Int = if (isEmpty) 0 else super.knownSize
-   override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match {
-     case LongMap.Bin(prefix, mask, left, right) => {
-       val (newleft, newright) = (left.filter(f), right.filter(f))
-       if ((left eq newleft) && (right eq newright)) this
-       else bin(prefix, mask, newleft, newright)
-     }
-     case LongMap.Tip(key, value) =>
-       if (f((key, value))) this
-       else LongMap.Nil
-     case LongMap.Nil => LongMap.Nil
-   }
-
-   override def transform[S](f: (Long, T) => S): LongMap[S] = this match {
-     case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
-     case t@LongMap.Tip(key, value) => t.withValue(f(key, value))
-     case LongMap.Nil => LongMap.Nil
-   }
-
-   final override def size: Int = this match {
-     case LongMap.Nil => 0
-     case LongMap.Tip(_, _) => 1
-     case LongMap.Bin(_, _, left, right) => left.size + right.size
-   }
-
-   @tailrec
-   final def get(key: Long): Option[T] = this match {
-     case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
-     case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None
-     case LongMap.Nil => None
-   }
-
-   @tailrec
-   final override def getOrElse[S >: T](key: Long, default: => S): S = this match {
-     case LongMap.Nil => default
-     case LongMap.Tip(key2, value) => if (key == key2) value else default
-     case LongMap.Bin(prefix, mask, left, right) =>
-       if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
-   }
-
-   @tailrec
-   final override def apply(key: Long): T = this match {
-     case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
-     case LongMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("key not found")
-     case LongMap.Nil => throw new IllegalArgumentException("key not found")
-   }
-
-   override def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2)
-
-   override def updated[S >: T](key: Long, value: S): LongMap[S] = this match {
-     case LongMap.Bin(prefix, mask, left, right) =>
-       if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
-       else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right)
-       else LongMap.Bin(prefix, mask, left, right.updated(key, value))
-     case LongMap.Tip(key2, value2) =>
-       if (key == key2) LongMap.Tip(key, value)
-       else join(key, LongMap.Tip(key, value), key2, this)
-     case LongMap.Nil => LongMap.Tip(key, value)
-   }
-
-   /**
-    * Updates the map, using the provided function to resolve conflicts if the key is already present.
-    *
-    * Equivalent to
-    * {{{
-    *   this.get(key) match {
-    *     case None => this.update(key, value)
-    *     case Some(oldvalue) => this.update(key, f(oldvalue, value))
-    *   }
-    * }}}
-    *
-    * @tparam S    The supertype of values in this `LongMap`.
-    * @param key   The key to update.
-    * @param value The value to use if there is no conflict.
-    * @param f     The function used to resolve conflicts.
-    * @return      The updated map.
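-    *
-    * For instance (a minimal sketch):
-    * {{{
-    *   val m = LongMap(1L -> 10)
-    *   m.updateWith(1L, 5, _ + _)   // LongMap(1 -> 15): the key exists, so `f` combines old and new
-    *   m.updateWith(2L, 5, _ + _)   // LongMap(1 -> 10, 2 -> 5): no conflict, the value is inserted
-    * }}}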
-    */
-   def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match {
-     case LongMap.Bin(prefix, mask, left, right) =>
-       if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
-       else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
-       else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
-     case LongMap.Tip(key2, value2) =>
-       if (key == key2) LongMap.Tip(key, f(value2, value))
-       else join(key, LongMap.Tip(key, value), key2, this)
-     case LongMap.Nil => LongMap.Tip(key, value)
-   }
-
-   def removed(key: Long): LongMap[T] = this match {
-     case LongMap.Bin(prefix, mask, left, right) =>
-       if (!hasMatch(key, prefix, mask)) this
-       else if (zero(key, mask)) bin(prefix, mask, left - key, right)
-       else bin(prefix, mask, left, right - key)
-     case LongMap.Tip(key2, _) =>
-       if (key == key2) LongMap.Nil
-       else this
-     case LongMap.Nil => LongMap.Nil
-   }
-
-   /**
-    * A combined transform and filter function. Returns a `LongMap` such that
-    * for each `(key, value)` mapping in this map, if `f(key, value) == None`
-    * the map contains no mapping for key, and if `f(key, value) == Some(v2)`
-    * the map contains the mapping `(key, v2)`.
-    *
-    * @tparam S  The type of the values in the resulting `LongMap`.
-    * @param f   The transforming function.
-    * @return    The modified map.
-    */
-   def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match {
-     case LongMap.Bin(prefix, mask, left, right) => {
-       val newleft = left.modifyOrRemove(f)
-       val newright = right.modifyOrRemove(f)
-       if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]]
-       else bin(prefix, mask, newleft, newright)
-     }
-     case LongMap.Tip(key, value) => f(key, value) match {
-       case None => LongMap.Nil
-       case Some(value2) =>
-         //hack to preserve sharing
-         if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]]
-         else LongMap.Tip(key, value2)
-     }
-     case LongMap.Nil => LongMap.Nil
-   }
-
-   /**
-    * Forms a union map with that map, using the combining function to resolve conflicts.
-    *
-    * @tparam S      The type of values in `that`, a supertype of values in `this`.
-    * @param that    The map to form a union with.
-    * @param f       The function used to resolve conflicts between two mappings.
-    * @return        Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
-    */
-   def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{
-     case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) =>
-       if (shorter(m1, m2)) {
-         if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that)
-         else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1)
-         else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f))
-       } else if (shorter(m2, m1)){
-         if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that)
-         else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2)
-         else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f))
-       }
-       else {
-         if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
-         else join(p1, this, p2, that)
-       }
-     case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x))
-     case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
-     case (LongMap.Nil, x) => x
-     case (x, LongMap.Nil) => x
-   }
-
-   /**
-    * Forms the intersection of these two maps with a combining function. 
The - * resulting map is a map that has only keys present in both maps and has - * values produced from the original mappings by combining them with `f`. - * - * @tparam S The type of values in `that`. - * @tparam R The type of values in the resulting `LongMap`. - * @param that The map to intersect with. - * @param f The combining function. - * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. - */ - def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match { - case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) => - if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) LongMap.Nil - else if (zero(p2, m1)) l1.intersectionWith(that, f) - else r1.intersectionWith(that, f) - } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) - else { - if (!hasMatch(p1, p2, m2)) LongMap.Nil - else if (zero(p1, m2)) this.intersectionWith(l2, f) - else this.intersectionWith(r2, f) - } - case (LongMap.Tip(key, value), that) => that.get(key) match { - case None => LongMap.Nil - case Some(value2) => LongMap.Tip(key, f(key, value, value2)) - } - case (_, LongMap.Tip(key, value)) => this.get(key) match { - case None => LongMap.Nil - case Some(value2) => LongMap.Tip(key, f(key, value2, value)) - } - case (_, _) => LongMap.Nil - } - - /** - * Left biased intersection. Returns the map that has all the same mappings as this but only for keys - * which are present in the other map. - * - * @tparam R The type of values in `that`. - * @param that The map to intersect with. - * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. - */ - def intersection[R](that: LongMap[R]): LongMap[T] = - this.intersectionWith(that, (key: Long, value: T, value2: R) => value) - - def ++[S >: T](that: LongMap[S]) = - this.unionWith[S](that, (key, x, y) => y) - - @tailrec - final def firstKey: Long = this match { - case LongMap.Bin(_, _, l, r) => l.firstKey - case LongMap.Tip(k, v) => k - case LongMap.Nil => throw new IllegalStateException("Empty set") - } - - @tailrec - final def lastKey: Long = this match { - case LongMap.Bin(_, _, l, r) => r.lastKey - case LongMap.Tip(k , v) => k - case LongMap.Nil => throw new IllegalStateException("Empty set") - } - - def map[V2](f: ((Long, T)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) - - def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) - - override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = - super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such - - override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(that) - - def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] = - strictOptimizedCollect(LongMap.newBuilder[V2], pf) - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[T](LongMap), this) -} diff --git a/tests/pos-special/stdlib/collection/immutable/Map.scala b/tests/pos-special/stdlib/collection/immutable/Map.scala deleted file mode 100644 index 6daad829bf55..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Map.scala +++ /dev/null @@ -1,694 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */
-
- package scala
- package collection
- package immutable
-
- import scala.annotation.unchecked.uncheckedVariance
- import scala.collection.generic.DefaultSerializable
- import scala.collection.immutable.Map.Map4
- import scala.collection.mutable.{Builder, ReusableBuilder}
- import language.experimental.captureChecking
- import scala.annotation.unchecked.uncheckedCaptures
-
- /** Base type of immutable Maps */
- trait Map[K, +V]
-   extends Iterable[(K, V)]
-     with collection.Map[K, V]
-     with MapOps[K, V, Map, Map[K, V]]
-     with MapFactoryDefaults[K, V, Map, Iterable] {
-
-   override def mapFactory: scala.collection.MapFactory[Map] = Map
-
-   override final def toMap[K2, V2](implicit ev: (K, V) <:< (K2, V2)): Map[K2, V2] = this.asInstanceOf[Map[K2, V2]]
-
-   /** The same map with a given default function.
-    * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
-    * are not affected by `withDefault`.
-    *
-    * Invoking transformer methods (e.g. `map`) will not preserve the default value.
-    *
-    * @param d the function mapping keys to values, used for non-present keys
-    * @return a wrapper of the map with a default value
-    */
-   def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d)
-
-   /** The same map with a given default value.
-    * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
-    * are not affected by `withDefaultValue`.
-    *
-    * Invoking transformer methods (e.g. `map`) will not preserve the default value.
-    *
-    * @param d default value used for non-present keys
-    * @return a wrapper of the map with a default value
-    */
-   def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = new Map.WithDefault[K, V1](this, _ => d)
- }
-
- /** Base trait of immutable Map implementations
-  *
-  * @define coll immutable map
-  * @define Coll `immutable.Map`
-  */
- trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]]
-   extends IterableOps[(K, V), Iterable, C]
-     with collection.MapOps[K, V, CC, C] {
-
-   protected def coll: C with CC[K, V]
-
-   /** Removes a key from this map, returning a new map.
-    *
-    * @param key the key to be removed
-    * @return a new map without a binding for ''key''
-    */
-   def removed(key: K): C
-
-   /** Alias for `removed` */
-   @`inline` final def - (key: K): C = removed(key)
-
-   @deprecated("Use -- with an explicit collection", "2.13.0")
-   def - (key1: K, key2: K, keys: K*): C = removed(key1).removed(key2).removedAll(keys)
-
-   /** Creates a new $coll from this $coll by removing all elements of another
-    * collection.
-    *
-    * $willForceEvaluation
-    *
-    * @param keys the collection containing the removed elements.
-    * @return a new $coll that contains all elements of the current $coll
-    *         except one less occurrence of each of the elements of `keys`.
-    */
-   def removedAll(keys: IterableOnce[K]^): C = keys.iterator.foldLeft[C](coll)(_ - _)
-
-   /** Alias for `removedAll` */
-   @`inline` final override def -- (keys: IterableOnce[K]^): C = removedAll(keys)
-
-   /** Creates a new map obtained by updating this map with a given key/value pair.
-    * @param    key the key
-    * @param    value the value
-    * @tparam   V1 the type of the added value
-    * @return   A new map with the new key/value mapping added to this map.
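-    *
-    * For example (a minimal sketch):
-    * {{{
-    *   val m = Map("a" -> 1)
-    *   m.updated("b", 2)   // Map(a -> 1, b -> 2)
-    *   m + ("a" -> 3)      // Map(a -> 3): the alias `+` overwrites an existing binding
-    * }}}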
- */ - def updated[V1 >: V](key: K, value: V1): CC[K, V1] - - /** - * Update a mapping for the specified key and its current optionally-mapped value - * (`Some` if there is current mapping, `None` if not). - * - * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. - * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). - * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. - * - * @param key the key value - * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping - * @return A new map with the updated mapping with the key - */ - def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K,V1] = { - val previousValue = this.get(key) - remappingFunction(previousValue) match { - case None => previousValue.fold(coll)(_ => this.removed(key).coll) - case Some(nextValue) => - if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll - else coll.updated(key, nextValue) - } - } - - /** - * Alias for `updated` - * - * @param kv the key/value pair. - * @tparam V1 the type of the value in the key/value pair. - * @return A new map with the new binding added to this map. - */ - override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) - - /** This function transforms all the values of mappings contained - * in this map with function `f`. - * - * @param f A function over keys and values - * @return the updated map - */ - def transform[W](f: (K, V) => W): CC[K, W] = map { case (k, v) => (k, f(k, v)) } - - override def keySet: Set[K] = new ImmutableKeySet - - /** The implementation class of the set returned by `keySet` */ - protected class ImmutableKeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable { - def incl(elem: K): Set[K] = if (this(elem)) this else empty ++ this + elem - def excl(elem: K): Set[K] = if (this(elem)) empty ++ this - elem else this - } - -} - -trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] - extends MapOps[K, V, CC, C] - with collection.StrictOptimizedMapOps[K, V, CC, C] - with StrictOptimizedIterableOps[(K, V), Iterable, C] { - - override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]^): CC[K, V1] = { - var result: CC[K, V1] = coll - val it = that.iterator - while (it.hasNext) result = result + it.next() - result - } -} - - -/** - * $factoryInfo - * @define coll immutable map - * @define Coll `immutable.Map` - */ -@SerialVersionUID(3L) -object Map extends MapFactory[Map] { - - @SerialVersionUID(3L) - class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K -> V) - extends AbstractMap[K, V] - with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { - - def get(key: K): Option[V] = underlying.get(key) - - override def default(key: K): V = defaultValue(key) - - override def iterableFactory: IterableFactory[Iterable] = underlying.iterableFactory - - def iterator: Iterator[(K, V)] = underlying.iterator - - override def isEmpty: Boolean = underlying.isEmpty - - override def mapFactory: MapFactory[Map] = underlying.mapFactory - - override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = - new WithDefault(underlying.concat(xs), defaultValue) - - def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) - - def updated[V1 >: V](key: K, 
value: V1): WithDefault[K, V1] = - new WithDefault[K, V1](underlying.updated(key, value), defaultValue) - - override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - - override protected def fromSpecific(coll: collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = - new WithDefault[K, V](mapFactory.from(coll), defaultValue) - - override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = - Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) - } - - def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] - - def from[K, V](it: collection.IterableOnce[(K, V)]^): Map[K, V] = - it match { - case it: Iterable[_] if it.isEmpty => empty[K, V] - case m: Map[K, V] => m - case _ => (newBuilder[K, V] ++= it).result() - } - - def newBuilder[K, V]: Builder[(K, V), Map[K, V]] = new MapBuilderImpl - - @SerialVersionUID(3L) - private object EmptyMap extends AbstractMap[Any, Nothing] with Serializable { - override def size: Int = 0 - override def knownSize: Int = 0 - override def isEmpty: Boolean = true - override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) - override def contains(key: Any) = false - def get(key: Any): Option[Nothing] = None - override def getOrElse [V1](key: Any, default: => V1): V1 = default - def iterator: Iterator[(Any, Nothing)] = Iterator.empty - override def keysIterator: Iterator[Any] = Iterator.empty - override def valuesIterator: Iterator[Nothing] = Iterator.empty - def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) - def removed(key: Any): Map[Any, Nothing] = this - override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]^): Map[Any, V2] = suffix match { - case m: immutable.Map[Any, V2] => m - case _ => super.concat(suffix) - } - } - - @SerialVersionUID(3L) - final class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { - override def size: Int = 1 - override def knownSize: Int = 1 - override def isEmpty: Boolean = false - override def apply(key: K): V = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K): Boolean = key == key1 - def get(key: K): Option[V] = - if (key == key1) Some(value1) else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 else default - def iterator: Iterator[(K, V)] = Iterator.single((key1, value1)) - override def keysIterator: Iterator[K] = Iterator.single(key1) - override def valuesIterator: Iterator[V] = Iterator.single(value1) - def updated[V1 >: V](key: K, value: V1): Map[K, V1] = - if (key == key1) new Map1(key1, value) - else new Map2(key1, value1, key, value) - def removed(key: K): Map[K, V] = - if (key == key1) Map.empty else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)) - } - override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) - override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) - override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = - if (pred((key1, value1)) != isFlipped) this else Map.empty - override def transform[W](f: (K, V) => W): Map[K, W] = { - val walue1 = f(key1, value1) - if (walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) this.asInstanceOf[Map[K, W]] - else new Map1(key1, walue1) - } - override def 
hashCode(): Int = { - import scala.util.hashing.MurmurHash3 - var a, b = 0 - val N = 1 - var c = 1 - - var h = MurmurHash3.tuple2Hash(key1, value1) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.mapSeed - h = MurmurHash3.mix(h, a) - h = MurmurHash3.mix(h, b) - h = MurmurHash3.mixLast(h, c) - MurmurHash3.finalizeHash(h, N) - } - } - - @SerialVersionUID(3L) - final class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { - override def size: Int = 2 - override def knownSize: Int = 2 - override def isEmpty: Boolean = false - override def apply(key: K): V = - if (key == key1) value1 - else if (key == key2) value2 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K): Boolean = (key == key1) || (key == key2) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else default - def iterator: Iterator[(K, V)] = new Map2Iterator[(K, V)] { - override protected def nextResult(k: K, v: V): (K, V) = (k, v) - } - override def keysIterator: Iterator[K] = new Map2Iterator[K] { - override protected def nextResult(k: K, v: V): K = k - } - override def valuesIterator: Iterator[V] = new Map2Iterator[V] { - override protected def nextResult(k: K, v: V): V = v - } - - private abstract class Map2Iterator[A] extends AbstractIterator[A], Pure { - private[this] var i = 0 - override def hasNext: Boolean = i < 2 - override def next(): A = { - val result = i match { - case 0 => nextResult(key1, value1) - case 1 => nextResult(key2, value2) - case _ => Iterator.empty.next() - } - i += 1 - result - } - override def drop(n: Int): Iterator[A] = { i += n; this } - protected def nextResult(k: K, v: V @uncheckedVariance): A - } - def updated[V1 >: V](key: K, value: V1): Map[K, V1] = - if (key == key1) new Map2(key1, value, key2, value2) - else if (key == key2) new Map2(key1, value1, key2, value) - else new Map3(key1, value1, key2, value2, key, value) - def removed(key: K): Map[K, V] = - if (key == key1) new Map1(key2, value2) - else if (key == key2) new Map1(key1, value1) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)) - } - override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) - override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) - override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { - var k1 = null.asInstanceOf[K] - var v1 = null.asInstanceOf[V] - var n = 0 - if (pred((key1, value1)) != isFlipped) { {k1 = key1; v1 = value1}; n += 1} - if (pred((key2, value2)) != isFlipped) { if (n == 0) {k1 = key2; v1 = value2}; n += 1} - - n match { - case 0 => Map.empty - case 1 => new Map1(k1, v1) - case 2 => this - } - } - override def transform[W](f: (K, V) => W): Map[K, W] = { - val walue1 = f(key1, value1) - val walue2 = f(key2, value2) - if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && - (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] - else new Map2(key1, walue1, key2, walue2) - } - override def hashCode(): Int = { - import scala.util.hashing.MurmurHash3 - var a, b = 0 - val N = 2 - var c = 1 - - var h = MurmurHash3.tuple2Hash(key1, value1) - a += h 
- b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key2, value2) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.mapSeed - h = MurmurHash3.mix(h, a) - h = MurmurHash3.mix(h, b) - h = MurmurHash3.mixLast(h, c) - MurmurHash3.finalizeHash(h, N) - } - } - - @SerialVersionUID(3L) - class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { - override def size: Int = 3 - override def knownSize: Int = 3 - override def isEmpty: Boolean = false - override def apply(key: K): V = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else default - def iterator: Iterator[(K, V)] = new Map3Iterator[(K, V)] { - override protected def nextResult(k: K, v: V): (K, V) = (k, v) - } - override def keysIterator: Iterator[K] = new Map3Iterator[K] { - override protected def nextResult(k: K, v: V): K = k - } - override def valuesIterator: Iterator[V] = new Map3Iterator[V] { - override protected def nextResult(k: K, v: V): V = v - } - - private abstract class Map3Iterator[A] extends AbstractIterator[A], Pure { - private[this] var i = 0 - override def hasNext: Boolean = i < 3 - override def next(): A = { - val result = i match { - case 0 => nextResult(key1, value1) - case 1 => nextResult(key2, value2) - case 2 => nextResult(key3, value3) - case _ => Iterator.empty.next() - } - i += 1 - result - } - override def drop(n: Int): Iterator[A] = { i += n; this } - protected def nextResult(k: K, v: V @uncheckedVariance): A - } - def updated[V1 >: V](key: K, value: V1): Map[K, V1] = - if (key == key1) new Map3(key1, value, key2, value2, key3, value3) - else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) - else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) - else new Map4(key1, value1, key2, value2, key3, value3, key, value) - def removed(key: K): Map[K, V] = - if (key == key1) new Map2(key2, value2, key3, value3) - else if (key == key2) new Map2(key1, value1, key3, value3) - else if (key == key3) new Map2(key1, value1, key2, value2) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)) - } - override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) - override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) - override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { - var k1, k2 = null.asInstanceOf[K] - var v1, v2 = null.asInstanceOf[V] - var n = 0 - if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} - if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} - if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 }; n += 1} - - n match { - case 0 => Map.empty - case 1 => new Map1(k1, 
v1) - case 2 => new Map2(k1, v1, k2, v2) - case 3 => this - } - } - override def transform[W](f: (K, V) => W): Map[K, W] = { - val walue1 = f(key1, value1) - val walue2 = f(key2, value2) - val walue3 = f(key3, value3) - if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && - (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && - (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] - else new Map3(key1, walue1, key2, walue2, key3, walue3) - } - override def hashCode(): Int = { - import scala.util.hashing.MurmurHash3 - var a, b = 0 - val N = 3 - var c = 1 - - var h = MurmurHash3.tuple2Hash(key1, value1) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key2, value2) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key3, value3) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.mapSeed - h = MurmurHash3.mix(h, a) - h = MurmurHash3.mix(h, b) - h = MurmurHash3.mixLast(h, c) - MurmurHash3.finalizeHash(h, N) - } - } - - @SerialVersionUID(3L) - final class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) - extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { - - override def size: Int = 4 - override def knownSize: Int = 4 - override def isEmpty: Boolean = false - override def apply(key: K): V = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else if (key == key4) value4 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) || (key == key4) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else if (key == key4) Some(value4) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else if (key == key4) value4 - else default - def iterator: Iterator[(K, V)] = new Map4Iterator[(K, V)] { - override protected def nextResult(k: K, v: V): (K, V) = (k, v) - } - override def keysIterator: Iterator[K] = new Map4Iterator[K] { - override protected def nextResult(k: K, v: V): K = k - } - override def valuesIterator: Iterator[V] = new Map4Iterator[V] { - override protected def nextResult(k: K, v: V): V = v - } - - private abstract class Map4Iterator[A] extends AbstractIterator[A], Pure { - private[this] var i = 0 - override def hasNext: Boolean = i < 4 - override def next(): A = { - val result = i match { - case 0 => nextResult(key1, value1) - case 1 => nextResult(key2, value2) - case 2 => nextResult(key3, value3) - case 3 => nextResult(key4, value4) - case _ => Iterator.empty.next() - } - i += 1 - result - } - override def drop(n: Int): Iterator[A] = { i += n; this } - protected def nextResult(k: K, v: V @uncheckedVariance): A - } - def updated[V1 >: V](key: K, value: V1): Map[K, V1] = - if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) - else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) - else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) - else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) - else HashMap.empty[K, V1].updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value) - def removed(key: K): Map[K, V] = 
- if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) - else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) - else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) - else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) - } - override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) || p((key4, value4)) - override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) && p((key4, value4)) - override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { - var k1, k2, k3 = null.asInstanceOf[K] - var v1, v2, v3 = null.asInstanceOf[V] - var n = 0 - if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} - if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} - if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 } else { k3 = key3; v3 = value3}; n += 1} - if (pred((key4, value4)) != isFlipped) { if (n == 0) { k1 = key4; v1 = value4 } else if (n == 1) { k2 = key4; v2 = value4 } else if (n == 2) { k3 = key4; v3 = value4 }; n += 1} - - n match { - case 0 => Map.empty - case 1 => new Map1(k1, v1) - case 2 => new Map2(k1, v1, k2, v2) - case 3 => new Map3(k1, v1, k2, v2, k3, v3) - case 4 => this - } - } - override def transform[W](f: (K, V) => W): Map[K, W] = { - val walue1 = f(key1, value1) - val walue2 = f(key2, value2) - val walue3 = f(key3, value3) - val walue4 = f(key4, value4) - if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && - (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && - (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef]) && - (walue4.asInstanceOf[AnyRef] eq value4.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] - else new Map4(key1, walue1, key2, walue2, key3, walue3, key4, walue4) - } - private[immutable] def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): builder.type = - builder.addOne(key1, value1).addOne(key2, value2).addOne(key3, value3).addOne(key4, value4) - override def hashCode(): Int = { - import scala.util.hashing.MurmurHash3 - var a, b = 0 - val N = 4 - var c = 1 - - var h = MurmurHash3.tuple2Hash(key1, value1) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key2, value2) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key3, value3) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key4, value4) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.mapSeed - h = MurmurHash3.mix(h, a) - h = MurmurHash3.mix(h, b) - h = MurmurHash3.mixLast(h, c) - MurmurHash3.finalizeHash(h, N) - } - } -} - -/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
*/ -abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] - -private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { - private[this] var elems: Map[K, V] @uncheckedCaptures = Map.empty - private[this] var switchedToHashMapBuilder: Boolean = false - private[this] var hashMapBuilder: HashMapBuilder[K, V] @uncheckedCaptures = _ - - private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = - if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) - else elems.getOrElse(key, value) - - override def clear(): Unit = { - elems = Map.empty - if (hashMapBuilder != null) { - hashMapBuilder.clear() - } - switchedToHashMapBuilder = false - } - - override def result(): Map[K, V] = - if (switchedToHashMapBuilder) hashMapBuilder.result() else elems - - def addOne(key: K, value: V): this.type = { - if (switchedToHashMapBuilder) { - hashMapBuilder.addOne(key, value) - } else if (elems.size < 4) { - elems = elems.updated(key, value) - } else { - // assert(elems.size == 4) - if (elems.contains(key)) { - elems = elems.updated(key, value) - } else { - switchedToHashMapBuilder = true - if (hashMapBuilder == null) { - hashMapBuilder = new HashMapBuilder - } - elems.asInstanceOf[Map4[K, V]].buildTo(hashMapBuilder) - hashMapBuilder.addOne(key, value) - } - } - - this - } - - def addOne(elem: (K, V)) = addOne(elem._1, elem._2) - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = - if (switchedToHashMapBuilder) { - hashMapBuilder.addAll(xs) - this - } else { - super.addAll(xs) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala deleted file mode 100644 index f26d9728e5ad..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala +++ /dev/null @@ -1,509 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.immutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** `NumericRange` is a more generic version of the - * `Range` class which works with arbitrary types. - * It must be supplied with an `Integral` implementation of the - * range type. - * - * Factories for likely types include `Range.BigInt`, `Range.Long`, - * and `Range.BigDecimal`. `Range.Int` exists for completeness, but - * the `Int`-based `scala.Range` should be more performant. 
- * - * {{{ - * val r1 = Range(0, 100, 1) - * val veryBig = Int.MaxValue.toLong + 1 - * val r2 = Range.Long(veryBig, veryBig + 100, 1) - * assert(r1 sameElements r2.map(_ - veryBig)) - * }}} - * - * @define Coll `NumericRange` - * @define coll numeric range - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(3L) -sealed class NumericRange[T]( - val start: T, - val end: T, - val step: T, - val isInclusive: Boolean -)(implicit - num: Integral[T] -) - extends AbstractSeq[T] - with IndexedSeq[T] - with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] - with StrictOptimizedSeqOps[T, IndexedSeq, IndexedSeq[T]] - with IterableFactoryDefaults[T, IndexedSeq] - with Serializable { self => - - override def iterator: Iterator[T] = new NumericRange.NumericRangeIterator(this, num) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = { - import scala.collection.convert._ - import impl._ - val s = shape.shape match { - case StepperShape.IntShape => new IntNumericRangeStepper (this.asInstanceOf[NumericRange[Int]], 0, length) - case StepperShape.LongShape => new LongNumericRangeStepper (this.asInstanceOf[NumericRange[Long]], 0, length) - case _ => shape.parUnbox(new AnyNumericRangeStepper[T](this, 0, length).asInstanceOf[AnyStepper[T] with EfficientSplit]) - } - s.asInstanceOf[S with EfficientSplit] - } - - - /** Note that NumericRange must be invariant so that constructs - * such as "1L to 10 by 5" do not infer the range type as AnyVal. - */ - import num._ - - // See comment in Range for why this must be lazy. - override lazy val length: Int = NumericRange.count(start, end, step, isInclusive) - override lazy val isEmpty: Boolean = ( - (num.gt(start, end) && num.gt(step, num.zero)) - || (num.lt(start, end) && num.lt(step, num.zero)) - || (num.equiv(start, end) && !isInclusive) - ) - override def last: T = - if (isEmpty) Nil.head - else locationAfterN(length - 1) - override def init: NumericRange[T] = - if (isEmpty) Nil.init - else new NumericRange(start, end - step, step, isInclusive) - - override def head: T = if (isEmpty) Nil.head else start - override def tail: NumericRange[T] = - if (isEmpty) Nil.tail - else if(isInclusive) new NumericRange.Inclusive(start + step, end, step) - else new NumericRange.Exclusive(start + step, end, step) - - /** Create a new range with the start and end values of this range and - * a new `step`. - */ - def by(newStep: T): NumericRange[T] = copy(start, end, newStep) - - - /** Create a copy of this range. - */ - def copy(start: T, end: T, step: T): NumericRange[T] = - new NumericRange(start, end, step, isInclusive) - - @throws[IndexOutOfBoundsException] - def apply(idx: Int): T = { - if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${length - 1})") - else locationAfterN(idx) - } - - override def foreach[@specialized(Specializable.Unit) U](f: T => U): Unit = { - var count = 0 - var current = start - while (count < length) { - f(current) - current += step - count += 1 - } - } - - // TODO: these private methods are straight copies from Range, duplicated - // to guard against any (most likely illusory) performance drop. They should - // be eliminated one way or another. - - // Tests whether a number is within the endpoints, without testing - // whether it is a member of the sequence (i.e. when step > 1.) 
- private def isWithinBoundaries(elem: T) = !isEmpty && ( - (step > zero && start <= elem && elem <= last ) || - (step < zero && last <= elem && elem <= start) - ) - // Methods like apply throw exceptions on invalid n, but methods like take/drop - // are forgiving: therefore the checks are with the methods. - private def locationAfterN(n: Int): T = start + (step * fromInt(n)) - - private def crossesTheEndAfterN(n: Int): Boolean = { - // if we're sure that subtraction in the context of T won't overflow, we use this function - // to calculate the length of the range - def unsafeRangeLength(r: NumericRange[T]): T = { - val diff = num.minus(r.end, r.start) - val quotient = num.quot(diff, r.step) - val remainder = num.rem(diff, r.step) - if (!r.isInclusive && num.equiv(remainder, num.zero)) - num.max(quotient, num.zero) - else - num.max(num.plus(quotient, num.one), num.zero) - } - - // detects whether value can survive a bidirectional trip to -and then from- Int. - def fitsInInteger(value: T): Boolean = num.equiv(num.fromInt(num.toInt(value)), value) - - val stepIsInTheSameDirectionAsStartToEndVector = - (num.gt(end, start) && num.gt(step, num.zero)) || (num.lt(end, start) && num.sign(step) == -num.one) - - if (num.equiv(start, end) || n <= 0 || !stepIsInTheSameDirectionAsStartToEndVector) return n >= 1 - - val sameSign = num.equiv(num.sign(start), num.sign(end)) - - if (sameSign) { // subtraction is safe - val len = unsafeRangeLength(this) - if (fitsInInteger(len)) n >= num.toInt(len) else num.gteq(num.fromInt(n), len) - } else { - // split to two ranges, which subtraction is safe in both of them (around zero) - val stepsRemainderToZero = num.rem(start, step) - val walksOnZero = num.equiv(stepsRemainderToZero, num.zero) - val closestToZero = if (walksOnZero) -step else stepsRemainderToZero - - /* - When splitting into two ranges, we should be super-careful about one of the sides hitting MinValue of T, - so we take two steps smaller than zero to ensure unsafeRangeLength won't overflow (taking one step may overflow depending on the step). - Same thing happens for MaxValue from zero, so we take one step further to ensure the safety of unsafeRangeLength. - After performing such operation, there are some elements remaining in between and around zero, - which their length is represented by carry. 
- */ - val (l: NumericRange[T], r: NumericRange[T], carry: Int) = - if (num.lt(start, num.zero)) { - if (walksOnZero) { - val twoStepsAfterLargestNegativeNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) - (NumericRange(start, closestToZero, step), copy(twoStepsAfterLargestNegativeNumber, end, step), 2) - } else { - (NumericRange(start, closestToZero, step), copy(num.plus(closestToZero, step), end, step), 1) - } - } else { - if (walksOnZero) { - val twoStepsAfterZero = num.times(step, num.fromInt(2)) - (copy(twoStepsAfterZero, end, step), NumericRange.inclusive(start, -step, step), 2) - } else { - val twoStepsAfterSmallestPositiveNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) - (copy(twoStepsAfterSmallestPositiveNumber, end, step), NumericRange.inclusive(start, closestToZero, step), 2) - } - } - - val leftLength = unsafeRangeLength(l) - val rightLength = unsafeRangeLength(r) - - // instead of `n >= rightLength + leftLength + curry` which may cause addition overflow, - // this can be used `(n - leftLength - curry) >= rightLength` (Both in Int and T, depends on whether the lengths fit in Int) - if (fitsInInteger(leftLength) && fitsInInteger(rightLength)) - n - num.toInt(leftLength) - carry >= num.toInt(rightLength) - else - num.gteq(num.minus(num.minus(num.fromInt(n), leftLength), num.fromInt(carry)), rightLength) - } - } - - // When one drops everything. Can't ever have unchecked operations - // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } - // will overflow. This creates an exclusive range where start == end - // based on the given value. - private def newEmptyRange(value: T) = NumericRange(value, value, step) - - override def take(n: Int): NumericRange[T] = { - if (n <= 0 || isEmpty) newEmptyRange(start) - else if (crossesTheEndAfterN(n)) this - else new NumericRange.Inclusive(start, locationAfterN(n - 1), step) - } - - override def drop(n: Int): NumericRange[T] = { - if (n <= 0 || isEmpty) this - else if (crossesTheEndAfterN(n)) newEmptyRange(end) - else copy(locationAfterN(n), end, step) - } - - override def splitAt(n: Int): (NumericRange[T], NumericRange[T]) = (take(n), drop(n)) - - override def reverse: NumericRange[T] = - if (isEmpty) this - else { - val newStep = -step - if (num.sign(newStep) == num.sign(step)) { - throw new ArithmeticException("number type is unsigned, and .reverse requires a negative step") - } else new NumericRange.Inclusive(last, start, newStep) - } - - import NumericRange.defaultOrdering - - override def min[T1 >: T](implicit ord: Ordering[T1]): T = - // We can take the fast path: - // - If the Integral of this NumericRange is also the requested Ordering - // (Integral <: Ordering). This can happen for custom Integral types. - // - The Ordering is the default Ordering of a well-known Integral type. - if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { - if (num.sign(step) > zero) head - else last - } else super.min(ord) - - override def max[T1 >: T](implicit ord: Ordering[T1]): T = - // See comment for fast path in min(). - if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { - if (num.sign(step) > zero) last - else head - } else super.max(ord) - - // a well-typed contains method. 
- def containsTyped(x: T): Boolean = - isWithinBoundaries(x) && (((x - start) % step) == zero) - - override def contains[A1 >: T](x: A1): Boolean = - try containsTyped(x.asInstanceOf[T]) - catch { case _: ClassCastException => false } - - override def sum[B >: T](implicit num: Numeric[B]): B = { - if (isEmpty) num.zero - else if (size == 1) head - else { - // If there is no overflow, use arithmetic series formula - // a + ... (n terms total) ... + b = n*(a+b)/2 - if ((num eq scala.math.Numeric.IntIsIntegral)|| - (num eq scala.math.Numeric.ShortIsIntegral)|| - (num eq scala.math.Numeric.ByteIsIntegral)|| - (num eq scala.math.Numeric.CharIsIntegral)) { - // We can do math with no overflow in a Long--easy - val exact = (size * ((num toLong head) + (num toInt last))) / 2 - num fromInt exact.toInt - } - else if (num eq scala.math.Numeric.LongIsIntegral) { - // Uh-oh, might be overflow, so we have to divide before we overflow. - // Either numRangeElements or (head + last) must be even, so divide the even one before multiplying - val a = head.toLong - val b = last.toLong - val ans = - if ((size & 1) == 0) (size / 2) * (a + b) - else size * { - // Sum is even, but we might overflow it, so divide in pieces and add back remainder - val ha = a/2 - val hb = b/2 - ha + hb + ((a - 2*ha) + (b - 2*hb)) / 2 - } - ans.asInstanceOf[B] - } - else if ((num eq scala.math.Numeric.BigIntIsIntegral) || - (num eq scala.math.Numeric.BigDecimalAsIfIntegral)) { - // No overflow, so we can use arithmetic series formula directly - // (not going to worry about running out of memory) - val numAsIntegral = num.asInstanceOf[Integral[B]] - import numAsIntegral._ - ((num fromInt size) * (head + last)) / (num fromInt 2) - } - else { - // User provided custom Numeric, so we cannot rely on arithmetic series formula (e.g. won't work on something like Z_6) - if (isEmpty) num.zero - else { - var acc = num.zero - var i = head - var idx = 0 - while(idx < length) { - acc = num.plus(acc, i) - i = i + step - idx = idx + 1 - } - acc - } - } - } - } - - override lazy val hashCode: Int = super.hashCode() - override protected final def applyPreferredMaxLength: Int = Int.MaxValue - - override def equals(other: Any): Boolean = other match { - case x: NumericRange[_] => - (x canEqual this) && (length == x.length) && ( - (isEmpty) || // all empty sequences are equal - (start == x.start && last == x.last) // same length and same endpoints implies equality - ) - case _ => - super.equals(other) - } - - override def toString: String = { - val empty = if (isEmpty) "empty " else "" - val preposition = if (isInclusive) "to" else "until" - val stepped = if (step == 1) "" else s" by $step" - s"${empty}NumericRange $start $preposition $end$stepped" - } - - override protected[this] def className = "NumericRange" -} - -/** A companion object for numeric ranges. 
- * @define Coll `NumericRange` - * @define coll numeric range - */ -object NumericRange { - private def bigDecimalCheckUnderflow[T](start: T, end: T, step: T)(implicit num: Integral[T]): Unit = { - def FAIL(boundary: T, step: T): Unit = { - val msg = boundary match { - case bd: BigDecimal => s"Precision ${bd.mc.getPrecision}" - case _ => "Precision" - } - throw new IllegalArgumentException( - s"$msg inadequate to represent steps of size $step near $boundary" - ) - } - if (num.minus(num.plus(start, step), start) != step) FAIL(start, step) - if (num.minus(end, num.minus(end, step)) != step) FAIL(end, step) - } - - /** Calculates the number of elements in a range given start, end, step, and - * whether or not it is inclusive. Throws an exception if step == 0 or - * the number of elements exceeds the maximum Int. - */ - def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = { - val zero = num.zero - val upward = num.lt(start, end) - val posStep = num.gt(step, zero) - - if (step == zero) throw new IllegalArgumentException("step cannot be 0.") - else if (start == end) if (isInclusive) 1 else 0 - else if (upward != posStep) 0 - else { - /* We have to be frightfully paranoid about running out of range. - * We also can't assume that the numbers will fit in a Long. - * We will assume that if a > 0, -a can be represented, and if - * a < 0, -a+1 can be represented. We also assume that if we - * can't fit in Int, we can represent 2*Int.MaxValue+3 (at least). - * And we assume that numbers wrap rather than cap when they overflow. - */ - // Check whether we can short-circuit by deferring to Int range. - val startint = num.toInt(start) - if (start == num.fromInt(startint)) { - val endint = num.toInt(end) - if (end == num.fromInt(endint)) { - val stepint = num.toInt(step) - if (step == num.fromInt(stepint)) { - return { - if (isInclusive) Range.inclusive(startint, endint, stepint).length - else Range (startint, endint, stepint).length - } - } - } - } - // If we reach this point, deferring to Int failed. - // Numbers may be big. - if (num.isInstanceOf[Numeric.BigDecimalAsIfIntegral]) { - bigDecimalCheckUnderflow(start, end, step) // Throw exception if math is inaccurate (including no progress at all) - } - val one = num.one - val limit = num.fromInt(Int.MaxValue) - def check(t: T): T = - if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.") - else t - // If the range crosses zero, it might overflow when subtracted - val startside = num.sign(start) - val endside = num.sign(end) - num.toInt{ - if (num.gteq(num.times(startside, endside), zero)) { - // We're sure we can subtract these numbers. - // Note that we do not use .rem because of different conventions for Long and BigInt - val diff = num.minus(end, start) - val quotient = check(num.quot(diff, step)) - val remainder = num.minus(diff, num.times(quotient, step)) - if (!isInclusive && zero == remainder) quotient else check(num.plus(quotient, one)) - } - else { - // We might not even be able to subtract these numbers. - // Jump in three pieces: - // * start to -1 or 1, whichever is closer (waypointA) - // * one step, which will take us at least to 0 (ends at waypointB) - // * (except with really small numbers) - // * there to the end - val negone = num.fromInt(-1) - val startlim = if (posStep) negone else one - //Use start value if the start value is closer to zero than startlim - // * e.g. 
.5 is closer to zero than 1 and -.5 is closer to zero than -1 - val startdiff = { - if ((posStep && num.lt(startlim, start)) || (!posStep && num.gt(startlim, start))) - start - else - num.minus(startlim, start) - } - val startq = check(num.quot(startdiff, step)) - val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step)) - val waypointB = num.plus(waypointA, step) - check { - if (num.lt(waypointB, end) != upward) { - // No last piece - if (isInclusive && waypointB == end) num.plus(startq, num.fromInt(2)) - else num.plus(startq, one) - } - else { - // There is a last piece - val enddiff = num.minus(end,waypointB) - val endq = check(num.quot(enddiff, step)) - val last = if (endq == zero) waypointB else num.plus(waypointB, num.times(endq, step)) - // Now we have to tally up all the pieces - // 1 for the initial value - // startq steps to waypointA - // 1 step to waypointB - // endq steps to the end (one less if !isInclusive and last==end) - num.plus(startq, num.plus(endq, if (!isInclusive && last==end) one else num.fromInt(2))) - } - } - } - } - } - } - - @SerialVersionUID(3L) - class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) - extends NumericRange(start, end, step, true) { - override def copy(start: T, end: T, step: T): Inclusive[T] = - NumericRange.inclusive(start, end, step) - - def exclusive: Exclusive[T] = NumericRange(start, end, step) - } - - @SerialVersionUID(3L) - class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) - extends NumericRange(start, end, step, false) { - override def copy(start: T, end: T, step: T): Exclusive[T] = - NumericRange(start, end, step) - - def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step) - } - - def apply[T](start: T, end: T, step: T)(implicit num: Integral[T]): Exclusive[T] = - new Exclusive(start, end, step) - def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] = - new Inclusive(start, end, step) - - private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]]( - Numeric.BigIntIsIntegral -> Ordering.BigInt, - Numeric.IntIsIntegral -> Ordering.Int, - Numeric.ShortIsIntegral -> Ordering.Short, - Numeric.ByteIsIntegral -> Ordering.Byte, - Numeric.CharIsIntegral -> Ordering.Char, - Numeric.LongIsIntegral -> Ordering.Long, - Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal - ) - - @SerialVersionUID(3L) - private final class NumericRangeIterator[T](self: NumericRange[T], num: Integral[T]) extends AbstractIterator[T] with Serializable { - import num.mkNumericOps - - private[this] var _hasNext = !self.isEmpty - private[this] var _next: T @uncheckedCaptures = self.start - private[this] val lastElement: T = if (_hasNext) self.last else self.start - override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0 - def hasNext: Boolean = _hasNext - def next(): T = { - if (!_hasNext) Iterator.empty.next() - val value = _next - _hasNext = value != lastElement - _next = num.plus(value, self.step) - value - } - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Queue.scala b/tests/pos-special/stdlib/collection/immutable/Queue.scala deleted file mode 100644 index 929c79ce588a..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Queue.scala +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.{Builder, ListBuffer} -import language.experimental.captureChecking - -/** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. - * - * `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements. - * Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the - * queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''. - * - * Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)`, except in the case - * where a pivot is required, in which case, a cost of `O(n)` is incurred, where `n` is the number of elements in the queue. When this happens, - * `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] - * section on `Immutable Queues` for more information. - * - * @define Coll `immutable.Queue` - * @define coll immutable queue - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ - -sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with LinearSeqOps[A, Queue, Queue[A]] - with StrictOptimizedLinearSeqOps[A, Queue, Queue[A]] - with StrictOptimizedSeqOps[A, Queue, Queue[A]] - with IterableFactoryDefaults[A, Queue] - with DefaultSerializable { - - override def iterableFactory: SeqFactory[Queue] = Queue - - /** Returns the `n`-th element of this queue. - * The first element is at position `0`. - * - * @param n index of the element to return - * @return the element at position `n` in this queue. - * @throws NoSuchElementException if the queue is too short. - */ - override def apply(n: Int): A = { - def indexOutOfRange(): Nothing = throw new IndexOutOfBoundsException(n.toString) - - var index = 0 - var curr = out - - while (index < n && curr.nonEmpty) { - index += 1 - curr = curr.tail - } - - if (index == n) { - if (curr.nonEmpty) curr.head - else if (in.nonEmpty) in.last - else indexOutOfRange() - } else { - val indexFromBack = n - index - val inLength = in.length - if (indexFromBack >= inLength) indexOutOfRange() - else in(inLength - indexFromBack - 1) - } - } - - /** Returns the elements in the list as an iterator - */ - override def iterator: Iterator[A] = out.iterator.concat(in.reverse) - - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. - */ - override def isEmpty: Boolean = in.isEmpty && out.isEmpty - - override def head: A = - if (out.nonEmpty) out.head - else if (in.nonEmpty) in.last - else throw new NoSuchElementException("head on empty queue") - - override def tail: Queue[A] = - if (out.nonEmpty) new Queue(in, out.tail) - else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) - else throw new NoSuchElementException("tail on empty queue") - - override def last: A = - if (in.nonEmpty) in.head - else if (out.nonEmpty) out.last - else throw new NoSuchElementException("last on empty queue") - - /* This is made to avoid inefficient implementation of iterator. 
*/ - override def forall(p: A => Boolean): Boolean = - in.forall(p) && out.forall(p) - - /* This is made to avoid inefficient implementation of iterator. */ - override def exists(p: A => Boolean): Boolean = - in.exists(p) || out.exists(p) - - override protected[this] def className = "Queue" - - /** Returns the length of the queue. */ - override def length: Int = in.length + out.length - - override def prepended[B >: A](elem: B): Queue[B] = new Queue(in, elem :: out) - - override def appended[B >: A](elem: B): Queue[B] = enqueue(elem) - - override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]^): Queue[B] = { - val newIn = that match { - case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) - case that: List[B] => that reverse_::: this.in - case _ => - var result: List[B] = this.in - val iter = that.iterator - while (iter.hasNext) { - result = iter.next() :: result - } - result - } - if (newIn eq this.in) this else new Queue[B](newIn, this.out) - } - - /** Creates a new queue with element added at the end - * of the old queue. - * - * @param elem the element to insert - */ - def enqueue[B >: A](elem: B): Queue[B] = new Queue(elem :: in, out) - - /** Creates a new queue with all elements provided by an `Iterable` object - * added at the end of the old queue. - * - * The elements are appended in the order they are given out by the - * iterator. - * - * @param iter an iterable object - */ - @deprecated("Use `enqueueAll` instead of `enqueue` to enqueue a collection of elements", "2.13.0") - @`inline` final def enqueue[B >: A](iter: scala.collection.Iterable[B]) = enqueueAll(iter) - - /** Creates a new queue with all elements provided by an `Iterable` object - * added at the end of the old queue. - * - * The elements are appended in the order they are given out by the - * iterator. - * - * @param iter an iterable object - */ - def enqueueAll[B >: A](iter: scala.collection.Iterable[B]): Queue[B] = appendedAll(iter) - - /** Returns a tuple with the first element in the queue, - * and a new queue with this element removed. - * - * @throws NoSuchElementException - * @return the first element of the queue. - */ - def dequeue: (A, Queue[A]) = out match { - case Nil if !in.isEmpty => val rev = in.reverse ; (rev.head, new Queue(Nil, rev.tail)) - case x :: xs => (x, new Queue(in, xs)) - case _ => throw new NoSuchElementException("dequeue on empty queue") - } - - /** Optionally retrieves the first element and a queue of the remaining elements. - * - * @return A tuple of the first element of the queue, and a new queue with this element removed. - * If the queue is empty, `None` is returned. - */ - def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue) - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @throws NoSuchElementException - * @return the first element. - */ - def front: A = head - - /** Returns a string representation of this queue. 
- */ - override def toString(): String = mkString("Queue(", ", ", ")") -} - -/** $factoryInfo - * @define Coll `immutable.Queue` - * @define coll immutable queue - */ -@SerialVersionUID(3L) -object Queue extends StrictOptimizedSeqFactory[Queue] { - def newBuilder[sealed A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) - - def from[A](source: IterableOnce[A]^): Queue[A] = source match { - case q: Queue[A] => q - case _ => - val list = List.from(source) - if (list.isEmpty) empty - else new Queue(Nil, list) - } - - def empty[A]: Queue[A] = EmptyQueue - override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList) - - private object EmptyQueue extends Queue[Nothing](Nil, Nil) { } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Range.scala b/tests/pos-special/stdlib/collection/immutable/Range.scala deleted file mode 100644 index 459591d1a9cb..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Range.scala +++ /dev/null @@ -1,673 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.immutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.convert.impl.RangeStepper -import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - -/** The `Range` class represents integer values in range - * ''[start;end)'' with non-zero step value `step`. - * It's a special case of an indexed sequence. - * For example: - * - * {{{ - * val r1 = 0 until 10 - * val r2 = r1.start until r1.end by r1.step + 1 - * println(r2.length) // = 5 - * }}} - * - * Ranges that contain more than `Int.MaxValue` elements can be created, but - * these overfull ranges have only limited capabilities. Any method that - * could require a collection of over `Int.MaxValue` length to be created, or - * could be asked to index beyond `Int.MaxValue` elements will throw an - * exception. Overfull ranges can safely be reduced in size by changing - * the step size (e.g. `by 3`) or taking/dropping elements. `contains`, - * `equals`, and access to the ends of the range (`head`, `last`, `tail`, - * `init`) are also permitted on overfull ranges. - * - * @param start the start of this range. - * @param end the end of the range. For exclusive ranges, e.g. - * `Range(0,3)` or `(0 until 3)`, this is one - * step past the last one in the range. For inclusive - * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, - * it may be in the range if it is not skipped by the step size. - * To find the last element inside a non-empty range, - * use `last` instead. - * @param step the step for the range. - * - * @define coll range - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define doesNotUseBuilders - * '''Note:''' this method does not use builders to construct a new range, - * and its complexity is O(1). 
- */ -@SerialVersionUID(3L) -sealed abstract class Range( - val start: Int, - val end: Int, - val step: Int -) - extends AbstractSeq[Int] - with IndexedSeq[Int] - with IndexedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] - with StrictOptimizedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] - with IterableFactoryDefaults[Int, IndexedSeq] - with Serializable { range => - - final override def iterator: Iterator[Int] = new RangeIterator(start, step, lastElement, isEmpty) - - override final def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { - val st = new RangeStepper(start, step, 0, length) - val r = - if (shape.shape == StepperShape.IntShape) st - else { - assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") - AnyStepper.ofParIntStepper(st) - } - r.asInstanceOf[S with EfficientSplit] - } - - private[this] def gap = end.toLong - start.toLong - private[this] def isExact = gap % step == 0 - private[this] def hasStub = isInclusive || !isExact - private[this] def longLength = gap / step + ( if (hasStub) 1 else 0 ) - - def isInclusive: Boolean - - final override val isEmpty: Boolean = ( - (start > end && step > 0) - || (start < end && step < 0) - || (start == end && !isInclusive) - ) - - private[this] val numRangeElements: Int = { - if (step == 0) throw new IllegalArgumentException("step cannot be 0.") - else if (isEmpty) 0 - else { - val len = longLength - if (len > scala.Int.MaxValue) -1 - else len.toInt - } - } - - final def length = if (numRangeElements < 0) fail() else numRangeElements - - // This field has a sensible value only for non-empty ranges - private[this] val lastElement = step match { - case 1 => if (isInclusive) end else end-1 - case -1 => if (isInclusive) end else end+1 - case _ => - val remainder = (gap % step).toInt - if (remainder != 0) end - remainder - else if (isInclusive) end - else end - step - } - - /** The last element of this range. This method will return the correct value - * even if there are too many elements to iterate over. - */ - final override def last: Int = - if (isEmpty) throw Range.emptyRangeError("last") else lastElement - final override def head: Int = - if (isEmpty) throw Range.emptyRangeError("head") else start - - /** Creates a new range containing all the elements of this range except the last one. - * - * $doesNotUseBuilders - * - * @return a new range consisting of all the elements of this range except the last one. - */ - final override def init: Range = - if (isEmpty) throw Range.emptyRangeError("init") else dropRight(1) - - /** Creates a new range containing all the elements of this range except the first one. - * - * $doesNotUseBuilders - * - * @return a new range consisting of all the elements of this range except the first one. - */ - final override def tail: Range = { - if (isEmpty) throw Range.emptyRangeError("tail") - if (numRangeElements == 1) newEmptyRange(end) - else if(isInclusive) new Range.Inclusive(start + step, end, step) - else new Range.Exclusive(start + step, end, step) - } - - override def map[B](f: Int => B): IndexedSeq[B] = { - validateMaxLength() - super.map(f) - } - - final protected def copy(start: Int = start, end: Int = end, step: Int = step, isInclusive: Boolean = isInclusive): Range = - if(isInclusive) new Range.Inclusive(start, end, step) else new Range.Exclusive(start, end, step) - - /** Create a new range with the `start` and `end` values of this range and - * a new `step`. 
- * - * @return a new range with a different step - */ - final def by(step: Int): Range = copy(start, end, step) - - // Check cannot be evaluated eagerly because we have a pattern where - // ranges are constructed like: "x to y by z" The "x to y" piece - // should not trigger an exception. So the calculation is delayed, - // which means it will not fail fast for those cases where failing was - // correct. - private[this] def validateMaxLength(): Unit = { - if (numRangeElements < 0) - fail() - } - private[this] def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step) - private[this] def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.") - - @throws[IndexOutOfBoundsException] - final def apply(idx: Int): Int = { - validateMaxLength() - if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${numRangeElements-1})") - else start + (step * idx) - } - - /*@`inline`*/ final override def foreach[@specialized(Unit) U](f: Int => U): Unit = { - // Implementation chosen on the basis of favorable microbenchmarks - // Note--initialization catches step == 0 so we don't need to here - if (!isEmpty) { - var i = start - while (true) { - f(i) - if (i == lastElement) return - i += step - } - } - } - - override final def indexOf[@specialized(Int) B >: Int](elem: B, from: Int = 0): Int = - elem match { - case i: Int => - val pos = posOf(i) - if (pos >= from) pos else -1 - case _ => super.indexOf(elem, from) - } - - override final def lastIndexOf[@specialized(Int) B >: Int](elem: B, end: Int = length - 1): Int = - elem match { - case i: Int => - val pos = posOf(i) - if (pos <= end) pos else -1 - case _ => super.lastIndexOf(elem, end) - } - - private[this] def posOf(i: Int): Int = - if (contains(i)) (i - start) / step else -1 - - override def sameElements[B >: Int](that: IterableOnce[B]^): Boolean = that match { - case other: Range => - (this.length : @annotation.switch) match { - case 0 => other.isEmpty - case 1 => other.length == 1 && this.start == other.start - case n => other.length == n && ( - (this.start == other.start) - && (this.step == other.step) - ) - } - case _ => super.sameElements(that) - } - - /** Creates a new range containing the first `n` elements of this range. - * - * @param n the number of elements to take. - * @return a new range consisting of `n` first elements. - */ - final override def take(n: Int): Range = - if (n <= 0 || isEmpty) newEmptyRange(start) - else if (n >= numRangeElements && numRangeElements >= 0) this - else { - // May have more than Int.MaxValue elements in range (numRangeElements < 0) - // but the logic is the same either way: take the first n - new Range.Inclusive(start, locationAfterN(n - 1), step) - } - - /** Creates a new range containing all the elements of this range except the first `n` elements. - * - * @param n the number of elements to drop. - * @return a new range consisting of all the elements of this range except `n` first elements. - */ - final override def drop(n: Int): Range = - if (n <= 0 || isEmpty) this - else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end) - else { - // May have more than Int.MaxValue elements (numRangeElements < 0) - // but the logic is the same either way: go forwards n steps, keep the rest - copy(locationAfterN(n), end, step) - } - - /** Creates a new range consisting of the last `n` elements of the range. 
- * - * $doesNotUseBuilders - */ - final override def takeRight(n: Int): Range = { - if (n <= 0) newEmptyRange(start) - else if (numRangeElements >= 0) drop(numRangeElements - n) - else { - // Need to handle over-full range separately - val y = last - val x = y - step.toLong*(n-1) - if ((step > 0 && x < start) || (step < 0 && x > start)) this - else Range.inclusive(x.toInt, y, step) - } - } - - /** Creates a new range consisting of the initial `length - n` elements of the range. - * - * $doesNotUseBuilders - */ - final override def dropRight(n: Int): Range = { - if (n <= 0) this - else if (numRangeElements >= 0) take(numRangeElements - n) - else { - // Need to handle over-full range separately - val y = last - step.toInt*n - if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start) - else Range.inclusive(start, y.toInt, step) - } - } - - // Advance from the start while we meet the given test - private[this] def argTakeWhile(p: Int => Boolean): Long = { - if (isEmpty) start - else { - var current = start - val stop = last - while (current != stop && p(current)) current += step - if (current != stop || !p(current)) current - else current.toLong + step - } - } - - final override def takeWhile(p: Int => Boolean): Range = { - val stop = argTakeWhile(p) - if (stop==start) newEmptyRange(start) - else { - val x = (stop - step).toInt - if (x == last) this - else Range.inclusive(start, x, step) - } - } - - final override def dropWhile(p: Int => Boolean): Range = { - val stop = argTakeWhile(p) - if (stop == start) this - else { - val x = (stop - step).toInt - if (x == last) newEmptyRange(last) - else Range.inclusive(x + step, last, step) - } - } - - final override def span(p: Int => Boolean): (Range, Range) = { - val border = argTakeWhile(p) - if (border == start) (newEmptyRange(start), this) - else { - val x = (border - step).toInt - if (x == last) (this, newEmptyRange(last)) - else (Range.inclusive(start, x, step), Range.inclusive(x+step, last, step)) - } - } - - /** Creates a new range containing the elements starting at `from` up to but not including `until`. - * - * $doesNotUseBuilders - * - * @param from the element at which to start - * @param until the element at which to end (not included in the range) - * @return a new range consisting of a contiguous interval of values in the old range - */ - final override def slice(from: Int, until: Int): Range = - if (from <= 0) take(until) - else if (until >= numRangeElements && numRangeElements >= 0) drop(from) - else { - val fromValue = locationAfterN(from) - if (from >= until) newEmptyRange(fromValue) - else Range.inclusive(fromValue, locationAfterN(until-1), step) - } - - // Overridden only to refine the return type - final override def splitAt(n: Int): (Range, Range) = (take(n), drop(n)) - - // Methods like apply throw exceptions on invalid n, but methods like take/drop - // are forgiving: therefore the checks are with the methods. - private[this] def locationAfterN(n: Int) = start + (step * n) - - // When one drops everything. Can't ever have unchecked operations - // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } - // will overflow. This creates an exclusive range where start == end - // based on the given value. - private[this] def newEmptyRange(value: Int) = new Range.Exclusive(value, value, step) - - /** Returns the reverse of this range. - */ - final override def reverse: Range = - if (isEmpty) this - else new Range.Inclusive(last, start, -step) - - /** Make range inclusive. 
- */ - final def inclusive: Range = - if (isInclusive) this - else new Range.Inclusive(start, end, step) - - final def contains(x: Int): Boolean = { - if (x == end && !isInclusive) false - else if (step > 0) { - if (x < start || x > end) false - else (step == 1) || (Integer.remainderUnsigned(x - start, step) == 0) - } - else { - if (x < end || x > start) false - else (step == -1) || (Integer.remainderUnsigned(start - x, -step) == 0) - } - } - /* Seq#contains has a type parameter so the optimised contains above doesn't override it */ - override final def contains[B >: Int](elem: B): Boolean = elem match { - case i: Int => this.contains(i) - case _ => super.contains(elem) - } - - final override def sum[B >: Int](implicit num: Numeric[B]): Int = { - if (num eq scala.math.Numeric.IntIsIntegral) { - // this is normal integer range with usual addition. arithmetic series formula can be used - if (isEmpty) 0 - else if (size == 1) head - else ((size * (head.toLong + last)) / 2).toInt - } else { - // user provided custom Numeric, we cannot rely on arithmetic series formula - if (isEmpty) num.toInt(num.zero) - else { - var acc = num.zero - var i = head - while (true) { - acc = num.plus(acc, i) - if (i == lastElement) return num.toInt(acc) - i = i + step - } - 0 // Never hit this--just to satisfy compiler since it doesn't know while(true) has type Nothing - } - } - } - - final override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = - if (ord eq Ordering.Int) { - if (step > 0) head - else last - } else if (Ordering.Int isReverseOf ord) { - if (step > 0) last - else head - } else super.min(ord) - - final override def max[A1 >: Int](implicit ord: Ordering[A1]): Int = - if (ord eq Ordering.Int) { - if (step > 0) last - else head - } else if (Ordering.Int isReverseOf ord) { - if (step > 0) head - else last - } else super.max(ord) - - override def tails: Iterator[Range] = - new AbstractIterator[Range] { - private[this] var i = 0 - override def hasNext = i <= Range.this.length - override def next() = { - if (hasNext) { - val res = Range.this.drop(i) - i += 1 - res - } else { - Iterator.empty.next() - } - } - } - - override def inits: Iterator[Range] = - new AbstractIterator[Range] { - private[this] var i = 0 - override def hasNext = i <= Range.this.length - override def next() = { - if (hasNext) { - val res = Range.this.dropRight(i) - i += 1 - res - } else { - Iterator.empty.next() - } - } - } - override protected final def applyPreferredMaxLength: Int = Int.MaxValue - - final override def equals(other: Any): Boolean = other match { - case x: Range => - // Note: this must succeed for overfull ranges (length > Int.MaxValue) - if (isEmpty) x.isEmpty // empty sequences are equal - else // this is non-empty... 
- x.nonEmpty && start == x.start && { // ...so other must contain something and have same start - val l0 = last - (l0 == x.last && ( // And same end - start == l0 || step == x.step // And either the same step, or not take any steps - )) - } - case _ => - super.equals(other) - } - - final override def hashCode: Int = - if(length >= 2) MurmurHash3.rangeHash(start, step, lastElement) - else super.hashCode - - final override def toString: String = { - val preposition = if (isInclusive) "to" else "until" - val stepped = if (step == 1) "" else s" by $step" - val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else "" - s"${prefix}Range $start $preposition $end$stepped" - } - - override protected[this] def className = "Range" - - override def distinct: Range = this - - override def grouped(size: Int): Iterator[Range] = { - require(size >= 1, f"size=$size%d, but size must be positive") - if (isEmpty) { - Iterator.empty - } else { - val s = size - new AbstractIterator[Range] { - private[this] var i = 0 - override def hasNext = Range.this.length > i - override def next() = - if (hasNext) { - val x = Range.this.slice(i, i + s) - i += s - x - } else { - Iterator.empty.next() - } - } - } - } - - override def sorted[B >: Int](implicit ord: Ordering[B]): IndexedSeq[Int] = - if (ord eq Ordering.Int) { - if (step > 0) { - this - } else { - reverse - } - } else { - super.sorted(ord) - } -} - -/** - * Companion object for ranges. - * @define Coll `Range` - * @define coll range - */ -object Range { - - /** Counts the number of range elements. - * precondition: step != 0 - * If the size of the range exceeds Int.MaxValue, the - * result will be negative. - */ - def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = { - if (step == 0) - throw new IllegalArgumentException("step cannot be 0.") - - val isEmpty = - if (start == end) !isInclusive - else if (start < end) step < 0 - else step > 0 - - if (isEmpty) 0 - else { - // Counts with Longs so we can recognize too-large ranges. - val gap: Long = end.toLong - start.toLong - val jumps: Long = gap / step - // Whether the size of this range is one larger than the - // number of full-sized jumps. - val hasStub = isInclusive || (gap % step != 0) - val result: Long = jumps + ( if (hasStub) 1 else 0 ) - - if (result > scala.Int.MaxValue) -1 - else result.toInt - } - } - def count(start: Int, end: Int, step: Int): Int = - count(start, end, step, isInclusive = false) - - /** Make a range from `start` until `end` (exclusive) with given step value. - * @note step != 0 - */ - def apply(start: Int, end: Int, step: Int): Range.Exclusive = new Range.Exclusive(start, end, step) - - /** Make a range from `start` until `end` (exclusive) with step value 1. - */ - def apply(start: Int, end: Int): Range.Exclusive = new Range.Exclusive(start, end, 1) - - /** Make an inclusive range from `start` to `end` with given step value. - * @note step != 0 - */ - def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Range.Inclusive(start, end, step) - - /** Make an inclusive range from `start` to `end` with step value 1. 
- */ - def inclusive(start: Int, end: Int): Range.Inclusive = new Range.Inclusive(start, end, 1) - - @SerialVersionUID(3L) - final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { - def isInclusive: Boolean = true - } - - @SerialVersionUID(3L) - final class Exclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { - def isInclusive: Boolean = false - } - - // BigInt and Long are straightforward generic ranges. - object BigInt { - def apply(start: BigInt, end: BigInt, step: BigInt): NumericRange.Exclusive[BigInt] = NumericRange(start, end, step) - def inclusive(start: BigInt, end: BigInt, step: BigInt): NumericRange.Inclusive[BigInt] = NumericRange.inclusive(start, end, step) - } - - object Long { - def apply(start: Long, end: Long, step: Long): NumericRange.Exclusive[Long] = NumericRange(start, end, step) - def inclusive(start: Long, end: Long, step: Long): NumericRange.Inclusive[Long] = NumericRange.inclusive(start, end, step) - } - - // BigDecimal uses an alternative implementation of Numeric in which - // it pretends to be Integral[T] instead of Fractional[T]. See Numeric for - // details. The intention is for it to throw an exception anytime - // imprecision or surprises might result from anything, although this may - // not yet be fully implemented. - object BigDecimal { - implicit val bigDecAsIntegral: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral - - def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = - NumericRange(start, end, step) - def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Inclusive[BigDecimal] = - NumericRange.inclusive(start, end, step) - } - - // As there is no appealing default step size for not-really-integral ranges, - // we offer a partially constructed object. - class Partial[T, U](private val f: T -> U) extends AnyVal { - def by(x: T): U = f(x) - override def toString = "Range requires step" - } - - // Illustrating genericity with Int Range, which should have the same behavior - // as the original Range class. However we leave the original Range - // indefinitely, for performance and because the compiler seems to bootstrap - // off it and won't do so with our parameterized version without modifications. 
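A quick, hedged sketch of the companion API above (REPL-style usage, not part of the patched sources; the results follow from the `count` formula and from `Partial`'s contract that `by` supplies the missing step):

  Range.count(0, 10, 3)                              // 4 elements: 0, 3, 6, 9
  Range.count(0, Int.MaxValue, 1, isInclusive = true) // -1: more than Int.MaxValue elements
  new Range.Partial((s: Int) => Range(0, 10, s)).by(2).toList
                                                     // List(0, 2, 4, 6, 8)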
-  object Int {
-    def apply(start: Int, end: Int, step: Int): NumericRange.Exclusive[Int] = NumericRange(start, end, step)
-    def inclusive(start: Int, end: Int, step: Int): NumericRange.Inclusive[Int] = NumericRange.inclusive(start, end, step)
-  }
-
-  private def emptyRangeError(what: String): Throwable =
-    new NoSuchElementException(what + " on empty Range")
-}
-
-/**
-  * @param lastElement The last element included in the Range
-  * @param initiallyEmpty Whether the Range was initially empty or not
-  */
-@SerialVersionUID(3L)
-private class RangeIterator(
-  start: Int,
-  step: Int,
-  lastElement: Int,
-  initiallyEmpty: Boolean
-) extends AbstractIterator[Int] with Serializable {
-  private[this] var _hasNext: Boolean = !initiallyEmpty
-  private[this] var _next: Int = start
-  override def knownSize: Int = if (_hasNext) (lastElement - _next) / step + 1 else 0
-  def hasNext: Boolean = _hasNext
-  @throws[NoSuchElementException]
-  def next(): Int = {
-    if (!_hasNext) Iterator.empty.next()
-    val value = _next
-    _hasNext = value != lastElement
-    _next = value + step
-    value
-  }
-
-  override def drop(n: Int): Iterator[Int] = {
-    if (n > 0) {
-      val longPos = _next.toLong + step * n
-      if (step > 0) {
-        _next = Math.min(lastElement, longPos).toInt
-        _hasNext = longPos <= lastElement
-      }
-      else if (step < 0) {
-        _next = Math.max(lastElement, longPos).toInt
-        _hasNext = longPos >= lastElement
-      }
-    }
-    this
-  }
-}
diff --git a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala
deleted file mode 100644
index 5fbc927d7a21..000000000000
--- a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala
+++ /dev/null
@@ -1,1234 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */
-
-package scala
-package collection
-package immutable
-
-import scala.annotation.meta.{getter, setter}
-import scala.annotation.tailrec
-import scala.runtime.Statics.releaseFence
-import language.experimental.captureChecking
-import scala.annotation.unchecked.uncheckedCaptures
-
-/** An object containing the RedBlack tree implementation used for `TreeMaps` and `TreeSets`.
- *
- * Implementation note: since efficiency is important for data structures this implementation
- * uses `null` to represent empty trees. This also means pattern matching cannot
- * easily be used. The API represented by the RedBlackTree object tries to hide these
- * optimizations behind a reasonably clean API.

- */ -private[collection] object RedBlackTree { - - def isEmpty(tree: Tree[_, _]): Boolean = tree eq null - - def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null - def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match { - case null => None - case tree => Some(tree.value) - } - - @tailrec - def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - val cmp = ordering.compare(x, tree.key) - if (cmp < 0) lookup(tree.left, x) - else if (cmp > 0) lookup(tree.right, x) - else tree - } - private[immutable] abstract class Helper[A](implicit val ordering: Ordering[A]) { - def beforePublish[B](tree: Tree[A, B]): Tree[A, B] = { - if (tree eq null) tree - else if (tree.isMutable) { - val res = tree.mutableBlack.makeImmutable - releaseFence() - res - } else tree.black - } - /** Create a new balanced tree where `newLeft` replaces `tree.left`. - * tree and newLeft are never null */ - protected[this] final def mutableBalanceLeft[A1, B, B1 >: B](tree: Tree[A1, B], newLeft: Tree[A1, B1]): Tree[A1, B1] = { - // Parameter trees - // tree | newLeft - // -- KV R | nl.L nl.KV nl.R - // | nl.R.L nl.R.KV nl.R.R - //Note - unlike the immutable trees we can't consider tree.left eq newLeft - //as the balance operations may mutate the same object - //but that check was mostly to avoid the object creation - if (newLeft.isRed) { - val newLeft_left = newLeft.left - val newLeft_right = newLeft.right - if (isRedTree(newLeft_left)) { - // RED - // black(nl.L) nl.KV black - // nl.R KV R - val resultLeft = newLeft_left.mutableBlack - val resultRight = tree.mutableBlackWithLeft(newLeft_right) - - newLeft.mutableWithLeftRight(resultLeft, resultRight) - } else if (isRedTree(newLeft_right)) { - // RED - // black nl.R.KV black - // nl.L nl.KV nl.R.L nl.R.R KV R - - val newLeft_right_right = newLeft_right.right - - val resultLeft = newLeft.mutableBlackWithRight(newLeft_right.left) - val resultRight = tree.mutableBlackWithLeft(newLeft_right_right) - - newLeft_right.mutableWithLeftRight(resultLeft, resultRight) - } else { - // tree - // newLeft KV R - tree.mutableWithLeft(newLeft) - } - } else { - // tree - // newLeft KV R - tree.mutableWithLeft(newLeft) - } - } - /** Create a new balanced tree where `newRight` replaces `tree.right`. 
- * tree and newRight are never null */ - protected[this] final def mutableBalanceRight[A1, B, B1 >: B](tree: Tree[A1, B], newRight: Tree[A1, B1]): Tree[A1, B1] = { - // Parameter trees - // tree | newRight - // L KV -- | nr.L nr.KV nr.R - // | nr.L.L nr.L.KV nr.L.R - //Note - unlike the immutable trees we can't consider tree.right eq newRight - //as the balance operations may mutate the same object - //but that check was mostly to avoid the object creation - if (newRight.isRed) { - val newRight_left = newRight.left - if (isRedTree(newRight_left)) { - // RED - // black nr.L.KV black - // L KV nr.L.L nr.L.R nr.KV nr.R - - val resultLeft = tree.mutableBlackWithRight(newRight_left.left) - val resultRight = newRight.mutableBlackWithLeft(newRight_left.right) - - newRight_left.mutableWithLeftRight(resultLeft, resultRight) - - } else { - val newRight_right = newRight.right - if (isRedTree(newRight_right)) { - // RED - // black nr.KV black(nr.R) - // L KV nr.L - - val resultLeft = tree.mutableBlackWithRight(newRight_left) - val resultRight = newRight_right.mutableBlack - - newRight.mutableWithLeftRight(resultLeft, resultRight) - } else { - // tree - // L KV newRight - tree.mutableWithRight(newRight) - } - } - } else { - // tree - // L KV newRight - tree.mutableWithRight(newRight) - } - } - } - private[immutable] class SetHelper[A](implicit ordering: Ordering[A]) extends Helper[A] { - protected[this] final def mutableUpd(tree: Tree[A, Any], k: A): Tree[A, Any] = - if (tree eq null) { - mutableRedTree(k, (), null, null) - } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { - tree - } else { - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) - mutableBalanceLeft(tree, mutableUpd(tree.left, k)) - else if (cmp > 0) - mutableBalanceRight(tree, mutableUpd(tree.right, k)) - else tree - } - } - private[immutable] class MapHelper[A, B](implicit ordering: Ordering[A]) extends Helper[A] { - protected[this] final def mutableUpd[B1 >: B](tree: Tree[A, B], k: A, v: B1): Tree[A, B1] = - if (tree eq null) { - mutableRedTree(k, v, null, null) - } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { - tree.mutableWithV(v) - } else { - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) - mutableBalanceLeft(tree, mutableUpd(tree.left, k, v)) - else if (cmp > 0) - mutableBalanceRight(tree, mutableUpd(tree.right, k, v)) - else tree.mutableWithV(v) - } - } - - def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count - def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) - def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) - def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { - case (Some(from), Some(until)) => this.range(tree, from, until) - case (Some(from), None) => this.from(tree, from) - case (None, Some(until)) => this.until(tree, until) - case (None, None) => tree - } - def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) - def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from)) - def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) - def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) - - def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n)) - def take[A: Ordering, B](tree: 
Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n)) - def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) - - def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { - if (tree eq null) throw new NoSuchElementException("empty tree") - var result = tree - while (result.left ne null) result = result.left - result - } - def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { - if (tree eq null) throw new NoSuchElementException("empty tree") - var result = tree - while (result.right ne null) result = result.right - result - } - - def tail[A, B](tree: Tree[A, B]): Tree[A, B] = { - def _tail(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) throw new NoSuchElementException("empty tree") - else { - val tl = tree.left - if (tl eq null) tree.right - else if (tl.isBlack) balLeft(tree, _tail(tl), tree.right) - else tree.redWithLeft(_tail(tree.left)) - } - blacken(_tail(tree)) - } - - def init[A, B](tree: Tree[A, B]): Tree[A, B] = { - def _init(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) throw new NoSuchElementException("empty tree") - else { - val tr = tree.right - if (tr eq null) tree.left - else if (tr.isBlack) balRight(tree, tree.left, _init(tr)) - else tree.redWithRight(_init(tr)) - } - blacken(_init(tree)) - } - - /** - * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node. - */ - def minAfter[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - val cmp = ordering.compare(x, tree.key) - if (cmp == 0) tree - else if (cmp < 0) { - val l = minAfter(tree.left, x) - if (l != null) l else tree - } else minAfter(tree.right, x) - } - - /** - * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node. 
- */ - def maxBefore[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - val cmp = ordering.compare(x, tree.key) - if (cmp <= 0) maxBefore(tree.left, x) - else { - val r = maxBefore(tree.right, x) - if (r != null) r else tree - } - } - - def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) - - def keysEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { - if (a eq b) true - else if (a eq null) false - else if (b eq null) false - else a.count == b.count && (new EqualsIterator(a)).sameKeys(new EqualsIterator(b)) - } - def valuesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { - if (a eq b) true - else if (a eq null) false - else if (b eq null) false - else a.count == b.count && (new EqualsIterator(a)).sameValues(new EqualsIterator(b)) - } - def entriesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { - if (a eq b) true - else if (a eq null) false - else if (b eq null) false - else a.count == b.count && (new EqualsIterator(a)).sameEntries(new EqualsIterator(b)) - } - - private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = { - if (tree.left ne null) _foreach(tree.left, f) - f((tree.key, tree.value)) - if (tree.right ne null) _foreach(tree.right, f) - } - - def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) - - private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { - if (tree.left ne null) _foreachKey(tree.left, f) - f((tree.key)) - if (tree.right ne null) _foreachKey(tree.right, f) - } - - def foreachEntry[A, B, U](tree:Tree[A,B], f: (A, B) => U):Unit = if (tree ne null) _foreachEntry(tree,f) - - private[this] def _foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { - if (tree.left ne null) _foreachEntry(tree.left, f) - f(tree.key, tree.value) - if (tree.right ne null) _foreachEntry(tree.right, f) - } - - def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) - def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) - def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) - - @tailrec - def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { - val count = this.count(tree.left) - if (n < count) nth(tree.left, n) - else if (n > count) nth(tree.right, n - count - 1) - else tree - } - - def isBlack(tree: Tree[_, _]) = (tree eq null) || tree.isBlack - - @`inline` private[this] def isRedTree(tree: Tree[_, _]) = (tree ne null) && tree.isRed - @`inline` private[this] def isBlackTree(tree: Tree[_, _]) = (tree ne null) && tree.isBlack - - private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black - - // Blacken if the tree is red and has a red child. This is necessary when using methods such as `upd` or `updNth` - // for building subtrees. Use `blacken` instead when building top-level trees. 
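To make the rule in the comment above concrete before its use below, here is a small self-contained model (a hypothetical, simplified `Node` type for illustration only, not this file's `Tree`):

  sealed trait Colour
  case object Red extends Colour
  case object Black extends Colour
  final case class Node(colour: Colour, left: Option[Node], right: Option[Node])

  // Top-level (published) trees must always have a black root:
  def blackenNode(n: Node): Node = n.copy(colour = Black)

  // A subtree built by upd/updNth may legally keep a red root, as long as it
  // does not sit directly above another red node:
  def maybeBlackenNode(n: Node): Node =
    if (n.colour == Black) n
    else if (n.left.exists(_.colour == Red) || n.right.exists(_.colour == Red)) n.copy(colour = Black)
    else n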
- private[this] def maybeBlacken[A, B](t: Tree[A, B]): Tree[A, B] = - if(isBlack(t)) t else if(isRedTree(t.left) || isRedTree(t.right)) t.black else t - - private[this] def mkTree[A, B](isBlack: Boolean, key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = { - val sizeAndColour = sizeOf(left) + sizeOf(right) + 1 | (if(isBlack) initialBlackCount else initialRedCount) - new Tree(key, value.asInstanceOf[AnyRef], left, right, sizeAndColour) - } - - /** Create a new balanced tree where `newLeft` replaces `tree.left`. */ - private[this] def balanceLeft[A, B1](tree: Tree[A, B1], newLeft: Tree[A, B1]): Tree[A, B1] = { - // Parameter trees - // tree | newLeft - // -- KV R | nl.L nl.KV nl.R - // | nl.R.L nl.R.KV nl.R.R - if (tree.left eq newLeft) tree - else { - if (newLeft.isRed) { - val newLeft_left = newLeft.left - val newLeft_right = newLeft.right - if (isRedTree(newLeft_left)) { - // RED - // black(nl.L) nl.KV black - // nl.R KV R - val resultLeft = newLeft_left.black - val resultRight = tree.blackWithLeft(newLeft_right) - - newLeft.withLeftRight(resultLeft, resultRight) - } else if (isRedTree(newLeft_right)) { - // RED - // black nl.R.KV black - // nl.L nl.KV nl.R.L nl.R.R KV R - val newLeft_right_right = newLeft_right.right - - val resultLeft = newLeft.blackWithRight(newLeft_right.left) - val resultRight = tree.blackWithLeft(newLeft_right_right) - - newLeft_right.withLeftRight(resultLeft, resultRight) - } else { - // tree - // newLeft KV R - tree.withLeft(newLeft) - } - } else { - // tree - // newLeft KV R - tree.withLeft(newLeft) - } - } - } - /** Create a new balanced tree where `newRight` replaces `tree.right`. */ - private[this] def balanceRight[A, B1](tree: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { - // Parameter trees - // tree | newRight - // L KV -- | nr.L nr.KV nr.R - // | nr.L.L nr.L.KV nr.L.R - if (tree.right eq newRight) tree - else { - if (newRight.isRed) { - val newRight_left = newRight.left - if (isRedTree(newRight_left)) { - // RED - // black nr.L.KV black - // L KV nr.L.L nr.L.R nr.KV nr.R - val resultLeft = tree.blackWithRight(newRight_left.left) - val resultRight = newRight.blackWithLeft(newRight_left.right) - - newRight_left.withLeftRight(resultLeft, resultRight) - } else { - val newRight_right = newRight.right - if (isRedTree(newRight_right)) { - // RED - // black nr.KV black(nr.R) - // L KV nr.L - val resultLeft = tree.blackWithRight(newRight_left) - val resultRight = newRight_right.black - - newRight.withLeftRight(resultLeft, resultRight) - } else { - // tree - // L KV newRight - tree.withRight(newRight) - } - } - } else { - // tree - // L KV newRight - tree.withRight(newRight) - } - } - } - - private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { - RedTree(k, v, null, null) - } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { - if (overwrite) - tree.withV(v) - else tree - } else { - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) - balanceLeft(tree, upd(tree.left, k, v, overwrite)) - else if (cmp > 0) - balanceRight(tree, upd(tree.right, k, v, overwrite)) - else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.value.asInstanceOf[AnyRef])) - tree.withV(v) - else tree - } - private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) { - RedTree(k, v, null, null) - } else { - val rank = count(tree.left) + 1 - if (idx < rank) - balanceLeft(tree, updNth(tree.left, idx, k, v)) - else 
if (idx > rank) - balanceRight(tree, updNth(tree.right, idx - rank, k, v)) - else tree - } - - private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) - val newLeft = doFrom(tree.left, from) - if (newLeft eq tree.left) tree - else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false) - else join(newLeft, tree.key, tree.value, tree.right) - } - private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lt(to, tree.key)) return doTo(tree.left, to) - val newRight = doTo(tree.right, to) - if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) - else join (tree.left, tree.key, tree.value, newRight) - } - private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) - val newRight = doUntil(tree.right, until) - if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) - else join(tree.left, tree.key, tree.value, newRight) - } - - private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until) - if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until) - val newLeft = doFrom(tree.left, from) - val newRight = doUntil(tree.right, until) - if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false) - else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false) - else join(newLeft, tree.key, tree.value, newRight) - } - - private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = - if((tree eq null) || (n <= 0)) tree - else if(n >= tree.count) null - else { - val l = count(tree.left) - if(n > l) doDrop(tree.right, n-l-1) - else if(n == l) join(null, tree.key, tree.value, tree.right) - else join(doDrop(tree.left, n), tree.key, tree.value, tree.right) - } - - private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = - if((tree eq null) || (n <= 0)) null - else if(n >= tree.count) tree - else { - val l = count(tree.left) - if(n <= l) doTake(tree.left, n) - else if(n == l+1) maybeBlacken(updNth(tree.left, n, tree.key, tree.value)) - else join(tree.left, tree.key, tree.value, doTake(tree.right, n-l-1)) - } - - private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = - if((tree eq null) || (from >= until) || (from >= tree.count) || (until <= 0)) null - else if((from <= 0) && (until >= tree.count)) tree - else { - val l = count(tree.left) - if(until <= l) doSlice(tree.left, from, until) - else if(from > l) doSlice(tree.right, from-l-1, until-l-1) - else join(doDrop(tree.left, from), tree.key, tree.value, doTake(tree.right, until-l-1)) - } - - /* - * Forcing direct fields access using the @`inline` annotation helps speed up - * various operations (especially smallest/greatest and update/delete). - * - * Unfortunately the direct field access is not guaranteed to work (but - * works on the current implementation of the Scala compiler). 
- *
- * An alternative is to implement these classes using plain old Java code...
- *
- * Mutability
- * This implementation encodes both mutable and immutable trees.
- * Mutable trees are never exposed to the user code but we get significant reductions in both CPU and allocations
- * by maintaining a mutable tree during internal operations, e.g. a builder building a Tree, and other bulk
- * APIs such as filter or ++
- *
- * Mutable trees are only used within the confines of this bulk operation and not shared
- * Mutable trees may transition to become immutable by calling beforePublish
- * Mutable trees may have child nodes (left and right) which are immutable Trees (this promotes structural sharing)
- *
- * Immutable trees may only have child nodes (left and right) which are immutable Trees, and as such the entire
- * transitive subtree of an immutable tree is immutable
- *
- * Colour, mutability and size encoding
- * The colour of the Tree, its mutability and size are all encoded in the _count field
- * The colour is encoded in the top bit (31) of the _count. This allows a mutable tree to change colour without
- * additional allocation
- * The mutable trees always have bits 0 .. 30 (inclusive) set to 0
- * The immutable trees always have bits 0 .. 30 containing the size of the transitive subtree
- *
- * Naming
- * All of the methods that can yield a mutable result have "mutable" in their name, and generally there
- * is another method similarly named which doesn't. This is to aid safety and to reduce the cognitive load when
- * reviewing changes. e.g.
- * def upd(...) will update an immutable Tree, producing an immutable Tree
- * def mutableUpd(...) will update a mutable or immutable Tree and may return a mutable or immutable Tree
- * a method that has mutable in its name may return an immutable tree if the operation can reuse the existing tree
- *
- */
-  private[immutable] final class Tree[A, +B](
-    @(`inline` @getter @setter) private var _key: A,
-    @(`inline` @getter @setter) private var _value: AnyRef,
-    @(`inline` @getter @setter) private var _left: Tree[A, _],
-    @(`inline` @getter @setter) private var _right: Tree[A, _],
-    @(`inline` @getter @setter) private var _count: Int)
-  {
-    @`inline` private[RedBlackTree] def isMutable: Boolean = (_count & colourMask) == 0
-    // read only APIs
-    @`inline` private[RedBlackTree] final def count = {
-      //devTimeAssert((_count & 0x7FFFFFFF) != 0)
-      _count & colourMask
-    }
-    //retain the colour, and mark as mutable
-    @`inline` private def mutableRetainingColour = _count & colourBit
-
-    //inlined here to avoid outer object null checks
-    @`inline` private[RedBlackTree] final def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count
-    @`inline` private[immutable] final def key = _key
-    @`inline` private[immutable] final def value = _value.asInstanceOf[B]
-    @`inline` private[immutable] final def left = _left.asInstanceOf[Tree[A, B]]
-    @`inline` private[immutable] final def right = _right.asInstanceOf[Tree[A, B]]
-    //Note - only used in tests outside RedBlackTree
-    @`inline` private[immutable] final def isBlack = _count < 0
-    //Note - only used in tests outside RedBlackTree
-    @`inline` private[immutable] final def isRed = _count >= 0
-
-    override def toString: String = s"${if(isRed) "RedTree" else "BlackTree"}($key, $value, $left, $right)"
-
-    //mutable APIs
-    private[RedBlackTree] def makeImmutable: Tree[A, B] = {
-      def makeImmutableImpl() = {
-        if (isMutable) {
-          var size = 1
-          if (_left ne null) {
-            _left.makeImmutable
-            size += _left.count
-          }
-
if (_right ne null) { - _right.makeImmutable - size += _right.count - } - _count |= size //retains colour - } - this - } - makeImmutableImpl() - this - } - - private[RedBlackTree] def mutableBlack: Tree[A, B] = { - if (isBlack) this - else if (isMutable) { - _count = initialBlackCount - this - } - else new Tree(_key, _value, _left, _right, initialBlackCount) - } -// private[RedBlackTree] def mutableRed: Tree[A, B] = { -// if (isRed) this -// else if (mutable) { -// _count = initialRedCount -// this -// } -// else new Tree(_key, _value, _left, _right, initialRedCount) -// } - - private[RedBlackTree] def mutableWithV[B1 >: B](newValue: B1): Tree[A, B1] = { - if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this - else if (isMutable) { - _value = newValue.asInstanceOf[AnyRef] - this - } else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour) - } - - private[RedBlackTree] def mutableWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { - if (_left eq newLeft) this - else if (isMutable) { - _left = newLeft - this - } else new Tree(_key, _value, newLeft, _right, mutableRetainingColour) - } - private[RedBlackTree] def mutableWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { - if (_right eq newRight) this - else if (isMutable) { - _right = newRight - this - } else new Tree(_key, _value, _left, newRight, mutableRetainingColour) - } - private[RedBlackTree] def mutableWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { - if ((_left eq newLeft) && (_right eq newRight)) this - else if (isMutable) { - _left = newLeft - _right = newRight - this - } else new Tree(_key, _value, newLeft, newRight, mutableRetainingColour) - } - private[RedBlackTree] def mutableBlackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { - if ((_left eq newLeft) && isBlack) this - else if (isMutable) { - _count = initialBlackCount - _left = newLeft - this - } else new Tree(_key, _value, newLeft, _right, initialBlackCount) - } - private[RedBlackTree] def mutableBlackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { - if ((_right eq newRight) && isBlack) this - else if (isMutable) { - _count = initialBlackCount - _right = newRight - this - } else new Tree(_key, _value, _left, newRight, initialBlackCount) - } - - private[RedBlackTree] def black: Tree[A, B] = { - //assertNotMutable(this) - if (isBlack) this - else new Tree(_key, _value, _left, _right, _count ^ colourBit) - } - private[RedBlackTree] def red: Tree[A, B] = { - //assertNotMutable(this) - if (isRed) this - else new Tree(_key, _value, _left, _right, _count ^ colourBit) - } - private[RedBlackTree] def withKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = { - //assertNotMutable(this) - if ((newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) && - (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef])) this - else new Tree(newKey, newValue.asInstanceOf[AnyRef], _left, _right, _count) - } - private[RedBlackTree] def withV[B1 >: B](newValue: B1): Tree[A, B1] = { - //assertNotMutable(this) - if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this - else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, _count) - } - - private[RedBlackTree] def withLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newLeft) - if (newLeft eq _left) this - else { - val size = sizeOf(newLeft) + sizeOf(_right) + 1 - new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, (_count & colourBit) | size) 
-      }
-    }
-    private[RedBlackTree] def withRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
-      //assertNotMutable(this)
-      //assertNotMutable(newRight)
-      if (newRight eq _right) this
-      else {
-        val size = sizeOf(_left) + sizeOf(newRight) + 1
-        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, (_count & colourBit) | size)
-      }
-    }
-    private[RedBlackTree] def blackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
-      //assertNotMutable(this)
-      //assertNotMutable(newLeft)
-      if ((newLeft eq _left) && isBlack) this
-      else {
-        val size = sizeOf(newLeft) + sizeOf(_right) + 1
-        new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialBlackCount | size)
-      }
-    }
-    private[RedBlackTree] def redWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
-      //assertNotMutable(this)
-      //assertNotMutable(newLeft)
-      if ((newLeft eq _left) && isRed) this
-      else {
-        val size = sizeOf(newLeft) + sizeOf(_right) + 1
-        new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialRedCount | size)
-      }
-    }
-    private[RedBlackTree] def blackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
-      //assertNotMutable(this)
-      //assertNotMutable(newRight)
-      if ((newRight eq _right) && isBlack) this
-      else {
-        val size = sizeOf(_left) + sizeOf(newRight) + 1
-        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialBlackCount | size)
-      }
-    }
-    private[RedBlackTree] def redWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
-      //assertNotMutable(this)
-      //assertNotMutable(newRight)
-      if ((newRight eq _right) && isRed) this
-      else {
-        val size = sizeOf(_left) + sizeOf(newRight) + 1
-        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialRedCount | size)
-      }
-    }
-    private[RedBlackTree] def withLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
-      //assertNotMutable(this)
-      //assertNotMutable(newLeft)
-      //assertNotMutable(newRight)
-      if ((newLeft eq _left) && (newRight eq _right)) this
-      else {
-        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
-        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, (_count & colourBit) | size)
-      }
-    }
-    private[RedBlackTree] def redWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
-      //assertNotMutable(this)
-      //assertNotMutable(newLeft)
-      //assertNotMutable(newRight)
-      if ((newLeft eq _left) && (newRight eq _right) && isRed) this
-      else {
-        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
-        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialRedCount | size)
-      }
-    }
-    private[RedBlackTree] def blackWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
-      //assertNotMutable(this)
-      //assertNotMutable(newLeft)
-      //assertNotMutable(newRight)
-      if ((newLeft eq _left) && (newRight eq _right) && isBlack) this
-      else {
-        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
-        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialBlackCount | size)
-      }
-    }
-  }
-  //see #Tree docs "Colour, mutability and size encoding"
-  //we make these final vals because the optimiser inlines them, without reference to the enclosing module
-  private[RedBlackTree] final val colourBit = 0x80000000
-  //really it's ~colourBit but that doesn't get inlined
-  private[RedBlackTree] final val colourMask = colourBit - 1
-  private[RedBlackTree] final val initialBlackCount = colourBit
-  private[RedBlackTree] final val initialRedCount = 0
-
-  @`inline` private[RedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B])
= new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) - @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) - - /** create a new immutable red tree. - * left and right may be null - */ - private[immutable] def RedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { - //assertNotMutable(left) - //assertNotMutable(right) - val size = sizeOf(left) + sizeOf(right) + 1 - new Tree(key, value.asInstanceOf[AnyRef], left, right, initialRedCount | size) - } - private[immutable] def BlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { - //assertNotMutable(left) - //assertNotMutable(right) - val size = sizeOf(left) + sizeOf(right) + 1 - new Tree(key, value.asInstanceOf[AnyRef], left, right, initialBlackCount | size) - } - @`inline` private def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count - //immutable APIs - //assertions - uncomment decls and callers when changing functionality - // private def devTimeAssert(assertion: Boolean) = { - // //uncomment this during development of the functionality - // assert(assertion) - // } - // private def assertNotMutable(t:Tree[_,_]) = { - // devTimeAssert ((t eq null) || t.count > 0) - // } - private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(protected implicit val ordering: Ordering[A]) extends AbstractIterator[R] { - protected[this] def nextResult(tree: Tree[A, B]): R - - override def hasNext: Boolean = lookahead ne null - - @throws[NoSuchElementException] - override def next(): R = { - val tree = lookahead - if(tree ne null) { - lookahead = findLeftMostOrPopOnEmpty(goRight(tree)) - nextResult(tree) - } else Iterator.empty.next() - } - - @tailrec - protected final def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) popNext() - else if (tree.left eq null) tree - else findLeftMostOrPopOnEmpty(goLeft(tree)) - - @`inline` private[this] def pushNext(tree: Tree[A, B]): Unit = { - stackOfNexts(index) = tree - index += 1 - } - @`inline` protected final def popNext(): Tree[A, B] = if (index == 0) null else { - index -= 1 - stackOfNexts(index) - } - - protected[this] val stackOfNexts = if (root eq null) null else { - /* - * According to "Ralf Hinze. Constructing red-black trees" [https://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] - * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. - * - * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) - * - * Although we don't store the deepest nodes in the path during iteration, - * we potentially do so in `startFrom`. - */ - val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 - new Array[Tree[A, B] @uncheckedCaptures](maximumHeight) - } - private[this] var index = 0 - protected var lookahead: Tree[A, B] @uncheckedCaptures = - if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) - - /** - * Find the leftmost subtree whose key is equal to the given key, or if no such thing, - * the leftmost subtree with the key that would be "next" after it according - * to the ordering. Along the way build up the iterator's path stack so that "next" - * functionality works. 
- */ - private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { - @tailrec def find(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) popNext() - else find( - if (ordering.lteq(key, tree.key)) goLeft(tree) - else goRight(tree) - ) - find(root) - } - - @`inline` private[this] def goLeft(tree: Tree[A, B]) = { - pushNext(tree) - tree.left - } - - @`inline` protected final def goRight(tree: Tree[A, B]) = tree.right - } - - private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B]) extends TreeIterator[A, B, Unit](tree, None) { - override def nextResult(tree: Tree[A, B]) = ??? - - def sameKeys[X](that:EqualsIterator[A,X]): Boolean = { - var equal = true - while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { - if (this.lookahead eq that.lookahead) { - this.lookahead = this.popNext() - that.lookahead = that.popNext() - } else { - equal = (this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || - ordering.equiv(this.lookahead.key, that.lookahead.key) - this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) - that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) - } - } - equal && (this.lookahead eq null) && (that.lookahead eq null) - } - def sameValues[X](that:EqualsIterator[A,X]): Boolean = { - var equal = true - while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { - if (this.lookahead eq that.lookahead) { - this.lookahead = this.popNext() - that.lookahead = that.popNext() - } else { - equal = this.lookahead.value == that.lookahead.value - this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) - that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) - } - } - equal && (this.lookahead eq null) && (that.lookahead eq null) - } - def sameEntries[X](that:EqualsIterator[A,X]): Boolean = { - var equal = true - while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { - if (this.lookahead eq that.lookahead) { - this.lookahead = this.popNext() - that.lookahead = that.popNext() - } else { - equal = ((this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || - ordering.equiv(this.lookahead.key, that.lookahead.key)) && this.lookahead.value == that.lookahead.value - this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) - that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) - } - } - equal && (this.lookahead eq null) && (that.lookahead eq null) - } - } - private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { - override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) - } - - private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) { - override def nextResult(tree: Tree[A, B]) = tree.key - } - - private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) { - override def nextResult(tree: Tree[A, B]) = tree.value - } - - /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ - def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): Tree[A, Null] = size match { - case 0 => null - case 1 => mkTree(level != maxUsedDepth || level == 1, xs.next(), 
null, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val x = xs.next() - val right = f(level+1, size-1-leftSize) - BlackTree(x, null, left, right) - } - f(1, size) - } - - /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ - def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): Tree[A, B] = size match { - case 0 => null - case 1 => - val (k, v) = xs.next() - mkTree(level != maxUsedDepth || level == 1, k, v, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val (k, v) = xs.next() - val right = f(level+1, size-1-leftSize) - BlackTree(k, v, left, right) - } - f(1, size) - } - - def transform[A, B, C](t: Tree[A, B], f: (A, B) => C): Tree[A, C] = - if(t eq null) null - else { - val k = t.key - val v = t.value - val l = t.left - val r = t.right - val l2 = transform(l, f) - val v2 = f(k, v) - val r2 = transform(r, f) - if((v2.asInstanceOf[AnyRef] eq v.asInstanceOf[AnyRef]) - && (l2 eq l) - && (r2 eq r)) t.asInstanceOf[Tree[A, C]] - else mkTree(t.isBlack, k, v2, l2, r2) - } - - def filterEntries[A, B](t: Tree[A, B], f: (A, B) => Boolean): Tree[A, B] = if(t eq null) null else { - def fk(t: Tree[A, B]): Tree[A, B] = { - val k = t.key - val v = t.value - val l = t.left - val r = t.right - val l2 = if(l eq null) null else fk(l) - val keep = f(k, v) - val r2 = if(r eq null) null else fk(r) - if(!keep) join2(l2, r2) - else if((l2 eq l) && (r2 eq r)) t - else join(l2, k, v, r2) - } - blacken(fk(t)) - } - - private[this] val null2 = (null, null) - - def partitionEntries[A, B](t: Tree[A, B], p: (A, B) => Boolean): (Tree[A, B], Tree[A, B]) = if(t eq null) (null, null) else { - if (t eq null) null2 - else { - object partitioner { - var tmpk, tmpd = null: Tree[A, B] // shared vars to avoid returning tuples from fk - def fk(t: Tree[A, B]): Unit = { - val k = t.key - val v = t.value - val l = t.left - val r = t.right - var l2k, l2d, r2k, r2d = null: Tree[A, B] - if (l ne null) { - fk(l) - l2k = tmpk - l2d = tmpd - } - val keep = p(k, v) - if (r ne null) { - fk(r) - r2k = tmpk - r2d = tmpd - } - val jk = - if (!keep) join2(l2k, r2k) - else if ((l2k eq l) && (r2k eq r)) t - else join(l2k, k, v, r2k) - val jd = - if (keep) join2(l2d, r2d) - else if ((l2d eq l) && (r2d eq r)) t - else join(l2d, k, v, r2d) - tmpk = jk - tmpd = jd - } - } - - partitioner.fk(t) - (blacken(partitioner.tmpk), blacken(partitioner.tmpd)) - } - } - - // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees - // Constructing Red-Black Trees, Ralf Hinze: [[https://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]] - // Red-Black Trees in a Functional Setting, Chris Okasaki: [[https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf]] */ - - private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) { - val newLeft = del(tree.left, k) - if (newLeft eq tree.left) tree - else if (isBlackTree(tree.left)) balLeft(tree, newLeft, tree.right) - else tree.redWithLeft(newLeft) - } else if (cmp > 0) { - val newRight = del(tree.right, k) - if (newRight eq tree.right) tree - else if (isBlackTree(tree.right)) balRight(tree, tree.left, newRight) - else tree.redWithRight(newRight) - } else append(tree.left, tree.right) - } - 
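The deletion path just defined is what backs removal on the public sorted collections; a short, hedged usage sketch (standard library API, results as implied by the semantics above):

  import scala.collection.immutable.TreeMap

  val m = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
  (m - 2).keys.toList   // List(1, 3): the tree is rebalanced, ordering preserved
  m.minAfter(2)         // Some((2, "b")): smallest entry with key >= 2
  m.maxBefore(2)        // Some((1, "a")): largest entry with key < 2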
- private[this] def balance[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = - if (isRedTree(tl)) { - if (isRedTree(tr)) tree.redWithLeftRight(tl.black, tr.black) - else if (isRedTree(tl.left)) tl.withLeftRight(tl.left.black, tree.blackWithLeftRight(tl.right, tr)) - else if (isRedTree(tl.right)) tl.right.withLeftRight(tl.blackWithRight(tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) - else tree.blackWithLeftRight(tl, tr) - } else if (isRedTree(tr)) { - if (isRedTree(tr.right)) tr.withLeftRight(tree.blackWithLeftRight(tl, tr.left), tr.right.black) - else if (isRedTree(tr.left)) tr.left.withLeftRight(tree.blackWithLeftRight(tl, tr.left.left), tr.blackWithLeftRight(tr.left.right, tr.right)) - else tree.blackWithLeftRight(tl, tr) - } else tree.blackWithLeftRight(tl, tr) - - private[this] def balLeft[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = - if (isRedTree(tl)) tree.redWithLeftRight(tl.black, tr) - else if (isBlackTree(tr)) balance(tree, tl, tr.red) - else if (isRedTree(tr) && isBlackTree(tr.left)) - tr.left.redWithLeftRight(tree.blackWithLeftRight(tl, tr.left.left), balance(tr, tr.left.right, tr.right.red)) - else sys.error("Defect: invariance violation") - - private[this] def balRight[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = - if (isRedTree(tr)) tree.redWithLeftRight(tl, tr.black) - else if (isBlackTree(tl)) balance(tree, tl.red, tr) - else if (isRedTree(tl) && isBlackTree(tl.right)) - tl.right.redWithLeftRight(balance(tl, tl.left.red, tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) - else sys.error("Defect: invariance violation") - - /** `append` is similar to `join2` but requires that both subtrees have the same black height */ - private[this] def append[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = { - if (tl eq null) tr - else if (tr eq null) tl - else if (tl.isRed) { - if (tr.isRed) { - //tl is red, tr is red - val bc = append(tl.right, tr.left) - if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) - else tl.withRight(tr.withLeft(bc)) - } else { - //tl is red, tr is black - tl.withRight(append(tl.right, tr)) - } - } else { - if (tr.isBlack) { - //tl is black tr is black - val bc = append(tl.right, tr.left) - if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) - else balLeft(tl, tl.left, tr.withLeft(bc)) - } else { - //tl is black tr is red - tr.withLeft(append(tl, tr.left)) - } - } - } - - - // Bulk operations based on "Just Join for Parallel Ordered Sets" (https://www.cs.cmu.edu/~guyb/papers/BFS16.pdf) - // We don't store the black height in the tree so we pass it down into the join methods and derive the black height - // of child nodes from it. Where possible the black height is used directly instead of deriving the rank from it. - // Our trees are supposed to have a black root so we always blacken as the last step of union/intersect/difference. 
- - def union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_union(t1, t2)) - - def intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_intersect(t1, t2)) - - def difference[A, B](t1: Tree[A, B], t2: Tree[A, _])(implicit ordering: Ordering[A]): Tree[A, B] = - blacken(_difference(t1, t2.asInstanceOf[Tree[A, B]])) - - /** Compute the rank from a tree and its black height */ - @`inline` private[this] def rank(t: Tree[_, _], bh: Int): Int = { - if(t eq null) 0 - else if(t.isBlack) 2*(bh-1) - else 2*bh-1 - } - - private[this] def joinRight[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], bhtl: Int, rtr: Int): Tree[A, B] = { - val rtl = rank(tl, bhtl) - if(rtl == (rtr/2)*2) RedTree(k, v, tl, tr) - else { - val tlBlack = isBlackTree(tl) - val bhtlr = if(tlBlack) bhtl-1 else bhtl - val ttr = joinRight(tl.right, k, v, tr, bhtlr, rtr) - if(tlBlack && isRedTree(ttr) && isRedTree(ttr.right)) - RedTree(ttr.key, ttr.value, - BlackTree(tl.key, tl.value, tl.left, ttr.left), - ttr.right.black) - else mkTree(tlBlack, tl.key, tl.value, tl.left, ttr) - } - } - - private[this] def joinLeft[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], rtl: Int, bhtr: Int): Tree[A, B] = { - val rtr = rank(tr, bhtr) - if(rtr == (rtl/2)*2) RedTree(k, v, tl, tr) - else { - val trBlack = isBlackTree(tr) - val bhtrl = if(trBlack) bhtr-1 else bhtr - val ttl = joinLeft(tl, k, v, tr.left, rtl, bhtrl) - if(trBlack && isRedTree(ttl) && isRedTree(ttl.left)) - RedTree(ttl.key, ttl.value, - ttl.left.black, - BlackTree(tr.key, tr.value, ttl.right, tr.right)) - else mkTree(trBlack, tr.key, tr.value, ttl, tr.right) - } - } - - private[this] def join[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B]): Tree[A, B] = { - @tailrec def h(t: Tree[_, _], i: Int): Int = - if(t eq null) i+1 else h(t.left, if(t.isBlack) i+1 else i) - val bhtl = h(tl, 0) - val bhtr = h(tr, 0) - if(bhtl > bhtr) { - val tt = joinRight(tl, k, v, tr, bhtl, rank(tr, bhtr)) - if(isRedTree(tt) && isRedTree(tt.right)) tt.black - else tt - } else if(bhtr > bhtl) { - val tt = joinLeft(tl, k, v, tr, rank(tl, bhtl), bhtr) - if(isRedTree(tt) && isRedTree(tt.left)) tt.black - else tt - } else mkTree(isRedTree(tl) || isRedTree(tr), k, v, tl, tr) - } - - private[this] def split[A, B](t: Tree[A, B], k2: A)(implicit ordering: Ordering[A]): (Tree[A, B], Tree[A, B], Tree[A, B], A) = - if(t eq null) (null, null, null, k2) - else { - val cmp = ordering.compare(k2, t.key) - if(cmp == 0) (t.left, t, t.right, t.key) - else if(cmp < 0) { - val (ll, b, lr, k1) = split(t.left, k2) - (ll, b, join(lr, t.key, t.value, t.right), k1) - } else { - val (rl, b, rr, k1) = split(t.right, k2) - (join(t.left, t.key, t.value, rl), b, rr, k1) - } - } - - private[this] def splitLast[A, B](t: Tree[A, B]): (Tree[A, B], A, B) = - if(t.right eq null) (t.left, t.key, t.value) - else { - val (tt, kk, vv) = splitLast(t.right) - (join(t.left, t.key, t.value, tt), kk, vv) - } - - private[this] def join2[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = - if(tl eq null) tr - else if(tr eq null) tl - else { - val (ttl, k, v) = splitLast(tl) - join(ttl, k, v, tr) - } - - private[this] def _union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = - if((t1 eq null) || (t1 eq t2)) t2 - else if(t2 eq null) t1 - else { - val (l1, _, r1, k1) = split(t1, t2.key) - val tl = _union(l1, t2.left) - val tr = _union(r1, t2.right) - join(tl, k1, t2.value, tr) - } - - private[this] def _intersect[A, B](t1: 
Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = - if((t1 eq null) || (t2 eq null)) null - else if (t1 eq t2) t1 - else { - val (l1, b, r1, k1) = split(t1, t2.key) - val tl = _intersect(l1, t2.left) - val tr = _intersect(r1, t2.right) - if(b ne null) join(tl, k1, t2.value, tr) - else join2(tl, tr) - } - - private[this] def _difference[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = - if((t1 eq null) || (t2 eq null)) t1 - else if (t1 eq t2) null - else { - val (l1, _, r1, k1) = split(t1, t2.key) - val tl = _difference(l1, t2.left) - val tr = _difference(r1, t2.right) - join2(tl, tr) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Seq.scala b/tests/pos-special/stdlib/collection/immutable/Seq.scala index d575c3aaf14a..5184cadaccae 100644 --- a/tests/pos-special/stdlib/collection/immutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/immutable/Seq.scala @@ -30,7 +30,7 @@ trait Seq[+A] extends Iterable[A] * @define coll immutable sequence * @define Coll `immutable.Seq` */ -trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C] +trait SeqOps[+A, +CC[_], +C] extends AnyRef with collection.SeqOps[A, CC, C] /** * $factoryInfo diff --git a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala deleted file mode 100644 index 6c955fd52fc2..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.collection.mutable.{Builder, ReusableBuilder} -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** A base trait for ordered, immutable maps. - * - * Note that the [[equals]] method for [[SeqMap]] compares key-value pairs - * without regard to ordering. - * - * All behavior is defined in terms of the abstract methods in `SeqMap`. - * It is sufficient for concrete subclasses to implement those methods. - * Methods that return a new map, in particular [[removed]] and [[updated]], must preserve ordering. - * - * @tparam K the type of the keys contained in this linked map. - * @tparam V the type of the values associated with the keys in this linked map. 
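As an aside (not part of this patch): a minimal sketch of the contract described in the `SeqMap` doc above, using the public `scala.collection.immutable.SeqMap` API. Traversal follows insertion order, while `equals` ignores it.

  import scala.collection.immutable.SeqMap

  val a = SeqMap(1 -> "a", 2 -> "b")
  val b = SeqMap(2 -> "b", 1 -> "a")
  a == b    // true: equality compares key-value pairs without regard to ordering
  a.toList  // List((1,a), (2,b)) -- iteration preserves insertion order
  b.toList  // List((2,b), (1,a))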
- * - * @define coll immutable seq map - * @define Coll `immutable.SeqMap` - */ - -trait SeqMap[K, +V] - extends Map[K, V] - with collection.SeqMap[K, V] - with MapOps[K, V, SeqMap, SeqMap[K, V]] - with MapFactoryDefaults[K, V, SeqMap, Iterable] { - override def mapFactory: MapFactory[SeqMap] = SeqMap -} - - -object SeqMap extends MapFactory[SeqMap] { - def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]] - - def from[K, V](it: collection.IterableOnce[(K, V)]^): SeqMap[K, V] = - it match { - case sm: SeqMap[K, V] => sm - case _ => (newBuilder[K, V] ++= it).result() - } - - def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl - - @SerialVersionUID(3L) - private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable { - override def size: Int = 0 - override def knownSize: Int = 0 - override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) - override def contains(key: Any) = false - def get(key: Any): Option[Nothing] = None - override def getOrElse [V1](key: Any, default: => V1): V1 = default - def iterator: Iterator[(Any, Nothing)] = Iterator.empty - def updated [V1] (key: Any, value: V1): SeqMap[Any, V1] = new SeqMap1(key, value) - def removed(key: Any): SeqMap[Any, Nothing] = this - } - - @SerialVersionUID(3L) - private final class SeqMap1[K, +V](key1: K, value1: V) extends SeqMap[K,V] with Serializable { - override def size: Int = 1 - override def knownSize: Int = 1 - override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = key == key1 - def get(key: K): Option[V] = - if (key == key1) Some(value1) else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 else default - def iterator = Iterator.single((key1, value1)) - def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = - if (key == key1) new SeqMap1(key1, value) - else new SeqMap2(key1, value1, key, value) - def removed(key: K): SeqMap[K, V] = - if (key == key1) SeqMap.empty else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)) - } - override def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - } - } - - @SerialVersionUID(3L) - private final class SeqMap2[K, +V](key1: K, value1: V, key2: K, value2: V) extends SeqMap[K,V] with Serializable { - override def size: Int = 2 - override def knownSize: Int = 2 - override def apply(key: K) = - if (key == key1) value1 - else if (key == key2) value2 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else default - def iterator = ((key1, value1) :: (key2, value2) :: Nil).iterator - def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = - if (key == key1) new SeqMap2(key1, value, key2, value2) - else if (key == key2) new SeqMap2(key1, value1, key2, value) - else new SeqMap3(key1, value1, key2, value2, key, value) - def removed(key: K): SeqMap[K, V] = - if (key == key1) new SeqMap1(key2, value2) - else if (key == key2) new SeqMap1(key1, value1) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)) - } - override def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - 
f(key2, value2) - } - } - - @SerialVersionUID(3L) - private class SeqMap3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends SeqMap[K,V] with Serializable { - override def size: Int = 3 - override def knownSize: Int = 3 - override def apply(key: K) = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else default - def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: Nil).iterator - def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = - if (key == key1) new SeqMap3(key1, value, key2, value2, key3, value3) - else if (key == key2) new SeqMap3(key1, value1, key2, value, key3, value3) - else if (key == key3) new SeqMap3(key1, value1, key2, value2, key3, value) - else new SeqMap4(key1, value1, key2, value2, key3, value3, key, value) - def removed(key: K): SeqMap[K, V] = - if (key == key1) new SeqMap2(key2, value2, key3, value3) - else if (key == key2) new SeqMap2(key1, value1, key3, value3) - else if (key == key3) new SeqMap2(key1, value1, key2, value2) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)) - } - override def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - f(key2, value2) - f(key3, value3) - } - } - - @SerialVersionUID(3L) - private final class SeqMap4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends SeqMap[K,V] with Serializable { - override def size: Int = 4 - override def knownSize: Int = 4 - override def apply(key: K) = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else if (key == key4) value4 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else if (key == key4) Some(value4) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else if (key == key4) value4 - else default - def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: (key4, value4) :: Nil).iterator - def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = - if (key == key1) new SeqMap4(key1, value, key2, value2, key3, value3, key4, value4) - else if (key == key2) new SeqMap4(key1, value1, key2, value, key3, value3, key4, value4) - else if (key == key3) new SeqMap4(key1, value1, key2, value2, key3, value, key4, value4) - else if (key == key4) new SeqMap4(key1, value1, key2, value2, key3, value3, key4, value) - else { - // Directly create the elements for performance reasons - val fields = Vector(key1, key2, key3, key4, key) - val underlying: Map[K, (Int, V1)] = - HashMap( - (key1, (0, value1)), - (key2, (1, value2)), - (key3, (2, value3)), - (key4, (3, value4)), - (key, (4, value)) - ) - new VectorMap(fields, underlying) - } - def removed(key: 
K): SeqMap[K, V] = - if (key == key1) new SeqMap3(key2, value2, key3, value3, key4, value4) - else if (key == key2) new SeqMap3(key1, value1, key3, value3, key4, value4) - else if (key == key3) new SeqMap3(key1, value1, key2, value2, key4, value4) - else if (key == key4) new SeqMap3(key1, value1, key2, value2, key3, value3) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) - } - override def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - f(key2, value2) - f(key3, value3) - f(key4, value4) - } - - private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type = - builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4)) - } - - private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { - private[this] var elems: SeqMap[K, V] @uncheckedCaptures = SeqMap.empty - private[this] var switchedToVectorMapBuilder: Boolean = false - private[this] var vectorMapBuilder: VectorMapBuilder[K, V] @uncheckedCaptures = _ - - override def clear(): Unit = { - elems = SeqMap.empty - if (vectorMapBuilder != null) { - vectorMapBuilder.clear() - } - switchedToVectorMapBuilder = false - } - - override def result(): SeqMap[K, V] = - if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems - - def addOne(elem: (K, V)) = { - if (switchedToVectorMapBuilder) { - vectorMapBuilder.addOne(elem) - } else if (elems.size < 4) { - elems = elems + elem - } else { - // assert(elems.size == 4) - if (elems.contains(elem._1)) { - elems = elems + elem // will not increase the size of the map - } else { - switchedToVectorMapBuilder = true - if (vectorMapBuilder == null) { - vectorMapBuilder = new VectorMapBuilder - } - elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) - vectorMapBuilder.addOne(elem) - } - } - - this - } - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = - if (switchedToVectorMapBuilder) { - vectorMapBuilder.addAll(xs) - this - } else { - super.addAll(xs) - } - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala deleted file mode 100644 index ac92f81b2013..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Set.scala +++ /dev/null @@ -1,400 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.collection.immutable.Set.Set4 -import scala.collection.mutable.{Builder, ReusableBuilder} -import language.experimental.captureChecking -import annotation.unchecked.uncheckedCaptures - -/** Base trait for immutable set collections */ -trait Set[A] extends Iterable[A] - with collection.Set[A] - with SetOps[A, Set, Set[A]] - with IterableFactoryDefaults[A, Set] { - override def iterableFactory: IterableFactory[Set] = Set -} - -/** Base trait for immutable set operations - * - * @define coll immutable set - * @define Coll `immutable.Set` - */ -trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] - extends collection.SetOps[A, CC, C] { - - /** Creates a new set with an additional element, unless the element is - * already present. 
- * - * @param elem the element to be added - * @return a new set that contains all elements of this set and that also - * contains `elem`. - */ - def incl(elem: A): C - - /** Alias for `incl` */ - override final def + (elem: A): C = incl(elem) // like in collection.Set but not deprecated - - /** Creates a new set with a given element removed from this set. - * - * @param elem the element to be removed - * @return a new set that contains all elements of this set but that does not - * contain `elem`. - */ - def excl(elem: A): C - - /** Alias for `excl` */ - @`inline` final override def - (elem: A): C = excl(elem) - - def diff(that: collection.Set[A]): C = - foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) - - /** Creates a new $coll from this $coll by removing all elements of another - * collection. - * - * @param that the collection containing the elements to remove. - * @return a new $coll with the given elements removed, omitting duplicates. - */ - def removedAll(that: IterableOnce[A]): C = that.iterator.foldLeft[C](coll)(_ - _) - - /** Alias for removedAll */ - override final def -- (that: IterableOnce[A]): C = removedAll(that) -} - -trait StrictOptimizedSetOps[A, +CC[X], +C <: SetOps[A, CC, C]] - extends SetOps[A, CC, C] - with collection.StrictOptimizedSetOps[A, CC, C] - with StrictOptimizedIterableOps[A, CC, C] { - - override def concat(that: collection.IterableOnce[A]): C = { - var result: C = coll - val it = that.iterator - while (it.hasNext) result = result + it.next() - result - } -} - -/** - * $factoryInfo - * @define coll immutable set - * @define Coll `immutable.Set` - */ -@SerialVersionUID(3L) -object Set extends IterableFactory[Set] { - - def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]] - - def from[E](it: collection.IterableOnce[E]^): Set[E] = - it match { - // We want `SortedSet` (and subclasses, such as `BitSet`) to - // rebuild themselves to avoid element type widening issues - case _: SortedSet[E] => (newBuilder[E] ++= it).result() - case _ if it.knownSize == 0 => empty[E] - case s: Set[E] => s - case _ => (newBuilder[E] ++= it).result() - } - - def newBuilder[A]: Builder[A, Set[A]] = new SetBuilderImpl[A] - - /** An optimized representation for immutable empty sets */ - @SerialVersionUID(3L) - private object EmptySet extends AbstractSet[Any] with Serializable { - override def size: Int = 0 - override def isEmpty = true - override def knownSize: Int = size - override def filter(pred: Any => Boolean): Set[Any] = this - override def filterNot(pred: Any => Boolean): Set[Any] = this - override def removedAll(that: IterableOnce[Any]): Set[Any] = this - override def diff(that: collection.Set[Any]): Set[Any] = this - override def subsetOf(that: collection.Set[Any]): Boolean = true - override def intersect(that: collection.Set[Any]): Set[Any] = this - override def view: View[Any] = View.empty - def contains(elem: Any): Boolean = false - def incl(elem: Any): Set[Any] = new Set1(elem) - def excl(elem: Any): Set[Any] = this - def iterator: Iterator[Any] = Iterator.empty - override def foreach[U](f: Any => U): Unit = () - } - private[collection] def emptyInstance: Set[Any] = EmptySet - - @SerialVersionUID(3L) - private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A], Serializable, Pure { - private[this] var current = 0 - private[this] var remainder = n - override def knownSize: Int = remainder - def hasNext = remainder > 0 - def apply(i: Int): A - def next(): A = - if (hasNext) { - val r = apply(current) - current += 1 - 
remainder -= 1 - r - } else Iterator.empty.next() - - override def drop(n: Int): Iterator[A] = { - if (n > 0) { - current += n - remainder = Math.max(0, remainder - n) - } - this - } - } - - /** An optimized representation for immutable sets of size 1 */ - @SerialVersionUID(3L) - final class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { - override def size: Int = 1 - override def isEmpty = false - override def knownSize: Int = size - def contains(elem: A): Boolean = elem == elem1 - def incl(elem: A): Set[A] = - if (contains(elem)) this - else new Set2(elem1, elem) - def excl(elem: A): Set[A] = - if (elem == elem1) Set.empty - else this - def iterator: Iterator[A] = Iterator.single(elem1) - override def foreach[U](f: A => U): Unit = f(elem1) - override def exists(p: A => Boolean): Boolean = p(elem1) - override def forall(p: A => Boolean): Boolean = p(elem1) - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = - if (pred(elem1) != isFlipped) this else Set.empty - - override def find(p: A => Boolean): Option[A] = - if (p(elem1)) Some(elem1) - else None - override def head: A = elem1 - override def tail: Set[A] = Set.empty - } - - /** An optimized representation for immutable sets of size 2 */ - @SerialVersionUID(3L) - final class Set2[A] private[collection] (elem1: A, elem2: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { - override def size: Int = 2 - override def isEmpty = false - override def knownSize: Int = size - def contains(elem: A): Boolean = elem == elem1 || elem == elem2 - def incl(elem: A): Set[A] = - if (contains(elem)) this - else new Set3(elem1, elem2, elem) - def excl(elem: A): Set[A] = - if (elem == elem1) new Set1(elem2) - else if (elem == elem2) new Set1(elem1) - else this - def iterator: Iterator[A] = new SetNIterator[A](size) { - def apply(i: Int) = getElem(i) - } - private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 } - - override def foreach[U](f: A => U): Unit = { - f(elem1); f(elem2) - } - override def exists(p: A => Boolean): Boolean = { - p(elem1) || p(elem2) - } - override def forall(p: A => Boolean): Boolean = { - p(elem1) && p(elem2) - } - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { - var r1: A = null.asInstanceOf[A] - var n = 0 - if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} - if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2; n += 1} - - n match { - case 0 => Set.empty - case 1 => new Set1(r1) - case 2 => this - } - } - override def find(p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else if (p(elem2)) Some(elem2) - else None - } - override def head: A = elem1 - override def tail: Set[A] = new Set1(elem2) - } - - /** An optimized representation for immutable sets of size 3 */ - @SerialVersionUID(3L) - final class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { - override def size: Int = 3 - override def isEmpty = false - override def knownSize: Int = size - def contains(elem: A): Boolean = - elem == elem1 || elem == elem2 || elem == elem3 - def incl(elem: A): Set[A] = - if (contains(elem)) this - else new Set4(elem1, elem2, elem3, elem) - def excl(elem: A): Set[A] = - if (elem == elem1) new Set2(elem2, elem3) - else if (elem == elem2) new Set2(elem1, elem3) - else if (elem == elem3) new 
Set2(elem1, elem2) - else this - def iterator: Iterator[A] = new SetNIterator[A](size) { - def apply(i: Int) = getElem(i) - } - private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 } - - override def foreach[U](f: A => U): Unit = { - f(elem1); f(elem2); f(elem3) - } - override def exists(p: A => Boolean): Boolean = { - p(elem1) || p(elem2) || p(elem3) - } - override def forall(p: A => Boolean): Boolean = { - p(elem1) && p(elem2) && p(elem3) - } - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { - var r1, r2: A = null.asInstanceOf[A] - var n = 0 - if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} - if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} - if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3; n += 1} - - n match { - case 0 => Set.empty - case 1 => new Set1(r1) - case 2 => new Set2(r1, r2) - case 3 => this - } - } - override def find(p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else if (p(elem2)) Some(elem2) - else if (p(elem3)) Some(elem3) - else None - } - override def head: A = elem1 - override def tail: Set[A] = new Set2(elem2, elem3) - } - - /** An optimized representation for immutable sets of size 4 */ - @SerialVersionUID(3L) - final class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { - override def size: Int = 4 - override def isEmpty = false - override def knownSize: Int = size - def contains(elem: A): Boolean = - elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4 - def incl(elem: A): Set[A] = - if (contains(elem)) this - else HashSet.empty[A] + elem1 + elem2 + elem3 + elem4 + elem - def excl(elem: A): Set[A] = - if (elem == elem1) new Set3(elem2, elem3, elem4) - else if (elem == elem2) new Set3(elem1, elem3, elem4) - else if (elem == elem3) new Set3(elem1, elem2, elem4) - else if (elem == elem4) new Set3(elem1, elem2, elem3) - else this - def iterator: Iterator[A] = new SetNIterator[A](size) { - def apply(i: Int) = getElem(i) - } - private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 case 3 => elem4 } - - override def foreach[U](f: A => U): Unit = { - f(elem1); f(elem2); f(elem3); f(elem4) - } - override def exists(p: A => Boolean): Boolean = { - p(elem1) || p(elem2) || p(elem3) || p(elem4) - } - override def forall(p: A => Boolean): Boolean = { - p(elem1) && p(elem2) && p(elem3) && p(elem4) - } - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { - var r1, r2, r3: A = null.asInstanceOf[A] - var n = 0 - if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} - if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} - if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3 else r3 = elem3; n += 1} - if (pred(elem4) != isFlipped) { if (n == 0) r1 = elem4 else if (n == 1) r2 = elem4 else if (n == 2) r3 = elem4; n += 1} - - n match { - case 0 => Set.empty - case 1 => new Set1(r1) - case 2 => new Set2(r1, r2) - case 3 => new Set3(r1, r2, r3) - case 4 => this - } - } - - override def find(p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else if (p(elem2)) Some(elem2) - else if (p(elem3)) Some(elem3) - else if (p(elem4)) Some(elem4) - else None - } - override def head: A = elem1 - override def tail: Set[A] = new Set3(elem2, elem3, elem4) - - 
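For context (not part of this patch), a small sketch of how the size-specialized representations above evolve; `Set3`/`Set4` are the private implementations shown here, reached through the ordinary `Set` factory.

  val s4 = Set("a", "b", "c", "d")  // four elements: backed by Set4
  val s5 = s4 + "e"                 // incl on a full Set4 falls back to a HashSet
  val s3 = s4 - "d"                 // excl steps back down to a Set3
  assert(s5.size == 5 && s3.size == 3)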
private[immutable] def buildTo(builder: Builder[A, Set[A]]): builder.type = - builder.addOne(elem1).addOne(elem2).addOne(elem3).addOne(elem4) - } -} - -/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ -abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] - -/** Builder for Set. - * $multipleResults - */ -private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { - private[this] var elems: Set[A @uncheckedCaptures] = Set.empty - private[this] var switchedToHashSetBuilder: Boolean = false - private[this] var hashSetBuilder: HashSetBuilder[A @uncheckedCaptures] = _ - - override def clear(): Unit = { - elems = Set.empty - if (hashSetBuilder != null) { - hashSetBuilder.clear() - } - switchedToHashSetBuilder = false - } - - override def result(): Set[A] = - if (switchedToHashSetBuilder) hashSetBuilder.result() else elems - - def addOne(elem: A) = { - if (switchedToHashSetBuilder) { - hashSetBuilder.addOne(elem) - } else if (elems.size < 4) { - elems = elems + elem - } else { - // assert(elems.size == 4) - if (elems.contains(elem)) { - () // do nothing - } else { - switchedToHashSetBuilder = true - if (hashSetBuilder == null) { - hashSetBuilder = new HashSetBuilder - } - elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder) - hashSetBuilder.addOne(elem) - } - } - - this - } - - override def addAll(xs: IterableOnce[A]^): this.type = - if (switchedToHashSetBuilder) { - hashSetBuilder.addAll(xs) - this - } else { - super.addAll(xs) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala deleted file mode 100644 index 9587502fd908..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.mutable.Builder -import language.experimental.captureChecking - -/** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys. - * - * Allows for range queries to be performed on its keys, and implementations must guarantee that traversal happens in - * sorted order, according to the map's [[scala.math.Ordering]]. 
- * - * @example {{{ - * import scala.collection.immutable.SortedMap - * - * // Make a SortedMap via the companion object factory - * val weekdays = SortedMap( - * 2 -> "Monday", - * 3 -> "Tuesday", - * 4 -> "Wednesday", - * 5 -> "Thursday", - * 6 -> "Friday" - * ) - * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) - * - * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") - * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) - * - * val day3 = days.get(3) // Some("Tuesday") - * - * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) - * - * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) - * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) - * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) - * }}} - * - * @tparam K the type of the keys contained in this tree map. - * @tparam V the type of the values associated with the keys. - */ -trait SortedMap[K, +V] - extends Map[K, V] - with collection.SortedMap[K, V] - with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] - with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { - - override def unsorted: Map[K, V] = this - - override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap - - /** The same map with a given default function. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - override def withDefault[V1 >: V](d: K -> V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) - - /** The same map with a given default value. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
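A minimal sketch (not part of this patch) of the default-value semantics documented above:

  import scala.collection.immutable.SortedMap

  val m = SortedMap(1 -> "one").withDefaultValue("?")
  m(2)      // "?": only apply consults the default
  m.get(2)  // None: get is unaffected
  val m2 = m.map(identity)  // a transformer: its result no longer carries the default
  m2.get(2) // None, and m2(2) would now throw NoSuchElementException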
- * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - override def withDefaultValue[V1 >: V](d: V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d) -} - -trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends MapOps[K, V, Map, C] with collection.SortedMapOps[K, V, CC, C] { self => - - protected def coll: C with CC[K, V] - - def unsorted: Map[K, V] - - override def keySet: SortedSet[K] = new ImmutableKeySortedSet - - /** The implementation class of the set returned by `keySet` */ - protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet { - def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { - val map = self.rangeImpl(from, until) - new map.ImmutableKeySortedSet - } - def incl(elem: K): SortedSet[K] = fromSpecific(this).incl(elem) - def excl(elem: K): SortedSet[K] = fromSpecific(this).excl(elem) - } - - // We override these methods to fix their return type (which would be `Map` otherwise) - def updated[V1 >: V](key: K, value: V1): CC[K, V1] - @`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) - override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K, V1] = { - // Implementation has been copied from `MapOps` - val previousValue = this.get(key) - remappingFunction(previousValue) match { - case None => previousValue.fold(coll)(_ => this.removed(key).coll) - case Some(nextValue) => - if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll - else coll.updated(key, nextValue) - } - } - override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })(ordering) -} - -trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends SortedMapOps[K, V, CC, C] - with collection.StrictOptimizedSortedMapOps[K, V, CC, C] - with StrictOptimizedMapOps[K, V, Map, C] { - - override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = { - var result: CC[K, V2] = coll - val it = xs.iterator - while (it.hasNext) result = result + it.next() - result - } -} - -@SerialVersionUID(3L) -object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { - - override def from[K: Ordering, V](it: IterableOnce[(K, V)]^): SortedMap[K, V] = it match { - case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm - case _ => super.from(it) - } - - final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K -> V) - extends Map.WithDefault[K, V](underlying, defaultValue) - with SortedMap[K, V] - with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable { - - implicit def ordering: Ordering[K] = underlying.ordering - - override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory - - def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) - - def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) - - def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = - new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) - - // Need to override following methods to match type signatures of `SortedMap.WithDefault` - // for operations preserving default value - - override def updated[V1 >: V](key: K, value: V1): 
WithDefault[K, V1] = - new WithDefault[K, V1](underlying.updated(key, value), defaultValue) - - override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = - new WithDefault( underlying.concat(xs) , defaultValue) - - override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) - - override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = - new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) - - override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = - SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala deleted file mode 100644 index 874abcaecda1..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable -import language.experimental.captureChecking - -/** Base trait for sorted sets */ -trait SortedSet[A] - extends Set[A] - with collection.SortedSet[A] - with SortedSetOps[A, SortedSet, SortedSet[A]] - with SortedSetFactoryDefaults[A, SortedSet, Set] { - - override def unsorted: Set[A] = this - - override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet -} - -/** - * @define coll immutable sorted set - * @define Coll `immutable.SortedSet` - */ -trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] - extends SetOps[A, Set, C] - with collection.SortedSetOps[A, CC, C] { - - def unsorted: Set[A] -} - -trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] - extends SortedSetOps[A, CC, C] - with collection.StrictOptimizedSortedSetOps[A, CC, C] - with StrictOptimizedSetOps[A, Set, C] { -} - -/** - * $factoryInfo - * @define coll immutable sorted set - * @define Coll `immutable.SortedSet` - */ -@SerialVersionUID(3L) -object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { - override def from[E: Ordering](it: IterableOnce[E]^): SortedSet[E] = it match { - case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss - case _ => super.from(it) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala deleted file mode 100644 index b1e4622971fb..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable -import language.experimental.captureChecking -import annotation.unchecked.uncheckedCaptures - -/** - * Trait that overrides operations to take advantage of strict builders. - */ -trait StrictOptimizedSeqOps[+A, +CC[_], +C] - extends Any - with SeqOps[A, CC, C] - with collection.StrictOptimizedSeqOps[A, CC, C] - with StrictOptimizedIterableOps[A, CC, C] { - - override def distinctBy[B](f: A -> B): C = { - if (lengthCompare(1) <= 0) coll - else { - val builder = newSpecificBuilder - val seen = mutable.HashSet.empty[B @uncheckedCaptures] - val it = this.iterator - var different = false - while (it.hasNext) { - val next = it.next() - if (seen.add(f(next))) builder += next else different = true - } - if (different) builder.result() else coll - } - } - - override def updated[B >: A](index: Int, elem: B): CC[B] = { - if (index < 0) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${if (knownSize>=0) knownSize else "unknown"})") - val b = iterableFactory.newBuilder[B] - if (knownSize >= 0) { - b.sizeHint(size) - } - var i = 0 - val it = iterator - while (i < index && it.hasNext) { - b += it.next() - i += 1 - } - if (!it.hasNext) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${i-1})") - b += elem - it.next() - while (it.hasNext) b += it.next() - b.result() - } - - override def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = { - val b = iterableFactory.newBuilder[B] - var i = 0 - val it = iterator - while (i < from && it.hasNext) { - b += it.next() - i += 1 - } - b ++= other - i = replaced - while (i > 0 && it.hasNext) { - it.next() - i -= 1 - } - while (it.hasNext) b += it.next() - b.result() - } - - override def sorted[B >: A](implicit ord: Ordering[B]): C = super.sorted(ord) - -} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala deleted file mode 100644 index ff01ad7806ec..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala +++ /dev/null @@ -1,372 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.annotation.tailrec -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.immutable.{RedBlackTree => RB} -import scala.collection.mutable.ReusableBuilder -import scala.runtime.AbstractFunction2 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** An immutable SortedMap whose values are stored in a red-black tree. - * - * This class is optimal when range queries will be performed, - * or when traversal in order of an ordering is desired. - * If you only need key lookups, and don't care in which order key-value - * pairs are traversed, consider using [[scala.collection.immutable.HashMap]], - * which will generally have better performance. If you need insertion order, - * consider a [[scala.collection.immutable.SeqMap]], which does not need to - * have an ordering supplied.
- * - * @example {{{ - * import scala.collection.immutable.TreeMap - * - * // Make a TreeMap via the companion object factory - * val weekdays = TreeMap( - * 2 -> "Monday", - * 3 -> "Tuesday", - * 4 -> "Wednesday", - * 5 -> "Thursday", - * 6 -> "Friday" - * ) - * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) - * - * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") - * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) - * - * val day3 = days.get(3) // Some("Tuesday") - * - * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) - * - * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) - * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) - * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) - * }}} - * - * @tparam K the type of the keys contained in this tree map. - * @tparam V the type of the values associated with the keys. - * @param ordering the implicit ordering used to compare objects of type `A`. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] - * section on `Red-Black Trees` for more information. - * - * @define Coll immutable.TreeMap - * @define coll immutable tree map - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) - extends AbstractMap[K, V] - with SortedMap[K, V] - with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] - with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] - with DefaultSerializable { - - def this()(implicit ordering: Ordering[K]) = this(null)(ordering) - private[immutable] def tree0: RB.Tree[K, V] = tree - - private[this] def newMapOrSelf[V1 >: V](t: RB.Tree[K, V1]): TreeMap[K, V1] = if(t eq tree) this else new TreeMap[K, V1](t) - - override def sortedMapFactory: SortedMapFactory[TreeMap] = TreeMap - - def iterator: Iterator[(K, V)] = RB.iterator(tree) - - def keysIteratorFrom(start: K): Iterator[K] = RB.keysIterator(tree, Some(start)) - - override def keySet: TreeSet[K] = new TreeSet(tree)(ordering) - - def iteratorFrom(start: K): Iterator[(K, V)] = RB.iterator(tree, Some(start)) - - override def valuesIteratorFrom(start: K): Iterator[V] = RB.valuesIterator(tree, Some(start)) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = - shape.parUnbox( - scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Tree[K, V]]( - size, tree, _.left, _.right, x => (x.key, x.value) - ) - ) - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Tree[K, V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree, _.left, _.right, _.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - 
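For context (not part of this patch), a sketch of what the shape dispatch in `keyStepper` above (and `valueStepper` below) buys a caller: primitive key types get an unboxed stepper.

  import scala.collection.immutable.TreeMap

  val tm = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
  val ks = tm.keyStepper                     // StepperShape.IntShape selects an IntStepper here
  while (ks.hasStep) println(ks.nextStep())  // 1, 2, 3 without boxing the keys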
override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Tree[K, V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree, _.left, _.right, _.value.asInstanceOf[V])) - } - s.asInstanceOf[S with EfficientSplit] - } - - def get(key: K): Option[V] = RB.get(tree, key) - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - val resultOrNull = RB.lookup(tree, key) - if (resultOrNull eq null) default - else resultOrNull.value - } - - def removed(key: K): TreeMap[K,V] = - newMapOrSelf(RB.delete(tree, key)) - - def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] = - newMapOrSelf(RB.update(tree, key, value, overwrite = true)) - - override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]^): TreeMap[K, V1] = - newMapOrSelf(that match { - case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => - RB.union(tree, tm.tree) - case ls: LinearSeq[(K,V1)] => - if (ls.isEmpty) tree //to avoid the creation of the adder - else { - val adder = new Adder[V1] - adder.addAll(ls) - adder.finalTree - } - case _ => - val adder = new Adder[V1] - val it = that.iterator - while (it.hasNext) { - adder.apply(it.next()) - } - adder.finalTree - }) - - override def removedAll(keys: IterableOnce[K]^): TreeMap[K, V] = keys match { - case ts: TreeSet[K] if ordering == ts.ordering => - newMapOrSelf(RB.difference(tree, ts.tree)) - case _ => super.removedAll(keys) - } - - /** A new TreeMap with the entry added is returned, - * assuming that key is not in the TreeMap. 
- * - * @tparam V1 type of the values of the new bindings, a supertype of `V` - * @param key the key to be inserted - * @param value the value to be associated with `key` - * @return a new $coll with the inserted binding, if it wasn't present in the map - */ - @deprecated("Use `updated` instead", "2.13.0") - def insert[V1 >: V](key: K, value: V1): TreeMap[K, V1] = { - assert(!RB.contains(tree, key)) - updated(key, value) - } - - def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = newMapOrSelf(RB.rangeImpl(tree, from, until)) - - override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) match { - case null => Option.empty - case x => Some((x.key, x.value)) - } - - override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) match { - case null => Option.empty - case x => Some((x.key, x.value)) - } - - override def range(from: K, until: K): TreeMap[K,V] = newMapOrSelf(RB.range(tree, from, until)) - - override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) - override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) - override def size: Int = RB.count(tree) - override def knownSize: Int = size - - override def isEmpty = size == 0 - - override def firstKey: K = RB.smallest(tree).key - - override def lastKey: K = RB.greatest(tree).key - - override def head: (K, V) = { - val smallest = RB.smallest(tree) - (smallest.key, smallest.value) - } - - override def last: (K, V) = { - val greatest = RB.greatest(tree) - (greatest.key, greatest.value) - } - - override def tail: TreeMap[K, V] = new TreeMap(RB.tail(tree)) - - override def init: TreeMap[K, V] = new TreeMap(RB.init(tree)) - - override def drop(n: Int): TreeMap[K, V] = { - if (n <= 0) this - else if (n >= size) empty - else new TreeMap(RB.drop(tree, n)) - } - - override def take(n: Int): TreeMap[K, V] = { - if (n <= 0) empty - else if (n >= size) this - else new TreeMap(RB.take(tree, n)) - } - - override def slice(from: Int, until: Int) = { - if (until <= from) empty - else if (from <= 0) take(until) - else if (until >= size) drop(from) - else new TreeMap(RB.slice(tree, from, until)) - } - - override def dropRight(n: Int): TreeMap[K, V] = take(size - math.max(n, 0)) - - override def takeRight(n: Int): TreeMap[K, V] = drop(size - math.max(n, 0)) - - private[this] def countWhile(p: ((K, V)) => Boolean): Int = { - var result = 0 - val it = iterator - while (it.hasNext && p(it.next())) result += 1 - result - } - - override def dropWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = drop(countWhile(p)) - - override def takeWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = take(countWhile(p)) - - override def span(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = splitAt(countWhile(p)) - - override def filter(f: ((K, V)) => Boolean): TreeMap[K, V] = - newMapOrSelf(RB.filterEntries[K, V](tree, (k, v) => f((k, v)))) - - override def partition(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = { - val (l, r) = RB.partitionEntries[K, V](tree, (k, v) => p((k, v))) - (newMapOrSelf(l), newMapOrSelf(r)) - } - - override def transform[W](f: (K, V) => W): TreeMap[K, W] = { - val t2 = RB.transform[K, V, W](tree, f) - if(t2 eq tree) this.asInstanceOf[TreeMap[K, W]] - else new TreeMap(t2) - } - - private final class Adder[B1 >: V] - extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] { - private var currentMutableTree: RB.Tree[K,B1] @uncheckedCaptures = tree0 - def finalTree = beforePublish(currentMutableTree) - override def apply(kv: (K, B1)): Unit = { - currentMutableTree = 
mutableUpd(currentMutableTree, kv._1, kv._2) - } - @tailrec def addAll(ls: LinearSeq[(K, B1)]): Unit = { - if (!ls.isEmpty) { - val kv = ls.head - currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) - addAll(ls.tail) - } - } - } - override def equals(obj: Any): Boolean = obj match { - case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) - case _ => super.equals(obj) - } - - override protected[this] def className = "TreeMap" -} - -/** $factoryInfo - * @define Coll immutable.TreeMap - * @define coll immutable tree map - */ -@SerialVersionUID(3L) -object TreeMap extends SortedMapFactory[TreeMap] { - - def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap() - - def from[K, V](it: IterableOnce[(K, V)]^)(implicit ordering: Ordering[K]): TreeMap[K, V] = - it match { - case tm: TreeMap[K, V] if ordering == tm.ordering => tm - case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering => - new TreeMap[K, V](RB.fromOrderedEntries(sm.iterator, sm.size)) - case _ => - var t: RB.Tree[K, V] = null - val i = it.iterator - while (i.hasNext) { - val (k, v) = i.next() - t = RB.update(t, k, v, overwrite = true) - } - new TreeMap[K, V](t) - } - - def newBuilder[K, V](implicit ordering: Ordering[K]): ReusableBuilder[(K, V), TreeMap[K, V]] = new TreeMapBuilder[K, V] - - private class TreeMapBuilder[K, V](implicit ordering: Ordering[K]) - extends RB.MapHelper[K, V] - with ReusableBuilder[(K, V), TreeMap[K, V]] { - type Tree = RB.Tree[K, V] - private var tree:Tree @uncheckedCaptures = null - - def addOne(elem: (K, V)): this.type = { - tree = mutableUpd(tree, elem._1, elem._2) - this - } - private object adder extends AbstractFunction2[K, V, Unit] { - // we cache tree to avoid the outer access to tree - // in the hot path (apply) - private[this] var accumulator: Tree @uncheckedCaptures = null - def addForEach(hasForEach: collection.Map[K, V]): Unit = { - accumulator = tree - hasForEach.foreachEntry(this) - tree = accumulator - // be friendly to GC - accumulator = null - } - - override def apply(key: K, value: V): Unit = { - accumulator = mutableUpd(accumulator, key, value) - } - } - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = { - xs match { - // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= - // for the moment we have to force immutability before the union - // which will waste some time and space - // calling `beforePublish` makes `tree` immutable - case ts: TreeMap[K, V] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree0 - else tree = RB.union(beforePublish(tree), ts.tree0) - case that: collection.Map[K, V] => - //add avoiding creation of tuples - adder.addForEach(that) - case _ => - super.addAll(xs) - } - this - } - - override def clear(): Unit = { - tree = null - } - - override def result(): TreeMap[K, V] = new TreeMap[K, V](beforePublish(tree)) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala deleted file mode 100644 index 91233669e5ca..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala +++ /dev/null @@ -1,651 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable - -import scala.annotation.tailrec -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** This class implements an immutable map that preserves order using - * a hash map for the key to value mapping to provide efficient lookup, - * and a tree for the ordering of the keys to provide efficient - * insertion/modification order traversal and destructuring. - * - * By default insertion order (`TreeSeqMap.OrderBy.Insertion`) - * is used, but modification order (`TreeSeqMap.OrderBy.Modification`) - * can be used instead if so specified at creation. - * - * The `orderingBy(orderBy: TreeSeqMap.OrderBy): TreeSeqMap[K, V]` method - * can be used to switch to the specified ordering for the returned map. - * - * A key can be manually refreshed (i.e. placed at the end) via the - * `refresh(key: K): TreeSeqMap[K, V]` method (regardless of the ordering in - * use). - * - * Internally, an ordinal counter is increased for each insertion/modification - * and then the current ordinal is used as key in the tree map. After 2^32^ - * insertions/modifications the entire map is copied (thus resetting the ordinal - * counter). - * - * @tparam K the type of the keys contained in this map. - * @tparam V the type of the values associated with the keys in this map. - * @define coll immutable tree seq map - * @define Coll `immutable.TreeSeqMap` - */ -final class TreeSeqMap[K, +V] private ( - private val ordering: TreeSeqMap.Ordering[K], - private val mapping: TreeSeqMap.Mapping[K, V], - private val ordinal: Int, - val orderedBy: TreeSeqMap.OrderBy) - extends AbstractMap[K, V] - with SeqMap[K, V] - with MapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, TreeSeqMap[K, V]] - with StrictOptimizedMapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] - with MapFactoryDefaults[K, V, TreeSeqMap, Iterable] { - - import TreeSeqMap._ - - override protected[this] def className: String = "TreeSeqMap" - - override def mapFactory: MapFactory[TreeSeqMap] = TreeSeqMap - - override val size = mapping.size - - override def knownSize: Int = size - - override def isEmpty = size == 0 - - /* - // This should have been overridden in 2.13.0 but wasn't so it will have to wait since it is not forwards compatible - // Now handled in inherited method from scala.collection.MapFactoryDefaults instead. - override def empty = TreeSeqMap.empty[K, V](orderedBy) - */ - - def orderingBy(orderBy: OrderBy): TreeSeqMap[K, V] = { - if (orderBy == this.orderedBy) this - else if (isEmpty) TreeSeqMap.empty(orderBy) - else new TreeSeqMap(ordering, mapping, ordinal, orderBy) - } - - def updated[V1 >: V](key: K, value: V1): TreeSeqMap[K, V1] = { - mapping.get(key) match { - case e if ordinal == -1 && (orderedBy == OrderBy.Modification || e.isEmpty) => - // Reinsert into fresh instance to restart ordinal counting, expensive but only done after 2^32 updates. - TreeSeqMap.empty[K, V1](orderedBy) ++ this + (key -> value) - case Some((o, _)) if orderedBy == OrderBy.Insertion => - new TreeSeqMap( - ordering.include(o, key), - mapping.updated[(Int, V1)](key, (o, value)), - ordinal, // Do not increment the ordinal since the key is already present, i.e. o <= ordinal. 
- orderedBy) - case Some((o, _)) => - val o1 = increment(ordinal) - new TreeSeqMap( - ordering.exclude(o).append(o1, key), - mapping.updated[(Int, V1)](key, (o1, value)), - o1, - orderedBy) - case None => - val o1 = increment(ordinal) - new TreeSeqMap( - ordering.append(o1, key), - mapping.updated[(Int, V1)](key, (o1, value)), - o1, - orderedBy) - } - } - - def removed(key: K): TreeSeqMap[K, V] = { - mapping.get(key) match { - case Some((o, _)) => - new TreeSeqMap( - ordering.exclude(o), - mapping.removed(key), - ordinal, - orderedBy) - case None => - this - } - } - - def refresh(key: K): TreeSeqMap[K, V] = { - mapping.get(key) match { - case Some((o, _)) => - val o1 = increment(ordinal) - new TreeSeqMap( - ordering.exclude(o).append(o1, key), - mapping, - o1, - orderedBy) - case None => - this - } - } - - def get(key: K): Option[V] = mapping.get(key).map(value) - - def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { - private[this] val iter = ordering.iterator - - override def hasNext: Boolean = iter.hasNext - - override def next(): (K, V) = binding(iter.next()) - } - - override def keysIterator: Iterator[K] = new AbstractIterator[K] { - private[this] val iter = ordering.iterator - - override def hasNext: Boolean = iter.hasNext - - override def next(): K = iter.next() - } - - override def valuesIterator: Iterator[V] = new AbstractIterator[V] { - private[this] val iter = ordering.iterator - - override def hasNext: Boolean = iter.hasNext - - override def next(): V = value(binding(iter.next())) - } - - override def contains(key: K): Boolean = mapping.contains(key) - - override def head: (K, V) = binding(ordering.head) - - override def headOption = ordering.headOption.map(binding) - - override def last: (K, V) = binding(ordering.last) - - override def lastOption: Option[(K, V)] = ordering.lastOption.map(binding) - - override def tail: TreeSeqMap[K, V] = { - val (head, tail) = ordering.headTail - new TreeSeqMap(tail, mapping.removed(head), ordinal, orderedBy) - } - - override def init: TreeSeqMap[K, V] = { - val (init, last) = ordering.initLast - new TreeSeqMap(init, mapping.removed(last), ordinal, orderedBy) - } - - override def slice(from: Int, until: Int): TreeSeqMap[K, V] = { - val sz = size - if (sz == 0 || from >= until) TreeSeqMap.empty[K, V](orderedBy) - else { - val sz = size - val f = if (from >= 0) from else 0 - val u = if (until <= sz) until else sz - val l = u - f - if (l <= 0) TreeSeqMap.empty[K, V](orderedBy) - else if (l > sz / 2) { - // Remove front and rear incrementally if majority of elements are to be kept - val (front, rest) = ordering.splitAt(f) - val (ong, rear) = rest.splitAt(l) - var mng = this.mapping - val frontIter = front.iterator - while (frontIter.hasNext) { - mng = mng - frontIter.next() - } - val rearIter = rear.iterator - while (rearIter.hasNext) { - mng = mng - rearIter.next() - } - new TreeSeqMap(ong, mng, ordinal, orderedBy) - } else { - // Populate with builder otherwise - val bdr = newBuilder[K @uncheckedCaptures, V @uncheckedCaptures](orderedBy) - val iter = ordering.iterator - var i = 0 - while (i < f) { - iter.next() - i += 1 - } - while (i < u) { - val k = iter.next() - bdr.addOne((k, mapping(k)._2)) - i += 1 - } - bdr.result() - } - } - } - - override def map[K2, V2](f: ((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) - val iter = ordering.iterator - while (iter.hasNext) { - val k = iter.next() - val (_, v) = mapping(k) - val (k2, v2) = f((k, v)) - 
bdr.addOne((k2, v2)) - } - bdr.result() - } - - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) - val iter = ordering.iterator - while (iter.hasNext) { - val k = iter.next() - val (_, v) = mapping(k) - val jter = f((k, v)).iterator - while (jter.hasNext) { - val (k2, v2) = jter.next() - bdr.addOne((k2, v2)) - } - } - bdr.result() - } - - override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) - val iter = ordering.iterator - while (iter.hasNext) { - val k = iter.next() - val (_, v) = mapping(k) - pf.runWith({ case (k2, v2) => bdr.addOne((k2, v2)) })((k, v)) - } - bdr.result() - } - - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): TreeSeqMap[K, V2] = { - var ong: Ordering[K] = ordering - var mng: Mapping[K, V2] = mapping - var ord = increment(ordinal) - val iter = suffix.iterator - while (iter.hasNext) { - val (k, v2) = iter.next() - mng.get(k) match { - case Some((o, v)) => - if (orderedBy == OrderBy.Insertion && v != v2) mng = mng.updated(k, (o, v2)) - else if (orderedBy == OrderBy.Modification) { - mng = mng.updated(k, (ord, v2)) - ong = ong.exclude(o).append(ord, k) - ord = increment(ord) - } - case None => - mng = mng.updated(k, (ord, v2)) - ong = ong.append(ord, k) - ord = increment(ord) - } - } - new TreeSeqMap[K, V2](ong, mng, ord, orderedBy) - } - - @`inline` private[this] def value(p: (_, V)) = p._2 - @`inline` private[this] def binding(k: K) = mapping(k).copy(_1 = k) -} -object TreeSeqMap extends MapFactory[TreeSeqMap] { - sealed trait OrderBy - object OrderBy { - case object Insertion extends OrderBy - case object Modification extends OrderBy - } - - private val EmptyByInsertion = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Insertion) - private val EmptyByModification = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Modification) - val Empty = EmptyByInsertion - def empty[K, V]: TreeSeqMap[K, V] = empty(OrderBy.Insertion) - def empty[K, V](orderBy: OrderBy): TreeSeqMap[K, V] = { - if (orderBy == OrderBy.Modification) EmptyByModification - else EmptyByInsertion - }.asInstanceOf[TreeSeqMap[K, V]] - - def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): TreeSeqMap[K, V] = - it match { - case om: TreeSeqMap[K, V] => om - case _ => (newBuilder[K, V] ++= it).result() - } - - @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1 - - def newBuilder[sealed K, sealed V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) - def newBuilder[sealed K, sealed V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) - - final class Builder[sealed K, sealed V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { - private[this] val bdr = new MapBuilderImpl[K, (Int, V)] - private[this] var ong = Ordering.empty[K] - private[this] var ord = 0 - private[this] var aliased: TreeSeqMap[K, V] = _ - - override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) - def addOne(key: K, value: V): this.type = { - if (aliased ne null) { - aliased = aliased.updated(key, value) - } else { - bdr.getOrElse(key, null) match { - case (o, v) => - if (orderedBy == OrderBy.Insertion && v != value) bdr.addOne(key, (o, value)) - else if (orderedBy == 
OrderBy.Modification) { - bdr.addOne(key, (ord, value)) - ong = ong.exclude(o).appendInPlace(ord, key) - ord = increment(ord) - } - case null => - bdr.addOne(key, (ord, value)) - ong = ong.appendInPlace(ord, key) - ord = increment(ord) - } - } - this - } - - override def clear(): Unit = { - ong = Ordering.empty - ord = 0 - bdr.clear() - aliased = null - } - - override def result(): TreeSeqMap[K, V] = { - if (aliased eq null) { - aliased = new TreeSeqMap(ong, bdr.result(), ord, orderedBy) - } - aliased - } - } - - private type Mapping[K, +V] = Map[K, (Int, V)] - @annotation.unused - private val Mapping = Map - - /* The ordering implementation below is an adapted version of immutable.IntMap. */ - private[immutable] object Ordering { - import scala.collection.generic.BitOperations.Int._ - - @inline private[immutable] def toBinaryString(i: Int): String = s"$i/${i.toBinaryString}" - - def empty[T] : Ordering[T] = Zero - - def apply[T](elems: (Int, T)*): Ordering[T] = - elems.foldLeft(empty[T])((x, y) => x.include(y._1, y._2)) - - // Iterator over a non-empty Ordering. - final class Iterator[+V](it: Ordering[V]) { - // Basically this uses a simple stack to emulate conversion over the tree. However - // because we know that Ints are at least 32 bits we can have at most 32 Bins and - // one Tip sitting on the tree at any point. Therefore we know the maximum stack - // depth is 33 - private[this] var index = 0 - private[this] val buffer = new Array[AnyRef](33) - - private[this] def pop = { - index -= 1 - buffer(index).asInstanceOf[Ordering[V]] - } - - private[this] def push[V2 >: V](x: Ordering[V2]): Unit = { - buffer(index) = x.asInstanceOf[AnyRef] - index += 1 - } - - if (it != Zero) push(it) - - def hasNext = index != 0 - @tailrec - def next(): V = - pop match { - case Bin(_,_, Tip(_, v), right) => - push(right) - v - case Bin(_, _, left, right) => - push(right) - push(left) - next() - case Tip(_, v) => v - // This should never happen. We don't allow Ordering.Zero in subtrees of the Ordering - // and don't return an Ordering.Iterator for Ordering.Zero. - case Zero => throw new IllegalStateException("empty subtree not allowed") - } - } - - object Iterator { - val Empty = new Iterator[Nothing](Ordering.empty[Nothing]) - def empty[V]: Iterator[V] = Empty.asInstanceOf[Iterator[V]] - } - - case object Zero extends Ordering[Nothing] { - // Important! Without this equals method in place, an infinite - // loop from Map.equals => size => pattern-match-on-Nil => equals - // develops. Case objects and custom equality don't mix without - // careful handling. 
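-      // (Editor's gloss, not in the original source.) The match below settles both cases
-      // without ever touching `size`: `case _: this.type` makes Zero equal only to itself,
-      // and `case _: Ordering[_]` declares every other Ordering (necessarily non-empty) unequal.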
- override def equals(that : Any): Boolean = that match { - case _: this.type => true - case _: Ordering[_] => false // The only empty Orderings are eq Nil - case _ => super.equals(that) - } - protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Ø" - } - - final case class Tip[+T](ord: Int, value: T) extends Ordering[T] { - def withValue[S](s: S) = - if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[Tip[S]] - else Tip(ord, s) - protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n" - } - - final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T @uncheckedCaptures] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { - def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = { - if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]] - else Bin[S](prefix, mask, left, right) - } - protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = { - sb ++= s"${prefix}Bin(${toBinaryString(this.prefix)}:${toBinaryString(mask)})\n" - left.format(sb, subPrefix + "├── ", subPrefix + "│ ") - right.format(sb, subPrefix + "└── ", subPrefix + " ") - } - } - - private def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) - - private def join[T](p1: Int, t1: Ordering[T], p2: Int, t2: Ordering[T]): Ordering[T] = { - val m = branchMask(p1, p2) - val p = mask(p1, m) - if (zero(p1, m)) Bin(p, m, t1, t2) - else Bin(p, m, t2, t1) - } - - private def bin[T](prefix: Int, mask: Int, left: Ordering[T], right: Ordering[T]): Ordering[T] = (left, right) match { - case (l, Zero) => l - case (Zero, r) => r - case (l, r) => Bin(prefix, mask, l, r) - } - } - - sealed abstract class Ordering[+T] { - import Ordering._ - import scala.annotation.tailrec - import scala.collection.generic.BitOperations.Int._ - - override final def toString: String = format - final def format: String = { - val sb = new StringBuilder - format(sb, "", "") - sb.toString() - } - protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit - - @tailrec - final def head: T = this match { - case Zero => throw new NoSuchElementException("head of empty map") - case Tip(k, v) => v - case Bin(_, _, l, _) => l.head - } - - @tailrec - final def headOption: Option[T] = this match { - case Zero => None - case Tip(_, v) => Some(v) - case Bin(_, _, l, _) => l.headOption - } - - @tailrec - final def last: T = this match { - case Zero => throw new NoSuchElementException("last of empty map") - case Tip(_, v) => v - case Bin(_, _, _, r) => r.last - } - - @tailrec - final def lastOption: Option[T] = this match { - case Zero => None - case Tip(_, v) => Some(v) - case Bin(_, _, _, r) => r.lastOption - } - - @tailrec - final def ordinal: Int = this match { - case Zero => 0 - case Tip(o, _) => o - case Bin(_, _, _, r) => r.ordinal - } - - final def tail: Ordering[T] = this match { - case Zero => throw new NoSuchElementException("tail of empty map") - case Tip(_, _) => Zero - case Bin(p, m, l, r) => bin(p, m, l.tail, r) - } - - final def headTail: (T, Ordering[T]) = this match { - case Zero => throw new NoSuchElementException("init of empty map") - case Tip(_, v) => (v, Zero) - case Bin(p, m, l, r) => - val (head, tail) = l.headTail - (head, bin(p, m, tail, r)) - } - - final def init: Ordering[T] = this match { - case Zero => throw new NoSuchElementException("init of empty map") - case Tip(_, _) => 
Zero
-      case Bin(p, m, l, r) =>
-        bin(p, m, l, r.init)
-    }
-
-    final def initLast: (Ordering[T], T) = this match {
-      case Zero => throw new NoSuchElementException("init of empty map")
-      case Tip(_, v) => (Zero, v)
-      case Bin(p, m, l, r) =>
-        val (init, last) = r.initLast
-        (bin(p, m, l, init), last)
-    }
-
-    final def iterator: Iterator[T] = this match {
-      case Zero => Iterator.empty
-      case _ => new Iterator(this)
-    }
-
-    final def include[S >: T](ordinal: Int, value: S): Ordering[S] = this match {
-      case Zero =>
-        Tip(ordinal, value)
-      case Tip(o, _) =>
-        if (ordinal == o) Tip(ordinal, value)
-        else join(ordinal, Tip(ordinal, value), o, this)
-      case Bin(p, m, l, r) =>
-        if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this)
-        else if (zero(ordinal, m)) Bin(p, m, l.include(ordinal, value), r)
-        else Bin(p, m, l, r.include(ordinal, value))
-    }
-
-    final def append[S >: T](ordinal: Int, value: S): Ordering[S] = this match {
-      case Zero =>
-        Tip(ordinal, value)
-      case Tip(o, _) =>
-        if (ordinal == o) Tip(ordinal, value)
-        else join(ordinal, Tip(ordinal, value), o, this)
-      case Bin(p, m, l, r) =>
-        if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this)
-        else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}")
-        else Bin(p, m, l, r.append(ordinal, value))
-    }
-
-    @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null, ordinal, value)
-    private[collection] final def appendInPlace1[S >: T](parent: Bin[S], ordinal: Int, value: S): Ordering[S] = this match {
-      case Zero =>
-        Tip(ordinal, value)
-      case Tip(o, _) if o >= ordinal =>
-        throw new IllegalArgumentException(s"Append called with ordinal out of range: $o is not greater than current max ordinal ${this.ordinal}")
-      case Tip(o, _) if parent == null =>
-        join(ordinal, Tip(ordinal, value), o, this)
-      case Tip(o, _) =>
-        parent.right = join(ordinal, Tip(ordinal, value), o, this)
-        parent
-      case b @ Bin(p, m, _, r) =>
-        if (!hasMatch(ordinal, p, m)) {
-          val b2 = join(ordinal, Tip(ordinal, value), p, this)
-          if (parent != null) {
-            parent.right = b2
-            parent
-          } else b2
-        } else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}")
-        else {
-          r.appendInPlace1(b, ordinal, value)
-          this
-        }
-    }
-
-    final def exclude(ordinal: Int): Ordering[T] = this match {
-      case Zero =>
-        Zero
-      case Tip(o, _) =>
-        if (ordinal == o) Zero
-        else this
-      case Bin(p, m, l, r) =>
-        if (!hasMatch(ordinal, p, m)) this
-        else if (zero(ordinal, m)) bin(p, m, l.exclude(ordinal), r)
-        else bin(p, m, l, r.exclude(ordinal))
-    }
-
-    final def splitAt(n: Int): (Ordering[T], Ordering[T]) = {
-      var rear: Ordering[T @uncheckedCaptures] = Ordering.empty[T]
-      var i = n
-      (modifyOrRemove { (o, v) =>
-        i -= 1
-        if (i >= 0) Some(v)
-        else {
-          rear = rear.appendInPlace(o, v)
-          None
-        }
-      }, rear)
-    }
-
-    /**
-     * A combined transform and filter function. Returns an `Ordering` such that
-     * for each `(key, value)` mapping in this map, if `f(key, value) == None`
-     * the map contains no mapping for key, and if `f(key, value) == Some(x)` the
-     * map contains `(key, x)`.
-     *
-     * @tparam S The type of the values in the resulting `Ordering`.
-     * @param f The transforming function.
-     * @return The modified map.
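-     *
-     * (Editor's illustration, not in the original scaladoc.) For example,
-     * `Ordering(1 -> "a", 2 -> "b").modifyOrRemove((o, v) => if (o == 1) None else Some(v))`
-     * drops the entry at ordinal 1 and keeps `2 -> "b"` unchanged (sharing the existing node).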
- */ - final def modifyOrRemove[S](f: (Int, T) => Option[S]): Ordering[S] = this match { - case Zero => Zero - case Tip(key, value) => - f(key, value) match { - case None => Zero - case Some(value2) => - // hack to preserve sharing - if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[Ordering[S]] - else Tip(key, value2) - } - case Bin(prefix, mask, left, right) => - val l = left.modifyOrRemove(f) - val r = right.modifyOrRemove(f) - if ((left eq l) && (right eq r)) this.asInstanceOf[Ordering[S]] - else bin(prefix, mask, l, r) - } - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala deleted file mode 100644 index c4241b818c38..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.ReusableBuilder -import scala.collection.immutable.{RedBlackTree => RB} -import scala.runtime.AbstractFunction1 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** This class implements immutable sorted sets using a tree. - * - * @tparam A the type of the elements contained in this tree set - * @param ordering the implicit ordering used to compare objects of type `A` - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] - * section on `Red-Black Trees` for more information. 
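- *
- * (Editor's illustration, not in the original scaladoc.) Iteration follows the element
- * ordering rather than insertion order: `TreeSet(3, 1, 2).toList` yields `List(1, 2, 3)`.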
- * - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A]) - extends AbstractSet[A] - with SortedSet[A] - with SortedSetOps[A, TreeSet, TreeSet[A]] - with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] - with SortedSetFactoryDefaults[A, TreeSet, Set] - with DefaultSerializable { - - if (ordering eq null) throw new NullPointerException("ordering must not be null") - - def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - - override def sortedIterableFactory = TreeSet - - private[this] def newSetOrSelf(t: RB.Tree[A, Any]) = if(t eq tree) this else new TreeSet[A](t) - - override def size: Int = RB.count(tree) - - override def isEmpty = size == 0 - - override def head: A = RB.smallest(tree).key - - override def last: A = RB.greatest(tree).key - - override def tail: TreeSet[A] = new TreeSet(RB.tail(tree)) - - override def init: TreeSet[A] = new TreeSet(RB.init(tree)) - - override def min[A1 >: A](implicit ord: Ordering[A1]): A = { - if ((ord eq ordering) && nonEmpty) { - head - } else { - super.min(ord) - } - } - - override def max[A1 >: A](implicit ord: Ordering[A1]): A = { - if ((ord eq ordering) && nonEmpty) { - last - } else { - super.max(ord) - } - } - - override def drop(n: Int): TreeSet[A] = { - if (n <= 0) this - else if (n >= size) empty - else new TreeSet(RB.drop(tree, n)) - } - - override def take(n: Int): TreeSet[A] = { - if (n <= 0) empty - else if (n >= size) this - else new TreeSet(RB.take(tree, n)) - } - - override def slice(from: Int, until: Int): TreeSet[A] = { - if (until <= from) empty - else if (from <= 0) take(until) - else if (until >= size) drop(from) - else new TreeSet(RB.slice(tree, from, until)) - } - - override def dropRight(n: Int): TreeSet[A] = take(size - math.max(n, 0)) - - override def takeRight(n: Int): TreeSet[A] = drop(size - math.max(n, 0)) - - private[this] def countWhile(p: A => Boolean): Int = { - var result = 0 - val it = iterator - while (it.hasNext && p(it.next())) result += 1 - result - } - override def dropWhile(p: A => Boolean): TreeSet[A] = drop(countWhile(p)) - - override def takeWhile(p: A => Boolean): TreeSet[A] = take(countWhile(p)) - - override def span(p: A => Boolean): (TreeSet[A], TreeSet[A]) = splitAt(countWhile(p)) - - override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) - - override def minAfter(key: A): Option[A] = { - val v = RB.minAfter(tree, key) - if (v eq null) Option.empty else Some(v.key) - } - - override def maxBefore(key: A): Option[A] = { - val v = RB.maxBefore(tree, key) - if (v eq null) Option.empty else Some(v.key) - } - - def iterator: Iterator[A] = RB.keysIterator(tree) - - def iteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Tree[A, Any] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => 
shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree, _.left, _.right, _.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - /** Checks if this set contains element `elem`. - * - * @param elem the element to check for membership. - * @return true, iff `elem` is contained in this set. - */ - def contains(elem: A): Boolean = RB.contains(tree, elem) - - override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until)) - - def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until)) - - /** Creates a new `TreeSet` with the entry added. - * - * @param elem a new element to add. - * @return a new $coll containing `elem` and all the elements of this $coll. - */ - def incl(elem: A): TreeSet[A] = - newSetOrSelf(RB.update(tree, elem, null, overwrite = false)) - - /** Creates a new `TreeSet` with the entry removed. - * - * @param elem a new element to add. - * @return a new $coll containing all the elements of this $coll except `elem`. - */ - def excl(elem: A): TreeSet[A] = - newSetOrSelf(RB.delete(tree, elem)) - - override def concat(that: collection.IterableOnce[A]): TreeSet[A] = { - val t = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - RB.union(tree, ts.tree) - case _ => - val it = that.iterator - var t = tree - while (it.hasNext) t = RB.update(t, it.next(), null, overwrite = false) - t - } - newSetOrSelf(t) - } - - override def removedAll(that: IterableOnce[A]): TreeSet[A] = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - newSetOrSelf(RB.difference(tree, ts.tree)) - case _ => - //TODO add an implementation of a mutable subtractor similar to TreeMap - //but at least this doesn't create a TreeSet for each iteration - object sub extends AbstractFunction1[A, Unit] { - var currentTree = tree - override def apply(k: A): Unit = { - currentTree = RB.delete(currentTree, k) - } - } - that.iterator.foreach(sub) - newSetOrSelf(sub.currentTree) - } - - override def intersect(that: collection.Set[A]): TreeSet[A] = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - newSetOrSelf(RB.intersect(tree, ts.tree)) - case _ => - super.intersect(that) - } - - override def diff(that: collection.Set[A]): TreeSet[A] = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - newSetOrSelf(RB.difference(tree, ts.tree)) - case _ => - super.diff(that) - } - - override def filter(f: A => Boolean): TreeSet[A] = newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => f(k)})) - - override def partition(p: A => Boolean): (TreeSet[A], TreeSet[A]) = { - val (l, r) = RB.partitionEntries(tree, {(a:A, _: Any) => p(a)}) - (newSetOrSelf(l), newSetOrSelf(r)) - } - - override def equals(obj: Any): Boolean = obj match { - case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree) - case _ => super.equals(obj) - } - - override protected[this] def className = "TreeSet" -} - -/** - * $factoryInfo - * - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - */ -@SerialVersionUID(3L) -object TreeSet extends SortedIterableFactory[TreeSet] { - - def empty[A: Ordering]: TreeSet[A] = new TreeSet[A] - - def from[E](it: scala.collection.IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = - it match { - case ts: TreeSet[E] if ordering == ts.ordering => ts - case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => - new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) - case r: Range if (ordering eq Ordering.Int) || 
(Ordering.Int isReverseOf ordering) => - val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator - val tree = RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size) - // The cast is needed to compile with Dotty: - // Dotty doesn't infer that E =:= Int, since instantiation of covariant GADTs is unsound - new TreeSet[E](tree) - case _ => - var t: RB.Tree[E, Null] = null - val i = it.iterator - while (i.hasNext) t = RB.update(t, i.next(), null, overwrite = false) - new TreeSet[E](t) - } - - def newBuilder[A](implicit ordering: Ordering[A]): ReusableBuilder[A, TreeSet[A]] = new TreeSetBuilder[A] - private class TreeSetBuilder[A](implicit ordering: Ordering[A]) - extends RB.SetHelper[A] - with ReusableBuilder[A, TreeSet[A]] { - type Tree = RB.Tree[A, Any] - private [this] var tree:RB.Tree[A @uncheckedCaptures, Any] = null - - override def addOne(elem: A): this.type = { - tree = mutableUpd(tree, elem) - this - } - - override def addAll(xs: IterableOnce[A]^): this.type = { - xs match { - // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= - // for the moment we have to force immutability before the union - // which will waste some time and space - // calling `beforePublish` makes `tree` immutable - case ts: TreeSet[A] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree - else tree = RB.union(beforePublish(tree), ts.tree)(ordering) - case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree0 - else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) - case _ => - super.addAll(xs) - } - this - } - - override def clear(): Unit = { - tree = null - } - - override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala deleted file mode 100644 index d9d33add512d..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Vector.scala +++ /dev/null @@ -1,2476 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection -package immutable - -import java.lang.Math.{abs, max => mmax, min => mmin} -import java.util.Arrays.{copyOf, copyOfRange} -import java.util.{Arrays, Spliterator} - -import scala.annotation.switch -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.immutable.VectorInline._ -import scala.collection.immutable.VectorStatics._ -import scala.collection.mutable.ReusableBuilder -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - - -/** $factoryInfo - * @define Coll `Vector` - * @define coll vector - */ -@SerialVersionUID(3L) -object Vector extends StrictOptimizedSeqFactory[Vector] { - - def empty[A]: Vector[A] = Vector0 - - def from[E](it: collection.IterableOnce[E]^): Vector[E] = - it match { - case v: Vector[E] => v - case _ => - val knownSize = it.knownSize - if (knownSize == 0) empty[E] - else if (knownSize > 0 && knownSize <= WIDTH) { - val a1: Arr1 = it match { - case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] => - as.unsafeArray.asInstanceOf[Arr1] - case it: Iterable[E] => - val a1 = new Arr1(knownSize) - it.copyToArray(a1.asInstanceOf[Array[Any]]) - a1 - case _ => - val a1 = new Arr1(knownSize) - it.iterator.copyToArray(a1.asInstanceOf[Array[Any]]) - a1.asInstanceOf[Arr1] - } - new Vector1[E](a1) - } else { - (newBuilder ++= it).result() - } - } - - def newBuilder[A]: ReusableBuilder[A, Vector[A]] = new VectorBuilder[A] - - /** Create a Vector with the same element at each index. - * - * Unlike `fill`, which takes a by-name argument for the value and can thereby - * compute different values for each index, this method guarantees that all - * elements are identical. This allows sparse allocation in O(log n) time and space. - */ - private[collection] def fillSparse[A](n: Int)(elem: A): Vector[A] = { - //TODO Make public; this method is private for now because it is not forward binary compatible - if(n <= 0) Vector0 - else { - val b = new VectorBuilder[A] - b.initSparse(n, elem) - b.result() - } - } - - private val defaultApplyPreferredMaxLength: Int = - try System.getProperty("scala.collection.immutable.Vector.defaultApplyPreferredMaxLength", - "250").toInt - catch { - case _: SecurityException => 250 - } - - private val emptyIterator = new NewVectorIterator(Vector0, 0, 0) -} - - -/** Vector is a general-purpose, immutable data structure. It provides random access and updates - * in O(log n) time, as well as very fast append/prepend/tail/init (amortized O(1), worst case O(log n)). - * Because vectors strike a good balance between fast random selections and fast random functional updates, - * they are currently the default implementation of immutable indexed sequences. - * - * Vectors are implemented by radix-balanced finger trees of width 32. There is a separate subclass - * for each level (0 to 6, with 0 being the empty vector and 6 a tree with a maximum width of 64 at the - * top level). - * - * Tree balancing: - * - Only the first dimension of an array may have a size < WIDTH - * - In a `data` (central) array the first dimension may be up to WIDTH-2 long, in `prefix1` and `suffix1` up - * to WIDTH, and in other `prefix` and `suffix` arrays up to WIDTH-1 - * - `prefix1` and `suffix1` are never empty - * - Balancing does not cross the main data array (i.e. prepending never touches the suffix and appending never touches - * the prefix). 
The level is increased/decreased when the affected side plus main data is already full/empty - * - All arrays are left-aligned and truncated - * - * In addition to the data slices (`prefix1`, `prefix2`, ..., `dataN`, ..., `suffix2`, `suffix1`) we store a running - * count of elements after each prefix for more efficient indexing without having to dereference all prefix arrays. - */ -sealed abstract class Vector[+A] private[immutable] (private[immutable] final val prefix1: Arr1) - extends AbstractSeq[A] - with IndexedSeq[A] - with IndexedSeqOps[A, Vector, Vector[A]] - with StrictOptimizedSeqOps[A, Vector, Vector[A]] - with IterableFactoryDefaults[A, Vector] - with DefaultSerializable { - - override def iterableFactory: SeqFactory[Vector] = Vector - - override final def length: Int = - if(this.isInstanceOf[BigVector[_]]) this.asInstanceOf[BigVector[_]].length0 - else prefix1.length - - override final def iterator: Iterator[A] = - if(this.isInstanceOf[Vector0.type]) Vector.emptyIterator - else new NewVectorIterator(this, length, vectorSliceCount) - - override final protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Vector[A] = { - var i = 0 - val len = prefix1.length - while (i != len) { - if (pred(prefix1(i).asInstanceOf[A]) == isFlipped) { - // each 1 bit indicates that index passes the filter. - // all indices < i are also assumed to pass the filter - var bitmap = 0 - var j = i + 1 - while (j < len) { - if (pred(prefix1(j).asInstanceOf[A]) != isFlipped) { - bitmap |= (1 << j) - } - j += 1 - } - val newLen = i + java.lang.Integer.bitCount(bitmap) - - if(this.isInstanceOf[BigVector[_]]) { - val b = new VectorBuilder[A] - var k = 0 - while(k < i) { - b.addOne(prefix1(k).asInstanceOf[A]) - k += 1 - } - k = i + 1 - while (i != newLen) { - if (((1 << k) & bitmap) != 0) { - b.addOne(prefix1(k).asInstanceOf[A]) - i += 1 - } - k += 1 - } - this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } - return b.result() - } else { - if (newLen == 0) return Vector0 - val newData = new Array[AnyRef](newLen) - System.arraycopy(prefix1, 0, newData, 0, i) - var k = i + 1 - while (i != newLen) { - if (((1 << k) & bitmap) != 0) { - newData(i) = prefix1(k) - i += 1 - } - k += 1 - } - return new Vector1[A](newData) - } - } - i += 1 - } - if(this.isInstanceOf[BigVector[_]]) { - val b = new VectorBuilder[A] - b.initFrom(prefix1) - this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } - b.result() - } else this - } - - // Dummy overrides to refine result types for binary compatibility: - override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem) - override def appended[B >: A](elem: B): Vector[B] = super.appended(elem) - override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem) - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): Vector[B] = { - val k = prefix.knownSize - if (k == 0) this - else if (k < 0) super.prependedAll(prefix) - else prependedAll0(prefix, k) - } - - override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): Vector[B] = { - val k = suffix.knownSize - if (k == 0) this - else if (k < 0) super.appendedAll(suffix) - else appendedAll0(suffix, k) - } - - protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - // k >= 0, k = prefix.knownSize - val tinyAppendLimit = 4 + vectorSliceCount - if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) { - var v: 
Vector[B] = this - val it = IndexedSeq.from(prefix).reverseIterator - while (it.hasNext) v = it.next() +: v - v - } else if (this.size < (k >>> Log2ConcatFaster) && prefix.isInstanceOf[Vector[_]]) { - var v = prefix.asInstanceOf[Vector[B]] - val it = this.iterator - while (it.hasNext) v = v :+ it.next() - v - } else if (k < this.size - AlignToFaster) { - new VectorBuilder[B].alignTo(k, this).addAll(prefix).addAll(this).result() - } else super.prependedAll(prefix) - } - - protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - // k >= 0, k = suffix.knownSize - val tinyAppendLimit = 4 + vectorSliceCount - if (k < tinyAppendLimit) { - var v: Vector[B @uncheckedCaptures] = this - suffix match { - case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) - case _ => suffix.iterator.foreach(x => v = v.appended(x)) - } - v - } else if (this.size < (k >>> Log2ConcatFaster) && suffix.isInstanceOf[Vector[_]]) { - var v = suffix.asInstanceOf[Vector[B]] - val ri = this.reverseIterator - while (ri.hasNext) v = v.prepended(ri.next()) - v - } else if (this.size < k - AlignToFaster && suffix.isInstanceOf[Vector[_]]) { - val v = suffix.asInstanceOf[Vector[B]] - new VectorBuilder[B].alignTo(this.size, v).addAll(this).addAll(v).result() - } else new VectorBuilder[B].initFrom(this).addAll(suffix).result() - } - - override def className = "Vector" - - @inline override final def take(n: Int): Vector[A] = slice(0, n) - @inline override final def drop(n: Int): Vector[A] = slice(n, length) - @inline override final def takeRight(n: Int): Vector[A] = slice(length - mmax(n, 0), length) - @inline override final def dropRight(n: Int): Vector[A] = slice(0, length - mmax(n, 0)) - override def tail: Vector[A] = slice(1, length) - override def init: Vector[A] = slice(0, length-1) - - /** Like slice but parameters must be 0 <= lo < hi < length */ - protected[this] def slice0(lo: Int, hi: Int): Vector[A] - - /** Number of slices */ - protected[immutable] def vectorSliceCount: Int - /** Slice at index */ - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] - /** Length of all slices up to and including index */ - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) - - override def toVector: Vector[A] = this - - override protected def applyPreferredMaxLength: Int = Vector.defaultApplyPreferredMaxLength - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - val s = shape.shape match { - case StepperShape.IntShape => new IntVectorStepper(iterator.asInstanceOf[NewVectorIterator[Int]]) - case StepperShape.LongShape => new LongVectorStepper(iterator.asInstanceOf[NewVectorIterator[Long]]) - case StepperShape.DoubleShape => new DoubleVectorStepper(iterator.asInstanceOf[NewVectorIterator[Double]]) - case _ => shape.parUnbox(new AnyVectorStepper[A](iterator.asInstanceOf[NewVectorIterator[A]])) - } - s.asInstanceOf[S with EfficientSplit] - } - - protected[this] def ioob(index: Int): IndexOutOfBoundsException = - new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${length-1})") - - override final def head: A = - if (prefix1.length == 0) throw new NoSuchElementException("empty.head") - else prefix1(0).asInstanceOf[A] - - override final def last: A = { - if(this.isInstanceOf[BigVector[_]]) { - val suffix = this.asInstanceOf[BigVector[_]].suffix1 - 
if(suffix.length == 0) throw new NoSuchElementException("empty.tail") - else suffix(suffix.length-1) - } else prefix1(prefix1.length-1) - }.asInstanceOf[A] - - override final def foreach[U](f: A => U): Unit = { - val c = vectorSliceCount - var i = 0 - while (i < c) { - foreachRec(vectorSliceDim(c, i) - 1, vectorSlice(i), f) - i += 1 - } - } - - // The following definitions are needed for binary compatibility with ParVector - private[collection] def startIndex: Int = 0 - private[collection] def endIndex: Int = length - private[collection] def initIterator[B >: A](s: VectorIterator[B]): Unit = - s.it = iterator.asInstanceOf[NewVectorIterator[B]] -} - - -/** This class only exists because we cannot override `slice` in `Vector` in a binary-compatible way */ -private sealed abstract class VectorImpl[+A](_prefix1: Arr1) extends Vector[A](_prefix1) { - - override final def slice(from: Int, until: Int): Vector[A] = { - val lo = mmax(from, 0) - val hi = mmin(until, length) - if (hi <= lo) Vector0 - else if (hi - lo == length) this - else slice0(lo, hi) - } -} - - -/** Vector with suffix and length fields; all Vector subclasses except Vector1 extend this */ -private sealed abstract class BigVector[+A](_prefix1: Arr1, private[immutable] val suffix1: Arr1, private[immutable] val length0: Int) extends VectorImpl[A](_prefix1) { - - protected[immutable] final def foreachRest[U](f: A => U): Unit = { - val c = vectorSliceCount - var i = 1 - while(i < c) { - foreachRec(vectorSliceDim(c, i)-1, vectorSlice(i), f) - i += 1 - } - } -} - - -/** Empty vector */ -private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) { - - def apply(index: Int): Nothing = throw ioob(index) - - override def updated[B >: Nothing](index: Int, elem: B): Vector[B] = throw ioob(index) - - override def appended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) - - override def prepended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) - - override def map[B](f: Nothing => B): Vector[B] = this - - override def tail: Vector[Nothing] = throw new UnsupportedOperationException("empty.tail") - - override def init: Vector[Nothing] = throw new UnsupportedOperationException("empty.init") - - protected[this] def slice0(lo: Int, hi: Int): Vector[Nothing] = this - - protected[immutable] def vectorSliceCount: Int = 0 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = null - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = 0 - - override def equals(o: Any): Boolean = { - if(this eq o.asInstanceOf[AnyRef]) true - else o match { - case that: Vector[_] => false - case o => super.equals(o) - } - } - - override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - Vector.from(prefix) - - override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = - Vector.from(suffix) - - override protected[this] def ioob(index: Int): IndexOutOfBoundsException = - new IndexOutOfBoundsException(s"$index is out of bounds (empty vector)") -} - -/** Flat ArraySeq-like structure */ -private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) { - - @inline def apply(index: Int): A = { - if(index >= 0 && index < prefix1.length) - prefix1(index).asInstanceOf[A] - else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < prefix1.length) - new Vector1(copyUpdate(prefix1, index, elem)) - else throw ioob(index) - } - - override 
def appended[B >: A](elem: B): Vector[B] = { - val len1 = prefix1.length - if(len1 < WIDTH) new Vector1(copyAppend1(prefix1, elem)) - else new Vector2(prefix1, WIDTH, empty2, wrap1(elem), WIDTH+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - val len1 = prefix1.length - if(len1 < WIDTH) new Vector1(copyPrepend1(elem, prefix1)) - else new Vector2(wrap1(elem), 1, empty2, prefix1, len1+1) - } - - override def map[B](f: A => B): Vector[B] = new Vector1(mapElems1(prefix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = - new Vector1(copyOfRange(prefix1, lo, hi)) - - override def tail: Vector[A] = - if(prefix1.length == 1) Vector0 - else new Vector1(copyTail(prefix1)) - - override def init: Vector[A] = - if(prefix1.length == 1) Vector0 - else new Vector1(copyInit(prefix1)) - - protected[immutable] def vectorSliceCount: Int = 1 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1 - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case data1b => new Vector1(data1b) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val data1b = append1IfSpace(prefix1, suffix) - if(data1b ne null) new Vector1(data1b) - else super.appendedAll0(suffix, k) - } -} - - -/** 2-dimensional radix-balanced finger tree */ -private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int, - private[immutable] val data2: Arr2, - _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - data2: Arr2 = data2, - suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector2(prefix1, len1, data2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len1 - if(io >= 0) { - val i2 = io >>> BITS - val i1 = io & MASK - if(i2 < data2.length) data2(i2)(i1) - else suffix1(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= len1) { - val io = index - len1 - val i2 = io >>> BITS - val i1 = io & MASK - if(i2 < data2.length) copy(data2 = copyUpdate(data2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else { - copy(prefix1 = copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - override def appended[B >: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(data2.length < WIDTH-2) copy(data2 = copyAppend(data2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else new Vector3(prefix1, len1, data2, WIDTH*(WIDTH-2) + len1, empty3, wrap2(suffix1), wrap1(elem), length0+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, length0 = length0+1) - else if(data2.length < WIDTH-2) copy(wrap1(elem), 1, copyPrepend(prefix1, data2), length0 = length0+1) - else new Vector3(wrap1(elem), 1, wrap2(prefix1), len1+1, empty3, data2, suffix1, length0+1) - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), data2 = mapElems(2, 
data2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, data2) - b.consider(1, suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(copyTail(prefix1), len1-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 3 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => data2 - case 2 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => length0 - suffix1.length - case 2 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** 3-dimensional radix-balanced finger tree */ -private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int, - private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, - private[immutable] val data3: Arr3, - private[immutable] val suffix2: Arr2, _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - prefix2: Arr2 = prefix2, len12: Int = len12, - data3: Arr3 = data3, - suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector3(prefix1, len1, prefix2, len12, data3, suffix2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len12 - if(io >= 0) { - val i3 = io >>> BITS2 - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if(i3 < data3.length) data3(i3)(i2)(i1) - else if(i2 < suffix2.length) suffix2(i2)(i1) - else suffix1(i1) - } else if(index >= len1) { - val io = index - len1 - prefix2(io >>> BITS)(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= len12) { - val io = index - len12 - val i3 = io >>> BITS2 - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if (i3 < data3.length ) copy(data3 = copyUpdate(data3, i3, i2, i1, elem)) - else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else if(index >= len1) { - val io = index - len1 - copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) - } else { - copy(prefix1 = copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - override def appended[B >: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(suffix2.length < WIDTH-1) copy(suffix2 
= copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else if(data3.length < WIDTH-2) copy(data3 = copyAppend(data3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else new Vector4(prefix1, len1, prefix2, len12, data3, (WIDTH-2)*WIDTH2 + len12, empty4, wrap3(copyAppend(suffix2, suffix1)), empty2, wrap1(elem), length0+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(prefix1 = copyPrepend1(elem, prefix1), len1 = len1+1, len12 = len12+1, length0 = length0+1) - else if(len12 < WIDTH2 ) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = copyPrepend(prefix1, prefix2), len12 = len12+1, length0 = length0+1) - else if(data3.length < WIDTH-2) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = empty2, len12 = 1, data3 = copyPrepend(copyPrepend(prefix1, prefix2), data3), length0 = length0+1) - else new Vector4(wrap1(elem), 1, empty2, 1, wrap3(copyPrepend(prefix1, prefix2)), len12+1, empty4, data3, suffix2, suffix1, length0+1) - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), - data3 = mapElems(3, data3, f), - suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, prefix2) - b.consider(3, data3) - b.consider(2, suffix2) - b.consider(1, suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(prefix1 = copyTail(prefix1), len1 = len1-1, len12 = len12-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 5 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => prefix2 - case 2 => data3 - case 3 => suffix2 - case 4 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => len12 - case 2 => len12 + data3.length*WIDTH2 - case 3 => length0 - suffix1.length - case 4 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - len12 = len12 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** 4-dimensional radix-balanced finger tree */ -private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int, - private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, - private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, - private[immutable] val data4: Arr4, - private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - prefix2: 
Arr2 = prefix2, len12: Int = len12, - prefix3: Arr3 = prefix3, len123: Int = len123, - data4: Arr4 = data4, - suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len123 - if(io >= 0) { - val i4 = io >>> BITS3 - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if(i4 < data4.length) data4(i4)(i3)(i2)(i1) - else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) - else if(i2 < suffix2.length) suffix2(i2)(i1) - else suffix1(i1) - } else if(index >= len12) { - val io = index - len12 - prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len1) { - val io = index - len1 - prefix2(io >>> BITS)(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= len123) { - val io = index - len123 - val i4 = io >>> BITS3 - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if (i4 < data4.length ) copy(data4 = copyUpdate(data4, i4, i3, i2, i1, elem)) - else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) - else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else if(index >= len12) { - val io = index - len12 - copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len1) { - val io = index - len1 - copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) - } else { - copy(prefix1 = copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - override def appended[B >: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(data4.length < WIDTH-2) copy(data4 = copyAppend(data4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, data4, (WIDTH-2)*WIDTH3 + len123, empty5, wrap4(copyAppend(suffix3, copyAppend(suffix2, suffix1))), empty3, empty2, wrap1(elem), length0+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, length0 = length0+1) - else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, length0 = length0+1) - else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, length0 = length0+1) - else if(data4.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), data4), length0 = length0+1) - else new Vector5(wrap1(elem), 1, empty2, 1, empty3, 1, wrap4(copyPrepend(copyPrepend(prefix1, prefix2), prefix3)), len123+1, empty5, data4, suffix3, suffix2, suffix1, 
length0+1) - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), - data4 = mapElems(4, data4, f), - suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, prefix2) - b.consider(3, prefix3) - b.consider(4, data4) - b.consider(3, suffix3) - b.consider(2, suffix2) - b.consider(1, suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 7 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => prefix2 - case 2 => prefix3 - case 3 => data4 - case 4 => suffix3 - case 5 => suffix2 - case 6 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => len12 - case 2 => len123 - case 3 => len123 + data4.length*WIDTH3 - case 4 => len123 + data4.length*WIDTH3 + suffix3.length*WIDTH2 - case 5 => length0 - suffix1.length - case 6 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - len12 = len12 + diff, - len123 = len123 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** 5-dimensional radix-balanced finger tree */ -private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int, - private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, - private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, - private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, - private[immutable] val data5: Arr5, - private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - prefix2: Arr2 = prefix2, len12: Int = len12, - prefix3: Arr3 = prefix3, len123: Int = len123, - prefix4: Arr4 = prefix4, len1234: Int = len1234, - data5: Arr5 = data5, - suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len1234 - if(io >= 0) { - val i5 = io >>> BITS4 - val i4 = (io >>> BITS3) & MASK - val i3 = (io >>> BITS2) & MASK - 
val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if(i5 < data5.length) data5(i5)(i4)(i3)(i2)(i1) - else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) - else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) - else if(i2 < suffix2.length) suffix2(i2)(i1) - else suffix1(i1) - } else if(index >= len123) { - val io = index - len123 - prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len12) { - val io = index - len12 - prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len1) { - val io = index - len1 - prefix2(io >>> BITS)(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= len1234) { - val io = index - len1234 - val i5 = io >>> BITS4 - val i4 = (io >>> BITS3) & MASK - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if (i5 < data5.length ) copy(data5 = copyUpdate(data5, i5, i4, i3, i2, i1, elem)) - else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) - else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) - else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else if(index >= len123) { - val io = index - len123 - copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len12) { - val io = index - len12 - copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len1) { - val io = index - len1 - copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) - } else { - copy(prefix1 = copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - override def appended[B >: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix4.length < WIDTH-1) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(data5.length < WIDTH-2) copy(data5 = copyAppend(data5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, (WIDTH-2)*WIDTH4 + len1234, empty6, wrap5(copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), empty4, empty3, empty2, wrap1(elem), length0+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) - else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) - else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, length0 = 
length0+1) - else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, length0 = length0+1) - else if(data5.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), data5), length0 = length0+1) - else new Vector6(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, wrap5(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4)), len1234+1, empty6, data5, suffix4, suffix3, suffix2, suffix1, length0+1) - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), - data5 = mapElems(5, data5, f), - suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, prefix2) - b.consider(3, prefix3) - b.consider(4, prefix4) - b.consider(5, data5) - b.consider(4, suffix4) - b.consider(3, suffix3) - b.consider(2, suffix2) - b.consider(1, suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 9 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => prefix2 - case 2 => prefix3 - case 3 => prefix4 - case 4 => data5 - case 5 => suffix4 - case 6 => suffix3 - case 7 => suffix2 - case 8 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => len12 - case 2 => len123 - case 3 => len1234 - case 4 => len1234 + data5.length*WIDTH4 - case 5 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 - case 6 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 - case 7 => length0 - suffix1.length - case 8 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - len12 = len12 + diff, - len123 = len123 + diff, - len1234 = len1234 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** 6-dimensional radix-balanced finger tree */ -private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int, - private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, - private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, - private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, - private[immutable] val prefix5: Arr5, 
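// Dispatch note: apply and updated first locate the region an index falls into
// via the cached prefix sums (len1, len12, len123, ...) and only then do the
// radix arithmetic. A standalone sketch of that comparison chain for the
// five-level case, with parameter names invented here for illustration:
object RegionDemo {
  def regionOf(index: Int, len1: Int, len12: Int, len123: Int, len1234: Int): String =
    if (index >= len1234) "data5 or one of the suffixes"
    else if (index >= len123) "prefix4"
    else if (index >= len12) "prefix3"
    else if (index >= len1) "prefix2"
    else "prefix1"
  def main(args: Array[String]): Unit =
    println(regionOf(40, len1 = 10, len12 = 20, len123 = 30, len1234 = 50)) // prefix4
}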
private[immutable] val len12345: Int, - private[immutable] val data6: Arr6, - private[immutable] val suffix5: Arr5, private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - prefix2: Arr2 = prefix2, len12: Int = len12, - prefix3: Arr3 = prefix3, len123: Int = len123, - prefix4: Arr4 = prefix4, len1234: Int = len1234, - prefix5: Arr5 = prefix5, len12345: Int = len12345, - data6: Arr6 = data6, - suffix5: Arr5 = suffix5, suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len12345 - if(io >= 0) { - val i6 = io >>> BITS5 - val i5 = (io >>> BITS4) & MASK - val i4 = (io >>> BITS3) & MASK - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if(i6 < data6.length) data6(i6)(i5)(i4)(i3)(i2)(i1) - else if(i5 < suffix5.length) suffix5(i5)(i4)(i3)(i2)(i1) - else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) - else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) - else if(i2 < suffix2.length) suffix2(i2)(i1) - else suffix1(i1) - } else if(index >= len1234) { - val io = index - len1234 - prefix5(io >>> BITS4)((io >>> BITS3) & MASK)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len123) { - val io = index - len123 - prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len12) { - val io = index - len12 - prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len1) { - val io = index - len1 - prefix2(io >>> BITS)(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= len12345) { - val io = index - len12345 - val i6 = io >>> BITS5 - val i5 = (io >>> BITS4) & MASK - val i4 = (io >>> BITS3) & MASK - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if (i6 < data6.length ) copy(data6 = copyUpdate(data6, i6, i5, i4, i3, i2, i1, elem)) - else if(i5 < suffix5.length) copy(suffix5 = copyUpdate(suffix5, i5, i4, i3, i2, i1, elem)) - else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) - else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) - else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else if(index >= len1234) { - val io = index - len1234 - copy(prefix5 = copyUpdate(prefix5, io >>> BITS4, (io >>> BITS3) & MASK, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len123) { - val io = index - len123 - copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len12) { - val io = index - len12 - copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len1) { - val io = index - len1 - copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) - } else 
{ - copy(prefix1 = copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - override def appended[B >: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(suffix2.length < WIDTH-1 ) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix3.length < WIDTH-1 ) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix4.length < WIDTH-1 ) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix5.length < WIDTH-1 ) copy(suffix5 = copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(data6.length < LASTWIDTH-2) copy(data6 = copyAppend(data6, copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))))), suffix5 = empty5, suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else throw new IllegalArgumentException - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) - else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) - else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) - else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, len12345 = len12345+1, length0 = length0+1) - else if(len12345 < WIDTH5 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), len12345+1, length0 = length0+1) - else if(data6.length < LASTWIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, empty5, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), data6), length0 = length0+1) - else throw new IllegalArgumentException - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), prefix5 = mapElems(5, prefix5, f), - data6 = mapElems(6, data6, f), - suffix5 = mapElems(5, suffix5, f), suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, prefix2) - b.consider(3, prefix3) - b.consider(4, prefix4) - b.consider(5, prefix5) - b.consider(6, data6) - b.consider(5, suffix5) - b.consider(4, suffix4) - b.consider(3, suffix3) - b.consider(2, suffix2) - b.consider(1, suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = 
len1234-1, len12345 = len12345-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 11 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => prefix2 - case 2 => prefix3 - case 3 => prefix4 - case 4 => prefix5 - case 5 => data6 - case 6 => suffix5 - case 7 => suffix4 - case 8 => suffix3 - case 9 => suffix2 - case 10 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => len12 - case 2 => len123 - case 3 => len1234 - case 4 => len12345 - case 5 => len12345 + data6.length*WIDTH5 - case 6 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 - case 7 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 - case 8 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 - case 9 => length0 - suffix1.length - case 10 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - len12 = len12 + diff, - len123 = len123 + diff, - len1234 = len1234 + diff, - len12345 = len12345 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** Helper class for vector slicing. It is initialized with the validated start and end index, - * then the vector slices are added in succession with `consider`. No matter what the dimension - * of the originating vector is or where the cut is performed, this always results in a - * structure with the highest-dimensional data in the middle and fingers of decreasing dimension - * at both ends, which can be turned into a new vector with very little rebalancing. 
- */ -private final class VectorSliceBuilder(lo: Int, hi: Int) { - //println(s"***** VectorSliceBuilder($lo, $hi)") - - private[this] val slices = new Array[Array[AnyRef]](11) - private[this] var len, pos, maxDim = 0 - - @inline private[this] def prefixIdx(n: Int) = n-1 - @inline private[this] def suffixIdx(n: Int) = 11-n - - def consider[T <: AnyRef](n: Int, a: Array[T]): Unit = { - //println(s"***** consider($n, /${a.length})") - val count = a.length * (1 << (BITS*(n-1))) - val lo0 = mmax(lo-pos, 0) - val hi0 = mmin(hi-pos, count) - if(hi0 > lo0) { - addSlice(n, a, lo0, hi0) - len += (hi0 - lo0) - } - pos += count - } - - private[this] def addSlice[T <: AnyRef](n: Int, a: Array[T], lo: Int, hi: Int): Unit = { - //println(s"***** addSlice($n, /${a.length}, $lo, $hi)") - if(n == 1) { - add(1, copyOrUse(a, lo, hi)) - } else { - val bitsN = BITS * (n-1) - val widthN = 1 << bitsN - val loN = lo >>> bitsN - val hiN = hi >>> bitsN - val loRest = lo & (widthN - 1) - val hiRest = hi & (widthN - 1) - //println(s"***** bitsN=$bitsN, loN=$loN, hiN=$hiN, loRest=$loRest, hiRest=$hiRest") - if(loRest == 0) { - if(hiRest == 0) { - add(n, copyOrUse(a, loN, hiN)) - } else { - if(hiN > loN) add(n, copyOrUse(a, loN, hiN)) - addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) - } - } else { - if(hiN == loN) { - addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, hiRest) - } else { - addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, widthN) - if(hiRest == 0) { - if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) - } else { - if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) - addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) - } - } - } - } - } - - private[this] def add[T <: AnyRef](n: Int, a: Array[T]): Unit = { - //println(s"***** add($n, /${a.length})") - val idx = - if(n <= maxDim) suffixIdx(n) - else { maxDim = n; prefixIdx(n) } - slices(idx) = a.asInstanceOf[Array[AnyRef]] - } - - def result[A](): Vector[A] = { - //println(s"***** result: $len, $maxDim") - if(len <= 32) { - if(len == 0) Vector0 - else { - val prefix1 = slices(prefixIdx(1)) - val suffix1 = slices(suffixIdx(1)) - //println(s"***** prefix1: ${if(prefix1 == null) "null" else prefix1.mkString("[", ",", "]")}, suffix1: ${if(suffix1 == null) "null" else suffix1.mkString("[", ",", "]")}") - val a: Arr1 = - if(prefix1 ne null) { - if(suffix1 ne null) concatArrays(prefix1, suffix1) - else prefix1 - } else if(suffix1 ne null) suffix1 - else { - val prefix2 = slices(prefixIdx(2)).asInstanceOf[Arr2] - if(prefix2 ne null) prefix2(0) - else { - val suffix2 = slices(suffixIdx(2)).asInstanceOf[Arr2] - suffix2(0) - } - } - new Vector1(a) - } - } else { - balancePrefix(1) - balanceSuffix(1) - var resultDim = maxDim - if(resultDim < 6) { - val pre = slices(prefixIdx(maxDim)) - val suf = slices(suffixIdx(maxDim)) - if((pre ne null) && (suf ne null)) { - // The highest-dimensional data consists of two slices: concatenate if they fit into the main data array, - // otherwise increase the dimension - if(pre.length + suf.length <= WIDTH-2) { - slices(prefixIdx(maxDim)) = concatArrays(pre, suf) - slices(suffixIdx(maxDim)) = null - } else resultDim += 1 - } else { - // A single highest-dimensional slice could have length WIDTH-1 if it came from a prefix or suffix but we - // only allow WIDTH-2 for the main data, so increase the dimension in this case - val one = if(pre ne null) pre else suf - if(one.length > WIDTH-2) resultDim += 1 - } - } - val prefix1 = slices(prefixIdx(1)) - val suffix1 = slices(suffixIdx(1)) - val len1 = 
prefix1.length - val res = (resultDim: @switch) match { - case 2 => - val data2 = dataOr(2, empty2) - new Vector2[A](prefix1, len1, data2, suffix1, len) - case 3 => - val prefix2 = prefixOr(2, empty2) - val data3 = dataOr(3, empty3) - val suffix2 = suffixOr(2, empty2) - val len12 = len1 + (prefix2.length * WIDTH) - new Vector3[A](prefix1, len1, prefix2, len12, data3, suffix2, suffix1, len) - case 4 => - val prefix2 = prefixOr(2, empty2) - val prefix3 = prefixOr(3, empty3) - val data4 = dataOr(4, empty4) - val suffix3 = suffixOr(3, empty3) - val suffix2 = suffixOr(2, empty2) - val len12 = len1 + (prefix2.length * WIDTH) - val len123 = len12 + (prefix3.length * WIDTH2) - new Vector4[A](prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, len) - case 5 => - val prefix2 = prefixOr(2, empty2) - val prefix3 = prefixOr(3, empty3) - val prefix4 = prefixOr(4, empty4) - val data5 = dataOr(5, empty5) - val suffix4 = suffixOr(4, empty4) - val suffix3 = suffixOr(3, empty3) - val suffix2 = suffixOr(2, empty2) - val len12 = len1 + (prefix2.length * WIDTH) - val len123 = len12 + (prefix3.length * WIDTH2) - val len1234 = len123 + (prefix4.length * WIDTH3) - new Vector5[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, len) - case 6 => - val prefix2 = prefixOr(2, empty2) - val prefix3 = prefixOr(3, empty3) - val prefix4 = prefixOr(4, empty4) - val prefix5 = prefixOr(5, empty5) - val data6 = dataOr(6, empty6) - val suffix5 = suffixOr(5, empty5) - val suffix4 = suffixOr(4, empty4) - val suffix3 = suffixOr(3, empty3) - val suffix2 = suffixOr(2, empty2) - val len12 = len1 + (prefix2.length * WIDTH) - val len123 = len12 + (prefix3.length * WIDTH2) - val len1234 = len123 + (prefix4.length * WIDTH3) - val len12345 = len1234 + (prefix5.length * WIDTH4) - new Vector6[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, len) - } - res - } - } - - @inline private[this] def prefixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { - val p = slices(prefixIdx(n)) - if(p ne null) p.asInstanceOf[Array[T]] else a - } - - @inline private[this] def suffixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { - val s = slices(suffixIdx(n)) - if(s ne null) s.asInstanceOf[Array[T]] else a - } - - @inline private[this] def dataOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { - val p = slices(prefixIdx(n)) - if(p ne null) p.asInstanceOf[Array[T]] - else { - val s = slices(suffixIdx(n)) - if(s ne null) s.asInstanceOf[Array[T]] else a - } - } - - /** Ensure prefix is not empty */ - private[this] def balancePrefix(n: Int): Unit = { - if(slices(prefixIdx(n)) eq null) { - if(n == maxDim) { - slices(prefixIdx(n)) = slices(suffixIdx(n)) - slices(suffixIdx(n)) = null - } else { - balancePrefix(n+1) - val preN1 = slices(prefixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] - //assert(preN1 ne null) - slices(prefixIdx(n)) = preN1(0) - if(preN1.length == 1) { - slices(prefixIdx(n+1)) = null - if((maxDim == n+1) && (slices(suffixIdx(n+1)) eq null)) maxDim = n - } else { - slices(prefixIdx(n+1)) = copyOfRange(preN1, 1, preN1.length).asInstanceOf[Array[AnyRef]] - } - } - } - } - - /** Ensure suffix is not empty */ - private[this] def balanceSuffix(n: Int): Unit = { - if(slices(suffixIdx(n)) eq null) { - if(n == maxDim) { - slices(suffixIdx(n)) = slices(prefixIdx(n)) - slices(prefixIdx(n)) = null - } else { - balanceSuffix(n+1) - val sufN1 = 
slices(suffixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] - //assert(sufN1 ne null, s"n=$n, maxDim=$maxDim, slices=${slices.mkString(",")}") - slices(suffixIdx(n)) = sufN1(sufN1.length-1) - if(sufN1.length == 1) { - slices(suffixIdx(n+1)) = null - if((maxDim == n+1) && (slices(prefixIdx(n+1)) eq null)) maxDim = n - } else { - slices(suffixIdx(n+1)) = copyOfRange(sufN1, 0, sufN1.length-1).asInstanceOf[Array[AnyRef]] - } - } - } - } - - override def toString: String = - s"VectorSliceBuilder(lo=$lo, hi=$hi, len=$len, pos=$pos, maxDim=$maxDim)" - - private[immutable] def getSlices: Array[Array[AnyRef]] = slices -} - - -final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { - - private[this] var a6: Arr6 = _ - private[this] var a5: Arr5 = _ - private[this] var a4: Arr4 = _ - private[this] var a3: Arr3 = _ - private[this] var a2: Arr2 = _ - private[this] var a1: Arr1 = new Arr1(WIDTH) - private[this] var len1, lenRest, offset = 0 - private[this] var prefixIsRightAligned = false - private[this] var depth = 1 - - @inline private[this] final def setLen(i: Int): Unit = { - len1 = i & MASK - lenRest = i - len1 - } - - override def knownSize: Int = len1 + lenRest - offset - - @inline def size: Int = knownSize - @inline def isEmpty: Boolean = knownSize == 0 - @inline def nonEmpty: Boolean = knownSize != 0 - - def clear(): Unit = { - a6 = null - a5 = null - a4 = null - a3 = null - a2 = null - a1 = new Arr1(WIDTH) - len1 = 0 - lenRest = 0 - offset = 0 - prefixIsRightAligned = false - depth = 1 - } - - private[immutable] def initSparse(size: Int, elem: A): Unit = { - setLen(size) - Arrays.fill(a1, elem) - if(size > WIDTH) { - a2 = new Array(WIDTH) - Arrays.fill(a2.asInstanceOf[Array[AnyRef]], a1) - if(size > WIDTH2) { - a3 = new Array(WIDTH) - Arrays.fill(a3.asInstanceOf[Array[AnyRef]], a2) - if(size > WIDTH3) { - a4 = new Array(WIDTH) - Arrays.fill(a4.asInstanceOf[Array[AnyRef]], a3) - if(size > WIDTH4) { - a5 = new Array(WIDTH) - Arrays.fill(a5.asInstanceOf[Array[AnyRef]], a4) - if(size > WIDTH5) { - a6 = new Array(LASTWIDTH) - Arrays.fill(a6.asInstanceOf[Array[AnyRef]], a5) - depth = 6 - } else depth = 5 - } else depth = 4 - } else depth = 3 - } else depth = 2 - } else depth = 1 - } - - private[immutable] def initFrom(prefix1: Arr1): Unit = { - depth = 1 - setLen(prefix1.length) - a1 = copyOrUse(prefix1, 0, WIDTH) - if(len1 == 0 && lenRest > 0) { - // force advance() on next addition: - len1 = WIDTH - lenRest -= WIDTH - } - } - - private[immutable] def initFrom(v: Vector[_]): this.type = { - (v.vectorSliceCount: @switch) match { - case 0 => - case 1 => - val v1 = v.asInstanceOf[Vector1[_]] - depth = 1 - setLen(v1.prefix1.length) - a1 = copyOrUse(v1.prefix1, 0, WIDTH) - case 3 => - val v2 = v.asInstanceOf[Vector2[_]] - val d2 = v2.data2 - a1 = copyOrUse(v2.suffix1, 0, WIDTH) - depth = 2 - offset = WIDTH - v2.len1 - setLen(v2.length0 + offset) - a2 = new Arr2(WIDTH) - a2(0) = v2.prefix1 - System.arraycopy(d2, 0, a2, 1, d2.length) - a2(d2.length+1) = a1 - case 5 => - val v3 = v.asInstanceOf[Vector3[_]] - val d3 = v3.data3 - val s2 = v3.suffix2 - a1 = copyOrUse(v3.suffix1, 0, WIDTH) - depth = 3 - offset = WIDTH2 - v3.len12 - setLen(v3.length0 + offset) - a3 = new Arr3(WIDTH) - a3(0) = copyPrepend(v3.prefix1, v3.prefix2) - System.arraycopy(d3, 0, a3, 1, d3.length) - a2 = copyOf(s2, WIDTH) - a3(d3.length+1) = a2 - a2(s2.length) = a1 - case 7 => - val v4 = v.asInstanceOf[Vector4[_]] - val d4 = v4.data4 - val s3 = v4.suffix3 - val s2 = v4.suffix2 - a1 = copyOrUse(v4.suffix1, 0, WIDTH) - depth = 4 
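// Usage note: the initFrom fast path is what makes seeding a builder from an
// existing Vector cheap; the builder adopts the vector's slices instead of
// copying element by element. From user code it is reached by the first addAll
// on an empty builder (a behavioral sketch):
object BuilderSeedDemo {
  import scala.collection.immutable.VectorBuilder
  def main(args: Array[String]): Unit = {
    val b = new VectorBuilder[Int]
    b.addAll(Vector.tabulate(100)(identity)) // empty builder + Vector argument: initFrom
    b.addOne(100)
    assert(b.result().length == 101)
  }
}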
- offset = WIDTH3 - v4.len123 - setLen(v4.length0 + offset) - a4 = new Arr4(WIDTH) - a4(0) = copyPrepend(copyPrepend(v4.prefix1, v4.prefix2), v4.prefix3) - System.arraycopy(d4, 0, a4, 1, d4.length) - a3 = copyOf(s3, WIDTH) - a2 = copyOf(s2, WIDTH) - a4(d4.length+1) = a3 - a3(s3.length) = a2 - a2(s2.length) = a1 - case 9 => - val v5 = v.asInstanceOf[Vector5[_]] - val d5 = v5.data5 - val s4 = v5.suffix4 - val s3 = v5.suffix3 - val s2 = v5.suffix2 - a1 = copyOrUse(v5.suffix1, 0, WIDTH) - depth = 5 - offset = WIDTH4 - v5.len1234 - setLen(v5.length0 + offset) - a5 = new Arr5(WIDTH) - a5(0) = copyPrepend(copyPrepend(copyPrepend(v5.prefix1, v5.prefix2), v5.prefix3), v5.prefix4) - System.arraycopy(d5, 0, a5, 1, d5.length) - a4 = copyOf(s4, WIDTH) - a3 = copyOf(s3, WIDTH) - a2 = copyOf(s2, WIDTH) - a5(d5.length+1) = a4 - a4(s4.length) = a3 - a3(s3.length) = a2 - a2(s2.length) = a1 - case 11 => - val v6 = v.asInstanceOf[Vector6[_]] - val d6 = v6.data6 - val s5 = v6.suffix5 - val s4 = v6.suffix4 - val s3 = v6.suffix3 - val s2 = v6.suffix2 - a1 = copyOrUse(v6.suffix1, 0, WIDTH) - depth = 6 - offset = WIDTH5 - v6.len12345 - setLen(v6.length0 + offset) - a6 = new Arr6(LASTWIDTH) - a6(0) = copyPrepend(copyPrepend(copyPrepend(copyPrepend(v6.prefix1, v6.prefix2), v6.prefix3), v6.prefix4), v6.prefix5) - System.arraycopy(d6, 0, a6, 1, d6.length) - a5 = copyOf(s5, WIDTH) - a4 = copyOf(s4, WIDTH) - a3 = copyOf(s3, WIDTH) - a2 = copyOf(s2, WIDTH) - a6(d6.length+1) = a5 - a5(s5.length) = a4 - a4(s4.length) = a3 - a3(s3.length) = a2 - a2(s2.length) = a1 - } - if(len1 == 0 && lenRest > 0) { - // force advance() on next addition: - len1 = WIDTH - lenRest -= WIDTH - } - this - } - - //TODO Make public; this method is only private for binary compatibility - private[collection] def alignTo(before: Int, bigVector: Vector[A]): this.type = { - if (len1 != 0 || lenRest != 0) - throw new UnsupportedOperationException("A non-empty VectorBuilder cannot be aligned retrospectively. Please call .reset() or use a new VectorBuilder.") - val (prefixLength, maxPrefixLength) = bigVector match { - case Vector0 => (0, 1) - case v1: Vector1[_] => (0, 1) - case v2: Vector2[_] => (v2.len1, WIDTH) - case v3: Vector3[_] => (v3.len12, WIDTH2) - case v4: Vector4[_] => (v4.len123, WIDTH3) - case v5: Vector5[_] => (v5.len1234, WIDTH4) - case v6: Vector6[_] => (v6.len12345, WIDTH5) - } - if (maxPrefixLength == 1) return this // does not really make sense to align for <= 32 element-vector - val overallPrefixLength = (before + prefixLength) % maxPrefixLength - offset = (maxPrefixLength - overallPrefixLength) % maxPrefixLength - // pretend there are already `offset` elements added - advanceN(offset & ~MASK) - len1 = offset & MASK - prefixIsRightAligned = true - this - } - - /** - * Removes `offset` leading `null`s in the prefix. - * This is needed after calling `alignTo` and subsequent additions, - * directly before the result is used for creating a new Vector. - * Note that the outermost array keeps its length to keep the - * Builder re-usable. - * - * example: - * a2 = Array(null, ..., null, Array(null, .., null, 0, 1, .., x), Array(x+1, .., x+32), ...) - * becomes - * a2 = Array(Array(0, 1, .., x), Array(x+1, .., x+32), ..., ?, ..., ?) 
- */ - private[this] def leftAlignPrefix(): Unit = { - @inline def shrinkOffsetIfToLarge(width: Int): Unit = { - val newOffset = offset % width - lenRest -= offset - newOffset - offset = newOffset - } - var a: Array[AnyRef] = null // the array we modify - var aParent: Array[AnyRef] = null // a's parent, so aParent(0) == a - if (depth >= 6) { - a = a6.asInstanceOf[Array[AnyRef]] - val i = offset >>> BITS5 - if (i > 0) System.arraycopy(a, i, a, 0, LASTWIDTH - i) - shrinkOffsetIfToLarge(WIDTH5) - if ((lenRest >>> BITS5) == 0) depth = 5 - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 5) { - if (a == null) a = a5.asInstanceOf[Array[AnyRef]] - val i = (offset >>> BITS4) & MASK - if (depth == 5) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) - a5 = a.asInstanceOf[Arr5] - shrinkOffsetIfToLarge(WIDTH4) - if ((lenRest >>> BITS4) == 0) depth = 4 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 4) { - if (a == null) a = a4.asInstanceOf[Array[AnyRef]] - val i = (offset >>> BITS3) & MASK - if (depth == 4) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) - a4 = a.asInstanceOf[Arr4] - shrinkOffsetIfToLarge(WIDTH3) - if ((lenRest >>> BITS3) == 0) depth = 3 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 3) { - if (a == null) a = a3.asInstanceOf[Array[AnyRef]] - val i = (offset >>> BITS2) & MASK - if (depth == 3) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) - a3 = a.asInstanceOf[Arr3] - shrinkOffsetIfToLarge(WIDTH2) - if ((lenRest >>> BITS2) == 0) depth = 2 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 2) { - if (a == null) a = a2.asInstanceOf[Array[AnyRef]] - val i = (offset >>> BITS) & MASK - if (depth == 2) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) - a2 = a.asInstanceOf[Arr2] - shrinkOffsetIfToLarge(WIDTH) - if ((lenRest >>> BITS) == 0) depth = 1 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 1) { - if (a == null) a = a1.asInstanceOf[Array[AnyRef]] - val i = offset & MASK - if (depth == 1) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) - a1 = a.asInstanceOf[Arr1] - len1 -= offset - offset = 0 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - } - prefixIsRightAligned = false - } - - def addOne(elem: A): this.type = { - if(len1 == WIDTH) advance() - a1(len1) = elem.asInstanceOf[AnyRef] - len1 += 1 - this - } - - private[this] def addArr1(data: Arr1): Unit = { - val dl = data.length - if(dl > 0) { - if(len1 == WIDTH) advance() - val copy1 = mmin(WIDTH-len1, dl) - val copy2 = dl - copy1 - System.arraycopy(data, 0, a1, len1, copy1) - len1 += copy1 - if(copy2 > 0) { - advance() - System.arraycopy(data, copy1, a1, 0, copy2) - len1 += copy2 - } - } - } - - private[this] def addArrN(slice: Array[AnyRef], dim: Int): Unit = { -// assert(dim >= 2) -// assert(lenRest % WIDTH == 0) -// assert(len1 == 0 || len1 == WIDTH) - if (slice.isEmpty) return - if (len1 == WIDTH) advance() - val sl = slice.length - (dim: @switch) match { - case 2 => - // lenRest is always a multiple of WIDTH - val copy1 = mmin(((WIDTH2 - lenRest) >>> BITS) & MASK, sl) - val copy2 = sl - copy1 - val destPos = (lenRest >>> BITS) & MASK - System.arraycopy(slice, 0, a2, 
destPos, copy1) - advanceN(WIDTH * copy1) - if (copy2 > 0) { - System.arraycopy(slice, copy1, a2, 0, copy2) - advanceN(WIDTH * copy2) - } - case 3 => - if (lenRest % WIDTH2 != 0) { - // lenRest is not multiple of WIDTH2, so this slice does not align, need to try lower dimension - slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 2)) - return - } - val copy1 = mmin(((WIDTH3 - lenRest) >>> BITS2) & MASK, sl) - val copy2 = sl - copy1 - val destPos = (lenRest >>> BITS2) & MASK - System.arraycopy(slice, 0, a3, destPos, copy1) - advanceN(WIDTH2 * copy1) - if (copy2 > 0) { - System.arraycopy(slice, copy1, a3, 0, copy2) - advanceN(WIDTH2 * copy2) - } - case 4 => - if (lenRest % WIDTH3 != 0) { - // lenRest is not multiple of WIDTH3, so this slice does not align, need to try lower dimensions - slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 3)) - return - } - val copy1 = mmin(((WIDTH4 - lenRest) >>> BITS3) & MASK, sl) - val copy2 = sl - copy1 - val destPos = (lenRest >>> BITS3) & MASK - System.arraycopy(slice, 0, a4, destPos, copy1) - advanceN(WIDTH3 * copy1) - if (copy2 > 0) { - System.arraycopy(slice, copy1, a4, 0, copy2) - advanceN(WIDTH3 * copy2) - } - case 5 => - if (lenRest % WIDTH4 != 0) { - // lenRest is not multiple of WIDTH4, so this slice does not align, need to try lower dimensions - slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 4)) - return - } - val copy1 = mmin(((WIDTH5 - lenRest) >>> BITS4) & MASK, sl) - val copy2 = sl - copy1 - val destPos = (lenRest >>> BITS4) & MASK - System.arraycopy(slice, 0, a5, destPos, copy1) - advanceN(WIDTH4 * copy1) - if (copy2 > 0) { - System.arraycopy(slice, copy1, a5, 0, copy2) - advanceN(WIDTH4 * copy2) - } - case 6 => // note width is now LASTWIDTH - if (lenRest % WIDTH5 != 0) { - // lenRest is not multiple of WIDTH5, so this slice does not align, need to try lower dimensions - slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 5)) - return - } - val copy1 = sl - // there is no copy2 because there can't be another a6 to copy to - val destPos = lenRest >>> BITS5 - if (destPos + copy1 > LASTWIDTH) - throw new IllegalArgumentException("exceeding 2^31 elements") - System.arraycopy(slice, 0, a6, destPos, copy1) - advanceN(WIDTH5 * copy1) - } - } - - private[this] def addVector(xs: Vector[A]): this.type = { - val sliceCount = xs.vectorSliceCount - var sliceIdx = 0 - while(sliceIdx < sliceCount) { - val slice = xs.vectorSlice(sliceIdx) - vectorSliceDim(sliceCount, sliceIdx) match { - case 1 => addArr1(slice.asInstanceOf[Arr1]) - case n if len1 == WIDTH || len1 == 0 => - addArrN(slice.asInstanceOf[Array[AnyRef]], n) - case n => foreachRec(n-2, slice, addArr1) - } - sliceIdx += 1 - } - this - } - - override def addAll(xs: IterableOnce[A]^): this.type = xs match { - case v: Vector[_] => - if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v) - else addVector(v.asInstanceOf[Vector[A]]) - case _ => - super.addAll(xs) - } - - private[this] def advance(): Unit = { - val idx = lenRest + WIDTH - val xor = idx ^ lenRest - lenRest = idx - len1 = 0 - advance1(idx, xor) - } - - private[this] def advanceN(n: Int): Unit = if (n > 0) { - // assert(n % 32 == 0) - val idx = lenRest + n - val xor = idx ^ lenRest - lenRest = idx - len1 = 0 - advance1(idx, xor) - } - - private[this] def advance1(idx: Int, xor: Int): Unit = { - if (xor <= 0) { // level = 6 or something very unexpected happened - throw new IllegalArgumentException(s"advance1($idx, $xor): a1=$a1, a2=$a2, a3=$a3, a4=$a4, a5=$a5, a6=$a6, depth=$depth") - } else if 
(xor < WIDTH2) { // level = 1 - if (depth <= 1) { a2 = new Array(WIDTH); a2(0) = a1; depth = 2 } - a1 = new Array(WIDTH) - a2((idx >>> BITS) & MASK) = a1 - } else if (xor < WIDTH3) { // level = 2 - if (depth <= 2) { a3 = new Array(WIDTH); a3(0) = a2; depth = 3 } - a1 = new Array(WIDTH) - a2 = new Array(WIDTH) - a2((idx >>> BITS) & MASK) = a1 - a3((idx >>> BITS2) & MASK) = a2 - } else if (xor < WIDTH4) { // level = 3 - if (depth <= 3) { a4 = new Array(WIDTH); a4(0) = a3; depth = 4 } - a1 = new Array(WIDTH) - a2 = new Array(WIDTH) - a3 = new Array(WIDTH) - a2((idx >>> BITS) & MASK) = a1 - a3((idx >>> BITS2) & MASK) = a2 - a4((idx >>> BITS3) & MASK) = a3 - } else if (xor < WIDTH5) { // level = 4 - if (depth <= 4) { a5 = new Array(WIDTH); a5(0) = a4; depth = 5 } - a1 = new Array(WIDTH) - a2 = new Array(WIDTH) - a3 = new Array(WIDTH) - a4 = new Array(WIDTH) - a2((idx >>> BITS) & MASK) = a1 - a3((idx >>> BITS2) & MASK) = a2 - a4((idx >>> BITS3) & MASK) = a3 - a5((idx >>> BITS4) & MASK) = a4 - } else { // level = 5 - if (depth <= 5) { a6 = new Array(LASTWIDTH); a6(0) = a5; depth = 6 } - a1 = new Array(WIDTH) - a2 = new Array(WIDTH) - a3 = new Array(WIDTH) - a4 = new Array(WIDTH) - a5 = new Array(WIDTH) - a2((idx >>> BITS) & MASK) = a1 - a3((idx >>> BITS2) & MASK) = a2 - a4((idx >>> BITS3) & MASK) = a3 - a5((idx >>> BITS4) & MASK) = a4 - a6(idx >>> BITS5) = a5 - } - } - - def result(): Vector[A] = { - if (prefixIsRightAligned) leftAlignPrefix() - val len = len1 + lenRest - val realLen = len - offset - if(realLen == 0) Vector.empty - else if(len < 0) throw new IndexOutOfBoundsException(s"Vector cannot have negative size $len") - else if(len <= WIDTH) { - new Vector1(copyIfDifferentSize(a1, realLen)) - } else if(len <= WIDTH2) { - val i1 = (len-1) & MASK - val i2 = (len-1) >>> BITS - val data = copyOfRange(a2, 1, i2) - val prefix1 = a2(0) - val suffix1 = copyIfDifferentSize(a2(i2), i1+1) - new Vector2(prefix1, WIDTH-offset, data, suffix1, realLen) - } else if(len <= WIDTH3) { - val i1 = (len-1) & MASK - val i2 = ((len-1) >>> BITS) & MASK - val i3 = ((len-1) >>> BITS2) - val data = copyOfRange(a3, 1, i3) - val prefix2 = copyTail(a3(0)) - val prefix1 = a3(0)(0) - val suffix2 = copyOf(a3(i3), i2) - val suffix1 = copyIfDifferentSize(a3(i3)(i2), i1+1) - val len1 = prefix1.length - val len12 = len1 + prefix2.length*WIDTH - new Vector3(prefix1, len1, prefix2, len12, data, suffix2, suffix1, realLen) - } else if(len <= WIDTH4) { - val i1 = (len-1) & MASK - val i2 = ((len-1) >>> BITS) & MASK - val i3 = ((len-1) >>> BITS2) & MASK - val i4 = ((len-1) >>> BITS3) - val data = copyOfRange(a4, 1, i4) - val prefix3 = copyTail(a4(0)) - val prefix2 = copyTail(a4(0)(0)) - val prefix1 = a4(0)(0)(0) - val suffix3 = copyOf(a4(i4), i3) - val suffix2 = copyOf(a4(i4)(i3), i2) - val suffix1 = copyIfDifferentSize(a4(i4)(i3)(i2), i1+1) - val len1 = prefix1.length - val len12 = len1 + prefix2.length*WIDTH - val len123 = len12 + prefix3.length*WIDTH2 - new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data, suffix3, suffix2, suffix1, realLen) - } else if(len <= WIDTH5) { - val i1 = (len-1) & MASK - val i2 = ((len-1) >>> BITS) & MASK - val i3 = ((len-1) >>> BITS2) & MASK - val i4 = ((len-1) >>> BITS3) & MASK - val i5 = ((len-1) >>> BITS4) - val data = copyOfRange(a5, 1, i5) - val prefix4 = copyTail(a5(0)) - val prefix3 = copyTail(a5(0)(0)) - val prefix2 = copyTail(a5(0)(0)(0)) - val prefix1 = a5(0)(0)(0)(0) - val suffix4 = copyOf(a5(i5), i4) - val suffix3 = copyOf(a5(i5)(i4), i3) - val suffix2 = copyOf(a5(i5)(i4)(i3), 
i2) - val suffix1 = copyIfDifferentSize(a5(i5)(i4)(i3)(i2), i1+1) - val len1 = prefix1.length - val len12 = len1 + prefix2.length*WIDTH - val len123 = len12 + prefix3.length*WIDTH2 - val len1234 = len123 + prefix4.length*WIDTH3 - new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data, suffix4, suffix3, suffix2, suffix1, realLen) - } else { - val i1 = (len-1) & MASK - val i2 = ((len-1) >>> BITS) & MASK - val i3 = ((len-1) >>> BITS2) & MASK - val i4 = ((len-1) >>> BITS3) & MASK - val i5 = ((len-1) >>> BITS4) & MASK - val i6 = ((len-1) >>> BITS5) - val data = copyOfRange(a6, 1, i6) - val prefix5 = copyTail(a6(0)) - val prefix4 = copyTail(a6(0)(0)) - val prefix3 = copyTail(a6(0)(0)(0)) - val prefix2 = copyTail(a6(0)(0)(0)(0)) - val prefix1 = a6(0)(0)(0)(0)(0) - val suffix5 = copyOf(a6(i6), i5) - val suffix4 = copyOf(a6(i6)(i5), i4) - val suffix3 = copyOf(a6(i6)(i5)(i4), i3) - val suffix2 = copyOf(a6(i6)(i5)(i4)(i3), i2) - val suffix1 = copyIfDifferentSize(a6(i6)(i5)(i4)(i3)(i2), i1+1) - val len1 = prefix1.length - val len12 = len1 + prefix2.length*WIDTH - val len123 = len12 + prefix3.length*WIDTH2 - val len1234 = len123 + prefix4.length*WIDTH3 - val len12345 = len1234 + prefix5.length*WIDTH4 - new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data, suffix5, suffix4, suffix3, suffix2, suffix1, realLen) - } - } - - override def toString: String = - s"VectorBuilder(len1=$len1, lenRest=$lenRest, offset=$offset, depth=$depth)" - - private[immutable] def getData: Array[Array[_]] = Array[Array[AnyRef]]( - a1, a2.asInstanceOf[Array[AnyRef]], a3.asInstanceOf[Array[AnyRef]], a4.asInstanceOf[Array[AnyRef]], - a5.asInstanceOf[Array[AnyRef]], a6.asInstanceOf[Array[AnyRef]] - ).asInstanceOf[Array[Array[_]]] -} - - -/** Compile-time definitions for Vector. No references to this object should appear in bytecode. 
*/ -private[immutable] object VectorInline { - // compile-time numeric constants - final val BITS = 5 - final val WIDTH = 1 << BITS - final val MASK = WIDTH - 1 - final val BITS2 = BITS * 2 - final val WIDTH2 = 1 << BITS2 - final val BITS3 = BITS * 3 - final val WIDTH3 = 1 << BITS3 - final val BITS4 = BITS * 4 - final val WIDTH4 = 1 << BITS4 - final val BITS5 = BITS * 5 - final val WIDTH5 = 1 << BITS5 - final val LASTWIDTH = WIDTH << 1 // 1 extra bit in the last level to go up to Int.MaxValue (2^31-1) instead of 2^30: - final val Log2ConcatFaster = 5 - final val AlignToFaster = 64 - - type Arr1 = Array[AnyRef] - type Arr2 = Array[Array[AnyRef]] - type Arr3 = Array[Array[Array[AnyRef]]] - type Arr4 = Array[Array[Array[Array[AnyRef]]]] - type Arr5 = Array[Array[Array[Array[Array[AnyRef]]]]] - type Arr6 = Array[Array[Array[Array[Array[Array[AnyRef]]]]]] - - /** Dimension of the slice at index */ - @inline def vectorSliceDim(count: Int, idx: Int): Int = { - val c = count/2 - c+1-abs(idx-c) - } - - @inline def copyOrUse[T <: AnyRef](a: Array[T], start: Int, end: Int): Array[T] = - if(start == 0 && end == a.length) a else copyOfRange[T](a, start, end) - - @inline final def copyTail[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 1, a.length) - - @inline final def copyInit[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 0, a.length-1) - - @inline final def copyIfDifferentSize[T <: AnyRef](a: Array[T], len: Int): Array[T] = - if(a.length == len) a else copyOf[T](a, len) - - @inline final def wrap1(x: Any ): Arr1 = { val a = new Arr1(1); a(0) = x.asInstanceOf[AnyRef]; a } - @inline final def wrap2(x: Arr1): Arr2 = { val a = new Arr2(1); a(0) = x; a } - @inline final def wrap3(x: Arr2): Arr3 = { val a = new Arr3(1); a(0) = x; a } - @inline final def wrap4(x: Arr3): Arr4 = { val a = new Arr4(1); a(0) = x; a } - @inline final def wrap5(x: Arr4): Arr5 = { val a = new Arr5(1); a(0) = x; a } - - @inline final def copyUpdate(a1: Arr1, idx1: Int, elem: Any): Arr1 = { - val a1c = a1.clone() - a1c(idx1) = elem.asInstanceOf[AnyRef] - a1c - } - - @inline final def copyUpdate(a2: Arr2, idx2: Int, idx1: Int, elem: Any): Arr2 = { - val a2c = a2.clone() - a2c(idx2) = copyUpdate(a2c(idx2), idx1, elem) - a2c - } - - @inline final def copyUpdate(a3: Arr3, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr3 = { - val a3c = a3.clone() - a3c(idx3) = copyUpdate(a3c(idx3), idx2, idx1, elem) - a3c - } - - @inline final def copyUpdate(a4: Arr4, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr4 = { - val a4c = a4.clone() - a4c(idx4) = copyUpdate(a4c(idx4), idx3, idx2, idx1, elem) - a4c - } - - @inline final def copyUpdate(a5: Arr5, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr5 = { - val a5c = a5.clone() - a5c(idx5) = copyUpdate(a5c(idx5), idx4, idx3, idx2, idx1, elem) - a5c - } - - @inline final def copyUpdate(a6: Arr6, idx6: Int, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr6 = { - val a6c = a6.clone() - a6c(idx6) = copyUpdate(a6c(idx6), idx5, idx4, idx3, idx2, idx1, elem) - a6c - } - - @inline final def concatArrays[T <: AnyRef](a: Array[T], b: Array[T]): Array[T] = { - val dest = copyOf[T](a, a.length+b.length) - System.arraycopy(b, 0, dest, a.length, b.length) - dest - } -} - - -/** Helper methods and constants for Vector. 
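// The level geometry implied by the constants above, restated numerically:
// WIDTH = 32, WIDTH2 = 1024, WIDTH3 = 32768, WIDTH4 = 2^20, WIDTH5 = 2^25, and
// LASTWIDTH = 64 gives the top level one extra bit, so the total capacity
// reaches 2^31 rather than 2^30. A quick check of that arithmetic (sketch):
object GeometryDemo {
  final val BITS = 5
  def width(n: Int): Long = 1L << (BITS * n)
  def main(args: Array[String]): Unit = {
    (1 to 5).foreach(n => println(s"WIDTH$n = ${width(n)}"))
    println(s"LASTWIDTH * WIDTH5 = ${64 * width(5)}") // 2147483648 = 2^31
  }
}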
*/ -private object VectorStatics { - - final def copyAppend1(a: Arr1, elem: Any): Arr1 = { - val alen = a.length - val ac = new Arr1(alen+1) - System.arraycopy(a, 0, ac, 0, alen) - ac(alen) = elem.asInstanceOf[AnyRef] - ac - } - - final def copyAppend[T <: AnyRef](a: Array[T], elem: T): Array[T] = { - val ac = copyOf(a, a.length+1) - ac(ac.length-1) = elem - ac - } - - final def copyPrepend1(elem: Any, a: Arr1): Arr1 = { - val ac = new Arr1(a.length+1) - System.arraycopy(a, 0, ac, 1, a.length) - ac(0) = elem.asInstanceOf[AnyRef] - ac - } - - final def copyPrepend[T <: AnyRef](elem: T, a: Array[T]): Array[T] = { - val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length+1).asInstanceOf[Array[T]] - System.arraycopy(a, 0, ac, 1, a.length) - ac(0) = elem - ac - } - - final val empty1: Arr1 = new Array(0) - final val empty2: Arr2 = new Array(0) - final val empty3: Arr3 = new Array(0) - final val empty4: Arr4 = new Array(0) - final val empty5: Arr5 = new Array(0) - final val empty6: Arr6 = new Array(0) - - final def foreachRec[T <: AnyRef, A, U](level: Int, a: Array[T], f: A => U): Unit = { - var i = 0 - val len = a.length - if(level == 0) { - while(i < len) { - f(a(i).asInstanceOf[A]) - i += 1 - } - } else { - val l = level-1 - while(i < len) { - foreachRec(l, a(i).asInstanceOf[Array[AnyRef]], f) - i += 1 - } - } - } - - final def mapElems1[A, B](a: Arr1, f: A => B): Arr1 = { - var i = 0 - while(i < a.length) { - val v1 = a(i).asInstanceOf[AnyRef] - val v2 = f(v1.asInstanceOf[A]).asInstanceOf[AnyRef] - if(v1 ne v2) - return mapElems1Rest(a, f, i, v2) - i += 1 - } - a - } - - final def mapElems1Rest[A, B](a: Arr1, f: A => B, at: Int, v2: AnyRef): Arr1 = { - val ac = new Arr1(a.length) - if(at > 0) System.arraycopy(a, 0, ac, 0, at) - ac(at) = v2 - var i = at+1 - while(i < a.length) { - ac(i) = f(a(i).asInstanceOf[A]).asInstanceOf[AnyRef] - i += 1 - } - ac - } - - final def mapElems[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B): Array[T] = { - if(n == 1) - mapElems1[A, B](a.asInstanceOf[Arr1], f).asInstanceOf[Array[T]] - else { - var i = 0 - while(i < a.length) { - val v1 = a(i) - val v2 = mapElems(n-1, v1.asInstanceOf[Array[AnyRef]], f) - if(v1 ne v2) - return mapElemsRest(n, a, f, i, v2) - i += 1 - } - a - } - } - - final def mapElemsRest[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B, at: Int, v2: AnyRef): Array[T] = { - val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length).asInstanceOf[Array[AnyRef]] - if(at > 0) System.arraycopy(a, 0, ac, 0, at) - ac(at) = v2 - var i = at+1 - while(i < a.length) { - ac(i) = mapElems(n-1, a(i).asInstanceOf[Array[AnyRef]], f) - i += 1 - } - ac.asInstanceOf[Array[T]] - } - - final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { - case it: Iterable[_] => - if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { - it.size match { - case 0 => null - case 1 => copyPrepend(it.head.asInstanceOf[AnyRef], prefix1) - case s => - val prefix1b = new Arr1(prefix1.length + s) - System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) - it.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) - prefix1b - } - } else null - case it => - val s = it.knownSize - if(s > 0 && s <= WIDTH-prefix1.length) { - val prefix1b = new Arr1(prefix1.length + s) - System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) - it.iterator.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) - prefix1b - } else null - } - - final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { - case it: 
Iterable[_] => - if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { - it.size match { - case 0 => null - case 1 => copyAppend(suffix1, it.head.asInstanceOf[AnyRef]) - case s => - val suffix1b = copyOf(suffix1, suffix1.length + s) - it.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) - suffix1b - } - } else null - case it => - val s = it.knownSize - if(s > 0 && s <= WIDTH-suffix1.length) { - val suffix1b = copyOf(suffix1, suffix1.length + s) - it.iterator.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) - suffix1b - } else null - } -} - - -private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLength: Int, private[this] val sliceCount: Int) extends Iterator[A] with java.lang.Cloneable { - - private[this] var a1: Arr1 = v.prefix1 - private[this] var a2: Arr2 = _ - private[this] var a3: Arr3 = _ - private[this] var a4: Arr4 = _ - private[this] var a5: Arr5 = _ - private[this] var a6: Arr6 = _ - private[this] var a1len = a1.length - private[this] var i1 = 0 // current index in a1 - private[this] var oldPos = 0 - private[this] var len1 = totalLength // remaining length relative to a1 - - private[this] var sliceIdx = 0 - private[this] var sliceDim = 1 - private[this] var sliceStart = 0 // absolute position - private[this] var sliceEnd = a1len // absolute position - - //override def toString: String = - // s"NewVectorIterator(v=$v, totalLength=$totalLength, sliceCount=$sliceCount): a1len=$a1len, len1=$len1, i1=$i1, sliceEnd=$sliceEnd" - - @inline override def knownSize = len1 - i1 - - @inline def hasNext: Boolean = len1 > i1 - - def next(): A = { - if(i1 == a1len) advance() - val r = a1(i1) - i1 += 1 - r.asInstanceOf[A] - } - - private[this] def advanceSlice(): Unit = { - if(!hasNext) Iterator.empty.next() - sliceIdx += 1 - var slice: Array[_ <: AnyRef] = v.vectorSlice(sliceIdx) - while(slice.length == 0) { - sliceIdx += 1 - slice = v.vectorSlice(sliceIdx) - } - sliceStart = sliceEnd - sliceDim = vectorSliceDim(sliceCount, sliceIdx) - (sliceDim: @switch) match { - case 1 => a1 = slice.asInstanceOf[Arr1] - case 2 => a2 = slice.asInstanceOf[Arr2] - case 3 => a3 = slice.asInstanceOf[Arr3] - case 4 => a4 = slice.asInstanceOf[Arr4] - case 5 => a5 = slice.asInstanceOf[Arr5] - case 6 => a6 = slice.asInstanceOf[Arr6] - } - sliceEnd = sliceStart + slice.length * (1 << (BITS*(sliceDim-1))) - if(sliceEnd > totalLength) sliceEnd = totalLength - if(sliceDim > 1) oldPos = (1 << (BITS*sliceDim))-1 - } - - private[this] def advance(): Unit = { - val pos = i1-len1+totalLength - if(pos == sliceEnd) advanceSlice() - if(sliceDim > 1) { - val io = pos - sliceStart - val xor = oldPos ^ io - advanceA(io, xor) - oldPos = io - } - len1 -= i1 - a1len = mmin(a1.length, len1) - i1 = 0 - } - - private[this] def advanceA(io: Int, xor: Int): Unit = { - if(xor < WIDTH2) { - a1 = a2((io >>> BITS) & MASK) - } else if(xor < WIDTH3) { - a2 = a3((io >>> BITS2) & MASK) - a1 = a2(0) - } else if(xor < WIDTH4) { - a3 = a4((io >>> BITS3) & MASK) - a2 = a3(0) - a1 = a2(0) - } else if(xor < WIDTH5) { - a4 = a5((io >>> BITS4) & MASK) - a3 = a4(0) - a2 = a3(0) - a1 = a2(0) - } else { - a5 = a6(io >>> BITS5) - a4 = a5(0) - a3 = a4(0) - a2 = a3(0) - a1 = a2(0) - } - } - - private[this] def setA(io: Int, xor: Int): Unit = { - if(xor < WIDTH2) { - a1 = a2((io >>> BITS) & MASK) - } else if(xor < WIDTH3) { - a2 = a3((io >>> BITS2) & MASK) - a1 = a2((io >>> BITS) & MASK) - } else if(xor < WIDTH4) { - a3 = a4((io >>> BITS3) & MASK) - a2 = a3((io >>> BITS2) & MASK) - a1 = a2((io >>> BITS) & MASK) - } 
else if(xor < WIDTH5) { - a4 = a5((io >>> BITS4) & MASK) - a3 = a4((io >>> BITS3) & MASK) - a2 = a3((io >>> BITS2) & MASK) - a1 = a2((io >>> BITS) & MASK) - } else { - a5 = a6(io >>> BITS5) - a4 = a5((io >>> BITS4) & MASK) - a3 = a4((io >>> BITS3) & MASK) - a2 = a3((io >>> BITS2) & MASK) - a1 = a2((io >>> BITS) & MASK) - } - } - - override def drop(n: Int): Iterator[A] = { - if(n > 0) { - val oldpos = i1-len1+totalLength - val newpos = mmin(oldpos + n, totalLength) - if(newpos == totalLength) { - i1 = 0 - len1 = 0 - a1len = 0 - } else { - while(newpos >= sliceEnd) advanceSlice() - val io = newpos - sliceStart - if(sliceDim > 1) { - val xor = oldPos ^ io - setA(io, xor) - oldPos = io - } - a1len = a1.length - i1 = io & MASK - len1 = i1 + (totalLength-newpos) - if(a1len > len1) a1len = len1 - } - } - this - } - - override def take(n: Int): Iterator[A] = { - if(n < knownSize) { - val trunc = knownSize - mmax(0, n) - totalLength -= trunc - len1 -= trunc - if(len1 < a1len) a1len = len1 - if(totalLength < sliceEnd) sliceEnd = totalLength - } - this - } - - override def slice(from: Int, until: Int): Iterator[A] = { - val _until = - if(from > 0) { - drop(from) - until - from - } else until - take(_until) - } - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - val xsLen = xs.length - val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len) - var copied = 0 - val isBoxed = xs.isInstanceOf[Array[AnyRef]] - while(copied < total) { - if(i1 == a1len) advance() - val count = mmin(total-copied, a1.length-i1) - if(isBoxed) System.arraycopy(a1, i1, xs, start+copied, count) - else Array.copy(a1, i1, xs, start+copied, count) - i1 += count - copied += count - } - total - } - - override def toVector: Vector[A] = - v.slice(i1-len1+totalLength, totalLength) - - protected[immutable] def split(at: Int): NewVectorIterator[A] = { - val it2 = clone().asInstanceOf[NewVectorIterator[A]] - it2.take(at) - drop(at) - it2 - } -} - - -private abstract class VectorStepperBase[A, Sub >: Null <: Stepper[A], Semi <: Sub](it: NewVectorIterator[A]) - extends Stepper[A] with EfficientSplit { - - protected[this] def build(it: NewVectorIterator[A]): Semi - - final def hasStep: Boolean = it.hasNext - - final def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED - - final def estimateSize: Long = it.knownSize - - def trySplit(): Sub = { - val len = it.knownSize - if(len > 1) build(it.split(len >>> 1)) - else null - } - - override final def iterator: Iterator[A] = it -} - -private class AnyVectorStepper[A](it: NewVectorIterator[A]) - extends VectorStepperBase[A, AnyStepper[A], AnyVectorStepper[A]](it) with AnyStepper[A] { - protected[this] def build(it: NewVectorIterator[A]) = new AnyVectorStepper(it) - def nextStep(): A = it.next() -} - -private class DoubleVectorStepper(it: NewVectorIterator[Double]) - extends VectorStepperBase[Double, DoubleStepper, DoubleVectorStepper](it) with DoubleStepper { - protected[this] def build(it: NewVectorIterator[Double]) = new DoubleVectorStepper(it) - def nextStep(): Double = it.next() -} - -private class IntVectorStepper(it: NewVectorIterator[Int]) - extends VectorStepperBase[Int, IntStepper, IntVectorStepper](it) with IntStepper { - protected[this] def build(it: NewVectorIterator[Int]) = new IntVectorStepper(it) - def nextStep(): Int = it.next() -} - -private class LongVectorStepper(it: NewVectorIterator[Long]) - extends VectorStepperBase[Long, LongStepper, LongVectorStepper](it) with LongStepper { - 
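// Iteration note: NewVectorIterator overrides drop and take to reposition
// directly into the target slice (via advanceSlice/setA) rather than consuming
// elements one at a time. Only the behavior is observable from outside (sketch):
object IteratorDropDemo {
  def main(args: Array[String]): Unit = {
    val big = Vector.range(0, 1 << 20)
    val it = big.iterator.drop((1 << 20) - 2) // repositions; does not step a million times
    assert(it.next() == (1 << 20) - 2)
    assert(it.next() == (1 << 20) - 1 && !it.hasNext)
  }
}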
protected[this] def build(it: NewVectorIterator[Long]) = new LongVectorStepper(it) - def nextStep(): Long = it.next() -} - - -// The following definitions are needed for binary compatibility with ParVector -private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] { - private[immutable] var it: NewVectorIterator[A @uncheckedVariance @uncheckedCaptures] = _ - def hasNext: Boolean = it.hasNext - def next(): A = it.next() - private[collection] def remainingElementCount: Int = it.size - private[collection] def remainingVector: Vector[A] = it.toVector -} diff --git a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala deleted file mode 100644 index 0860a0b47f28..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.annotation.tailrec -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** This class implements immutable maps using a vector/map-based data structure, which preserves insertion order. - * - * Unlike `ListMap`, `VectorMap` has amortized effectively constant lookup at the expense - * of using extra memory and generally lower performance for other operations - * - * @tparam K the type of the keys contained in this vector map. - * @tparam V the type of the values associated with the keys in this vector map. 
- * - * @define coll immutable vector map - * @define Coll `immutable.VectorMap` - */ -final class VectorMap[K, +V] private ( - private[immutable] val fields: Vector[Any], - private[immutable] val underlying: Map[K, (Int, V)], dropped: Int) - extends AbstractMap[K, V] - with SeqMap[K, V] - with StrictOptimizedMapOps[K, V, VectorMap, VectorMap[K, V]] - with MapFactoryDefaults[K, V, VectorMap, Iterable] { - - import VectorMap._ - - override protected[this] def className: String = "VectorMap" - - private[immutable] def this(fields: Vector[K], underlying: Map[K, (Int, V)]) = { - this(fields, underlying, 0) - } - - override val size = underlying.size - - override def knownSize: Int = size - - override def isEmpty: Boolean = size == 0 - - def updated[V1 >: V](key: K, value: V1): VectorMap[K, V1] = { - underlying.get(key) match { - case Some((slot, _)) => - new VectorMap(fields, underlying.updated[(Int, V1)](key, (slot, value)), dropped) - case None => - new VectorMap(fields :+ key, underlying.updated[(Int, V1)](key, (fields.length + dropped, value)), dropped) - } - } - - override def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = - new Map.WithDefault(this, d) - - override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = - new Map.WithDefault[K, V1](this, _ => d) - - def get(key: K): Option[V] = underlying.get(key) match { - case Some(v) => Some(v._2) - case None => None - } - - @tailrec - private def nextValidField(slot: Int): (Int, K) = { - if (slot >= fields.size) (-1, null.asInstanceOf[K]) - else fields(slot) match { - case Tombstone(distance) => - nextValidField(slot + distance) - case k => - (slot, k.asInstanceOf[K]) - } - } - - def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { - private[this] val fieldsLength = fields.length - private[this] var slot = -1 - private[this] var key: K = null.asInstanceOf[K] - - private[this] def advance(): Unit = { - val nextSlot = slot + 1 - if (nextSlot >= fieldsLength) { - slot = fieldsLength - key = null.asInstanceOf[K] - } else { - nextValidField(nextSlot) match { - case (-1, _) => - slot = fieldsLength - key = null.asInstanceOf[K] - case (s, k) => - slot = s - key = k - } - } - } - - advance() - - override def hasNext: Boolean = slot < fieldsLength - - override def next(): (K, V) = { - if (!hasNext) throw new NoSuchElementException("next called on depleted iterator") - val result = (key, underlying(key)._2) - advance() - result - } - } - - // No-Op overrides to allow for more efficient steppers in a minor release. - // Refining the return type to `S with EfficientSplit` is binary compatible. 
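// Semantics note: VectorMap keeps insertion order through the fields vector
// and tombstones removed slots instead of shifting entries. The observable
// behavior, as a small sketch:
object VectorMapDemo {
  import scala.collection.immutable.VectorMap
  def main(args: Array[String]): Unit = {
    val m = VectorMap("a" -> 1, "b" -> 2, "c" -> 3) - "b"
    assert(m.toList == List("a" -> 1, "c" -> 3)) // iteration order is insertion order
    assert((m + ("b" -> 4)).toList.last == ("b" -> 4)) // a removed key re-enters at the end
  }
}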
- - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S = super.stepper(shape) - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = super.keyStepper(shape) - - override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = super.valueStepper(shape) - - - def removed(key: K): VectorMap[K, V] = { - if (isEmpty) empty - else { - var fs = fields - val sz = fs.size - underlying.get(key) match { - case Some(_) if size == 1 => empty - case Some((slot, _)) => - val s = slot - dropped - - // Calculate next of kin - val next = - if (s < sz - 1) fs(s + 1) match { - case Tombstone(d) => s + d + 1 - case _ => s + 1 - } else s + 1 - - fs = fs.updated(s, Tombstone(next - s)) - - // Calculate first index of preceding tombstone sequence - val first = - if (s > 0) { - fs(s - 1) match { - case Tombstone(d) if d < 0 => if (s + d >= 0) s + d else 0 - case Tombstone(d) if d == 1 => s - 1 - case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) - case _ => s - } - }else s - fs = fs.updated(first, Tombstone(next - first)) - - // Calculate last index of succeeding tombstone sequence - val last = next - 1 - if (last != first) { - fs = fs.updated(last, Tombstone(first - 1 - last)) - } - new VectorMap(fs, underlying - key, dropped) - case _ => - this - } - } - } - - override def mapFactory: MapFactory[VectorMap] = VectorMap - - override def contains(key: K): Boolean = underlying.contains(key) - - override def head: (K, V) = iterator.next() - - override def last: (K, V) = { - if (isEmpty) throw new UnsupportedOperationException("empty.last") - val lastSlot = fields.length - 1 - val last = fields.last match { - case Tombstone(d) if d < 0 => fields(lastSlot + d).asInstanceOf[K] - case Tombstone(d) if d == 1 => fields(lastSlot - 1).asInstanceOf[K] - case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) - case k => k.asInstanceOf[K] - } - (last, underlying(last)._2) - } - - override def lastOption: Option[(K, V)] = { - if (isEmpty) None - else Some(last) - } - - override def tail: VectorMap[K, V] = { - if (isEmpty) throw new UnsupportedOperationException("empty.tail") - val (slot, key) = nextValidField(0) - new VectorMap(fields.drop(slot + 1), underlying - key, dropped + slot + 1) - } - - override def init: VectorMap[K, V] = { - if (isEmpty) throw new UnsupportedOperationException("empty.init") - val lastSlot = fields.size - 1 - val (slot, key) = fields.last match { - case Tombstone(d) if d < 0 => (lastSlot + d, fields(lastSlot + d).asInstanceOf[K]) - case Tombstone(d) if d == 1 => (lastSlot - 1, fields(lastSlot - 1).asInstanceOf[K]) - case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) - case k => (lastSlot, k.asInstanceOf[K]) - } - new VectorMap(fields.dropRight(fields.size - slot), underlying - key, dropped) - } - - override def keys: Vector[K] = keysIterator.toVector - - override def values: Iterable[V] = new Iterable[V] with IterableFactoryDefaults[V, Iterable] { - override def iterator: Iterator[V] = keysIterator.map(underlying(_)._2) - } -} - -object VectorMap extends MapFactory[VectorMap] { - //Class to mark deleted slots in 'fields'. - //When one or more consecutive slots are deleted, the 'distance' of the first 'Tombstone' - // represents the distance to the location of the next undeleted slot (or the last slot in 'fields' +1 if it does not exist). 
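// (A worked sketch: deleting 'c' and then 'd' from fields [a, b, c, d, e] leaves
// [a, b, Tombstone(2), Tombstone(-2), e]: the leading Tombstone(2) points two
// slots ahead, to 'e'; the trailing encoding is described next.)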
- //When two or more consecutive slots are deleted, the 'distance' of the trailing 'Tombstone' - // represents the distance to the location of the previous undeleted slot ( or -1 if it does not exist) multiplied by -1. - //For other deleted slots, it simply indicates that they have been deleted. - private[VectorMap] final case class Tombstone(distance: Int) - - private[this] final val EmptyMap: VectorMap[Nothing, Nothing] = - new VectorMap[Nothing, Nothing](Vector.empty[Nothing], HashMap.empty[Nothing, (Int, Nothing)]) - - def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]] - - def from[K, V](it: collection.IterableOnce[(K, V)]^): VectorMap[K, V] = - it match { - case vm: VectorMap[K, V] => vm - case _ => (newBuilder[K, V] ++= it).result() - } - - def newBuilder[K, V]: mutable.Builder[(K, V), VectorMap[K, V]] = new VectorMapBuilder[K, V] -} - -private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { - private[this] val vectorBuilder = new VectorBuilder[K] - private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] - private[this] var aliased: VectorMap[K, V] @uncheckedCaptures = _ // OK since VectorMapBuilder is private - - override def clear(): Unit = { - vectorBuilder.clear() - mapBuilder.clear() - aliased = null - } - - override def result(): VectorMap[K, V] = { - if (aliased eq null) { - aliased = new VectorMap(vectorBuilder.result(), mapBuilder.result()) - } - aliased - } - def addOne(key: K, value: V): this.type = { - if (aliased ne null) { - aliased = aliased.updated(key, value) - } else { - mapBuilder.getOrElse(key, null) match { - case (slot, _) => - mapBuilder.addOne(key, (slot, value)) - case null => - val vectorSize = vectorBuilder.size - vectorBuilder.addOne(key) - mapBuilder.addOne(key, (vectorSize, value)) - } - } - this - } - - override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) -} diff --git a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala deleted file mode 100644 index 47fe769c81ef..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import scala.Predef.{wrapString => _, assert} -import scala.collection.Stepper.EfficientSplit -import scala.collection.convert.impl.CharStringStepper -import scala.collection.mutable.{Builder, StringBuilder} -import language.experimental.captureChecking - -/** - * This class serves as a wrapper augmenting `String`s with all the operations - * found in indexed sequences. - * - * The difference between this class and `StringOps` is that calling transformer - * methods such as `filter` and `map` will yield an object of type `WrappedString` - * rather than a `String`. 
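 * For instance (an illustrative sketch):
 * {{{
 *   val ws = new WrappedString("hello")
 *   ws.filter(_ != 'l')   // a WrappedString containing "heo", not a String
 *   ws.length             // 5, delegated to the underlying String
 * }}}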
- * - * @param self a string contained within this wrapped string - * - * @define Coll `WrappedString` - * @define coll wrapped string - */ -@SerialVersionUID(3L) -final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] - with IndexedSeqOps[Char, IndexedSeq, WrappedString] - with Serializable - with Pure { - - def apply(i: Int): Char = self.charAt(i) - - override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder - override def empty: WrappedString = WrappedString.empty - - override def slice(from: Int, until: Int): WrappedString = { - val start = if (from < 0) 0 else from - if (until <= start || start >= self.length) - return WrappedString.empty - - val end = if (until > length) length else until - new WrappedString(self.substring(start, end)) - } - override def length = self.length - override def toString = self - override def view: StringView = new StringView(self) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = { - val st = new CharStringStepper(self, 0, self.length) - val r = - if (shape.shape == StepperShape.CharShape) st - else { - assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") - AnyStepper.ofParIntStepper(st) - } - r.asInstanceOf[S with EfficientSplit] - } - - override def startsWith[B >: Char](that: IterableOnce[B]^, offset: Int = 0): Boolean = - that match { - case s: WrappedString => self.startsWith(s.self, offset) - case _ => super.startsWith(that, offset) - } - - override def endsWith[B >: Char](that: collection.Iterable[B]^): Boolean = - that match { - case s: WrappedString => self.endsWith(s.self) - case _ => super.endsWith(that) - } - - override def indexOf[B >: Char](elem: B, from: Int = 0): Int = elem match { - case c: Char => self.indexOf(c, from) - case _ => super.indexOf(elem, from) - } - - override def lastIndexOf[B >: Char](elem: B, end: Int = length - 1): Int = - elem match { - case c: Char => self.lastIndexOf(c, end) - case _ => super.lastIndexOf(elem, end) - } - - override def copyToArray[sealed B >: Char](xs: Array[B], start: Int, len: Int): Int = - (xs: Any) match { - case chs: Array[Char] => - val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len) - self.getChars(0, copied, chs, start) - copied - case _ => super.copyToArray(xs, start, len) - } - - override def appendedAll[B >: Char](suffix: IterableOnce[B]^): IndexedSeq[B] = - suffix match { - case s: WrappedString => new WrappedString(self concat s.self) - case _ => super.appendedAll(suffix) - } - - override def sameElements[B >: Char](o: IterableOnce[B]^) = o match { - case s: WrappedString => self == s.self - case _ => super.sameElements(o) - } - - override protected[this] def className = "WrappedString" - - override protected final def applyPreferredMaxLength: Int = Int.MaxValue - override def equals(other: Any): Boolean = other match { - case that: WrappedString => - this.self == that.self - case _ => - super.equals(other) - } -} - -/** A companion object for wrapped strings. 
- */ -@SerialVersionUID(3L) -object WrappedString extends SpecificIterableFactory[Char, WrappedString] { - def fromSpecific(it: IterableOnce[Char]^): WrappedString = { - val b = newBuilder - val s = it.knownSize - if(s >= 0) b.sizeHint(s) - b ++= it - b.result() - } - val empty: WrappedString = new WrappedString("") - def newBuilder: Builder[Char, WrappedString] = - new StringBuilder().mapResult(x => new WrappedString(x)) - - implicit class UnwrapOp(private val value: WrappedString) extends AnyVal { - def unwrap: String = value.self - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/package.scala b/tests/pos-special/stdlib/collection/immutable/package.scala deleted file mode 100644 index 985ef22859be..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/package.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -package object immutable { - type StringOps = scala.collection.StringOps - val StringOps = scala.collection.StringOps - type StringView = scala.collection.StringView - val StringView = scala.collection.StringView - - @deprecated("Use Iterable instead of Traversable", "2.13.0") - type Traversable[+X] = Iterable[X] - @deprecated("Use Iterable instead of Traversable", "2.13.0") - val Traversable = Iterable - - @deprecated("Use Map instead of DefaultMap", "2.13.0") - type DefaultMap[K, +V] = scala.collection.immutable.Map[K, V] -} diff --git a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala deleted file mode 100644 index a6413649e219..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala +++ /dev/null @@ -1,603 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.nowarn -import scala.collection.generic.DefaultSerializationProxy -import scala.language.implicitConversions -import language.experimental.captureChecking - - -/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. - * - * Basic map operations on single entries, including `contains` and `get`, - * are typically significantly faster with `AnyRefMap` than [[HashMap]]. - * Note that numbers and characters are not handled specially in AnyRefMap; - * only plain `equals` and `hashCode` are used in comparisons. - * - * Methods that traverse or regenerate the map, including `foreach` and `map`, - * are not in general faster than with `HashMap`. The methods `foreachKey`, - * `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster - * than alternative ways to achieve the same functionality. - * - * Maps with open addressing may become less efficient at lookup after - * repeated addition/removal of elements. 
Although `AnyRefMap` makes a - * decent attempt to remain efficient regardless, calling `repack` - * on a map that will no longer have elements removed but will be - * used heavily may save both time and storage space. - * - * This map is not intended to contain more than 2^29^ entries (approximately - * 500 million). The maximum capacity is 2^30^, but performance will degrade - * rapidly as 2^30^ is approached. - * - */ -class AnyRefMap[K <: AnyRef, sealed V] private[collection] (defaultEntry: K -> V, initialBufferSize: Int, initBlank: Boolean) - extends AbstractMap[K, V] - with MapOps[K, V, Map, AnyRefMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]] - with Serializable { - - import AnyRefMap._ - def this() = this(AnyRefMap.exceptionDefault, 16, true) - - /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: K -> V) = this(defaultEntry, 16, true) - - /** Creates a new `AnyRefMap` with an initial buffer of specified size. - * - * An `AnyRefMap` can typically contain half as many elements as its buffer size - * before it requires resizing. - */ - def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) - - /** Creates a new `AnyRefMap` with specified default values and initial buffer size. */ - def this(defaultEntry: K -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) - - private[this] var mask = 0 - private[this] var _size = 0 - private[this] var _vacant = 0 - private[this] var _hashes: Array[Int] = null - private[this] var _keys: Array[AnyRef] = null - private[this] var _values: Array[AnyRef] = null - - if (initBlank) defaultInitialize(initialBufferSize) - - private[this] def defaultInitialize(n: Int): Unit = { - mask = - if (n<0) 0x7 - else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 - _hashes = new Array[Int](mask+1) - _keys = new Array[AnyRef](mask+1) - _values = new Array[AnyRef](mask+1) - } - - private[collection] def initializeTo( - m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef] - ): Unit = { - mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz - } - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): AnyRefMap[K,V] = { - var sz = coll.knownSize - if(sz < 0) sz = 4 - val arm = new AnyRefMap[K, V](sz * 2) - coll.iterator.foreach{ case (k,v) => arm(k) = v } - if (arm.size < (sz>>3)) arm.repack() - arm - } - override protected def newSpecificBuilder: Builder[(K, V), AnyRefMap[K,V]] = new AnyRefMapBuilder - - override def size: Int = _size - override def knownSize: Int = size - override def isEmpty: Boolean = _size == 0 - override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry) - - private def imbalanced: Boolean = - (_size + _vacant) > 0.5*mask || _vacant > _size - - private def hashOf(key: K): Int = { - // Note: this method must not return 0 or Int.MinValue, as these indicate no element - if (key eq null) 0x41081989 - else { - val h = key.hashCode - // Part of the MurmurHash3 32 bit finalizer - val i = (h ^ (h >>> 16)) * 0x85EBCA6B - val j = (i ^ (i >>> 13)) & 0x7FFFFFFF - if (j==0) 0x41081989 else j - } - } - - private def seekEntry(h: Int, k: AnyRef): Int = { - var e = h & mask - var x = 0 - var g = 0 - val hashes = _hashes - val keys = _keys - while ({ g = hashes(e); g != 0}) { - if (g == h && { val q = keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e - x += 1 - e = 
(e + 2*(x+1)*x - 3) & mask - } - e | MissingBit - } - - @`inline` private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { - var e = h & mask - var x = 0 - var g = 0 - var o = -1 - while ({ g = _hashes(e); g != 0}) { - if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e - else if (o == -1 && g+g == 0) o = e - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - if (o >= 0) o | MissVacant else e | MissingBit - } - - override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0 - - override def get(key: K): Option[V] = { - val i = seekEntry(hashOf(key), key) - if (i < 0) None else Some(_values(i).asInstanceOf[V]) - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - val i = seekEntry(hashOf(key), key) - if (i < 0) default else _values(i).asInstanceOf[V] - } - - override def getOrElseUpdate(key: K, defaultValue: => V): V = { - val h = hashOf(key) - var i = seekEntryOrOpen(h, key) - if (i < 0) { - // It is possible that the default value computation was side-effecting - // Our hash table may have resized or even contain what we want now - // (but if it does, we'll replace it) - val value = { - val oh = _hashes - val ans = defaultValue - if (oh ne _hashes) { - i = seekEntryOrOpen(h, key) - if (i >= 0) _size -= 1 - } - ans - } - _size += 1 - val j = i & IndexMask - _hashes(j) = h - _keys(j) = key.asInstanceOf[AnyRef] - _values(j) = value.asInstanceOf[AnyRef] - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - value - } - else _values(i).asInstanceOf[V] - } - - /** Retrieves the value associated with a key, or the default for that type if none exists - * (null for AnyRef, 0 for floats and integers). - * - * Note: this is the fastest way to retrieve a value that may or - * may not exist, if the default null/zero is acceptable. For key/value - * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. - */ - def getOrNull(key: K): V = { - val i = seekEntry(hashOf(key), key) - (if (i < 0) null else _values(i)).asInstanceOf[V] - } - - /** Retrieves the value associated with a key. - * If the key does not exist in the map, the `defaultEntry` for that key - * will be returned instead; an exception will be thrown if no - * `defaultEntry` was supplied. - */ - override def apply(key: K): V = { - val i = seekEntry(hashOf(key), key) - if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] - } - - /** Defers to defaultEntry to find a default value for the key. Throws an - * exception if no other default behavior was specified. - */ - override def default(key: K): V = defaultEntry(key) - - private def repack(newMask: Int): Unit = { - val oh = _hashes - val ok = _keys - val ov = _values - mask = newMask - _hashes = new Array[Int](mask+1) - _keys = new Array[AnyRef](mask+1) - _values = new Array[AnyRef](mask+1) - _vacant = 0 - var i = 0 - while (i < oh.length) { - val h = oh(i) - if (h+h != 0) { - var e = h & mask - var x = 0 - while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - _hashes(e) = h - _keys(e) = ok(i) - _values(e) = ov(i) - } - i += 1 - } - } - - /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup. - * - * For maps that undergo a complex creation process with both addition and - * removal of keys, and then are used heavily with no further removal of - * elements, calling `repack` after the end of the creation can result in - * improved performance. Repacking takes time proportional to the number - * of entries in the map. 
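 * For example (an illustrative sketch):
 * {{{
 *   val m = AnyRefMap.empty[String, Int]
 *   (1 to 1000).foreach(i => m(i.toString) = i)
 *   (1 to 900).foreach(i => m -= i.toString)
 *   m.repack()   // compact once mutation has settled; later lookups probe less
 * }}}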
- */ - def repack(): Unit = { - var m = mask - if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask - while (m > 8 && 8*_size < m) m = m >>> 1 - repack(m) - } - - override def put(key: K, value: V): Option[V] = { - val h = hashOf(key) - val i = seekEntryOrOpen(h, key) - if (i < 0) { - val j = i & IndexMask - _hashes(j) = h - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - None - } - else { - val ans = Some(_values(i).asInstanceOf[V]) - _hashes(i) = h - _values(i) = value.asInstanceOf[AnyRef] - ans - } - } - - /** Updates the map to include a new key-value pair. - * - * This is the fastest way to add an entry to an `AnyRefMap`. - */ - override def update(key: K, value: V): Unit = { - val h = hashOf(key) - val i = seekEntryOrOpen(h, key) - if (i < 0) { - val j = i & IndexMask - _hashes(j) = h - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - } - else { - _hashes(i) = h - _values(i) = value.asInstanceOf[AnyRef] - } - } - - /** Adds a new key/value pair to this map and returns the map. */ - @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") - def +=(key: K, value: V): this.type = { update(key, value); this } - - /** Adds a new key/value pair to this map and returns the map. */ - @inline final def addOne(key: K, value: V): this.type = { update(key, value); this } - - @inline override final def addOne(kv: (K, V)): this.type = { update(kv._1, kv._2); this } - - def subtractOne(key: K): this.type = { - val i = seekEntry(hashOf(key), key) - if (i >= 0) { - _size -= 1 - _vacant += 1 - _hashes(i) = Int.MinValue - _keys(i) = null - _values(i) = null - } - this - } - - def iterator: Iterator[(K, V)] = new AnyRefMapIterator[(K, V)] { - protected def nextResult(k: K, v: V) = (k, v) - } - override def keysIterator: Iterator[K] = new AnyRefMapIterator[K] { - protected def nextResult(k: K, v: V) = k - } - override def valuesIterator: Iterator[V] = new AnyRefMapIterator[V] { - protected def nextResult(k: K, v: V) = v - } - - private abstract class AnyRefMapIterator[A] extends AbstractIterator[A] { - private[this] val hz = _hashes - private[this] val kz = _keys - private[this] val vz = _values - - private[this] var index = 0 - - def hasNext: Boolean = index= hz.length) return false - h = hz(index) - } - true - } - - def next(): A = { - if (hasNext) { - val ans = nextResult(kz(index).asInstanceOf[K], vz(index).asInstanceOf[V]) - index += 1 - ans - } - else throw new NoSuchElementException("next") - } - - protected def nextResult(k: K, v: V): A - } - - - override def foreach[U](f: ((K,V)) => U): Unit = { - var i = 0 - var e = _size - while (e > 0) { - while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 - if (i < _hashes.length) { - f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V])) - i += 1 - e -= 1 - } - else return - } - } - - override def foreachEntry[U](f: (K,V) => U): Unit = { - var i = 0 - var e = _size - while (e > 0) { - while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 - if (i < _hashes.length) { - f(_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]) - i += 1 - e -= 1 - } - else return - } - } - - override def clone(): AnyRefMap[K, V] = { - val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) - val kz = 
java.util.Arrays.copyOf(_keys, _keys.length) - val vz = java.util.Arrays.copyOf(_values, _values.length) - val arm = new AnyRefMap[K, V](defaultEntry, 1, false) - arm.initializeTo(mask, _size, _vacant, hz, kz, vz) - arm - } - - @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [sealed V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [sealed V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { - val m = this + elem1 + elem2 - if(elems.isEmpty) m else m.concat(elems) - } - - override def concat[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = { - val arm = clone().asInstanceOf[AnyRefMap[K, V2]] - xs.iterator.foreach(kv => arm += kv) - arm - } - - override def ++[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = concat(xs) - - @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") - override def updated[sealed V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = - clone().asInstanceOf[AnyRefMap[K, V1]].addOne(key, value) - - private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { - var i,j = 0 - while (i < _hashes.length & j < _size) { - val h = _hashes(i) - if (h+h != 0) { - j += 1 - f(elems(i).asInstanceOf[A]) - } - i += 1 - } - } - - /** Applies a function to all keys of this map. */ - def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys, f) - - /** Applies a function to all values of this map. */ - def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values, f) - - /** Creates a new `AnyRefMap` with different values. - * Unlike `mapValues`, this method generates a new - * collection immediately. - */ - def mapValuesNow[sealed V1](f: V => V1): AnyRefMap[K, V1] = { - val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) - val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = new Array[AnyRef](_values.length) - var i,j = 0 - while (i < _hashes.length & j < _size) { - val h = _hashes(i) - if (h+h != 0) { - j += 1 - vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - arm.initializeTo(mask, _size, _vacant, hz, kz, vz) - arm - } - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. - */ - @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") - @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. 
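 * For example (sketch):
 * {{{
 *   val m = AnyRefMap("a" -> 1, "b" -> 2)
 *   m.transformValuesInPlace(_ * 10)   // m now maps "a" -> 10 and "b" -> 20
 * }}}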
- */ - def transformValuesInPlace(f: V => V): this.type = { - var i,j = 0 - while (i < _hashes.length & j < _size) { - val h = _hashes(i) - if (h+h != 0) { - j += 1 - _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - this - } - - // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) - def map[K2 <: AnyRef, sealed V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - AnyRefMap.from(new View.Map(this, f)) - def flatMap[K2 <: AnyRef, sealed V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - AnyRefMap.from(new View.FlatMap(this, f)) - def collect[K2 <: AnyRef, sealed V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) - - override def clear(): Unit = { - import java.util.Arrays.fill - fill(_keys, null) - fill(_values, null) - fill(_hashes, 0) - _size = 0 - _vacant = 0 - } - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(AnyRefMap.toFactory[K, V](AnyRefMap), this) - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "AnyRefMap" -} - -object AnyRefMap { - private final val IndexMask = 0x3FFFFFFF - private final val MissingBit = 0x80000000 - private final val VacantBit = 0x40000000 - private final val MissVacant = 0xC0000000 - - private class ExceptionDefault extends (Any -> Nothing) with Serializable { - def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) - } - private val exceptionDefault = new ExceptionDefault - - /** A builder for instances of `AnyRefMap`. - * - * This builder can be reused to create multiple instances. - */ - final class AnyRefMapBuilder[K <: AnyRef, sealed V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { - private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] - def addOne(entry: (K, V)): this.type = { - elems += entry - this - } - def clear(): Unit = elems = new AnyRefMap[K, V] - def result(): AnyRefMap[K, V] = elems - override def knownSize: Int = elems.knownSize - } - - /** Creates a new `AnyRefMap` with zero or more key/value pairs. */ - def apply[K <: AnyRef, sealed V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) - - def newBuilder[K <: AnyRef, sealed V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] - - private def buildFromIterableOnce[K <: AnyRef, sealed V](elems: IterableOnce[(K, V)]^): AnyRefMap[K, V] = { - var sz = elems.knownSize - if(sz < 0) sz = 4 - val arm = new AnyRefMap[K, V](sz * 2) - elems.iterator.foreach{ case (k,v) => arm(k) = v } - if (arm.size < (sz>>3)) arm.repack() - arm - } - - /** Creates a new empty `AnyRefMap`. */ - def empty[K <: AnyRef, sealed V]: AnyRefMap[K, V] = new AnyRefMap[K, V] - - /** Creates a new empty `AnyRefMap` with the supplied default */ - def withDefault[K <: AnyRef, sealed V](default: K -> V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) - - /** Creates a new `AnyRefMap` from an existing source collection. A source collection - * which is already an `AnyRefMap` gets cloned. 
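 * For example (sketch):
 * {{{
 *   val m  = AnyRefMap("a" -> 1)
 *   val m2 = AnyRefMap.from(m)   // a clone: updating m2 leaves m untouched
 *   m2("b") = 2
 * }}}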
- * - * @param source Source collection - * @tparam K the type of the keys - * @tparam V the type of the values - * @return a new `AnyRefMap` with the elements of `source` - */ - def from[K <: AnyRef, sealed V](source: IterableOnce[(K, V)]^): AnyRefMap[K, V] = source match { - case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]] - case _ => buildFromIterableOnce(source) - } - - /** Creates a new `AnyRefMap` from arrays of keys and values. - * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. - */ - def fromZip[K <: AnyRef, sealed V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { - val sz = math.min(keys.length, values.length) - val arm = new AnyRefMap[K, V](sz * 2) - var i = 0 - while (i < sz) { arm(keys(i)) = values(i); i += 1 } - if (arm.size < (sz>>3)) arm.repack() - arm - } - - /** Creates a new `AnyRefMap` from keys and values. - * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. - */ - def fromZip[K <: AnyRef, sealed V](keys: Iterable[K]^, values: Iterable[V]^): AnyRefMap[K, V] = { - val sz = math.min(keys.size, values.size) - val arm = new AnyRefMap[K, V](sz * 2) - val ki = keys.iterator - val vi = values.iterator - while (ki.hasNext && vi.hasNext) arm(ki.next()) = vi.next() - if (arm.size < (sz >> 3)) arm.repack() - arm - } - - implicit def toFactory[K <: AnyRef, sealed V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] - - @SerialVersionUID(3L) - private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]^): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) - def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] - } - - implicit def toBuildFrom[K <: AnyRef, sealed V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] - private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]^) = AnyRefMap.from(it) - def newBuilder(from: Any) = AnyRefMap.newBuilder[AnyRef, AnyRef] - } - - implicit def iterableFactory[K <: AnyRef, sealed V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) - implicit def buildFromAnyRefMap[K <: AnyRef, sealed V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) -} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala deleted file mode 100644 index 8fa1e6edd566..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala +++ /dev/null @@ -1,406 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package mutable - -import java.util.Arrays - -import scala.annotation.nowarn -import scala.annotation.tailrec -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** An implementation of the `Buffer` class using an array to - * represent the assembled sequence internally. Append, update and random - * access take constant time (amortized time). Prepends and removes are - * linear in the buffer size. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] - * section on `Array Buffers` for more information. - - * - * @tparam A the type of this arraybuffer's elements. - * - * @define Coll `mutable.ArrayBuffer` - * @define coll array buffer - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-1582447879429021880L) -class ArrayBuffer[sealed A] private (initialElements: Array[AnyRef], initialSize: Int) - extends AbstractBuffer[A] - with IndexedBuffer[A] - with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] - with StrictOptimizedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] - with IterableFactoryDefaults[A, ArrayBuffer] - with DefaultSerializable { - - def this() = this(new Array[AnyRef](ArrayBuffer.DefaultInitialSize), 0) - - def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0) - - @transient private[this] var mutationCount: Int = 0 - - // needs to be `private[collection]` or `protected[collection]` for parallel-collections - protected[collection] var array: Array[AnyRef] = initialElements - protected var size0 = initialSize - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - shape.parUnbox(new ObjectArrayStepper(array, 0, length).asInstanceOf[AnyStepper[A] with EfficientSplit]) - } - - override def knownSize: Int = super[IndexedSeqOps].knownSize - - /** Ensure that the internal array has at least `n` cells. */ - protected def ensureSize(n: Int): Unit = { - array = ArrayBuffer.ensureSize(array, size0, n) - } - - // TODO 3.T: should be `protected`, perhaps `protected[this]` - /** Ensure that the internal array has at least `n` additional cells more than `size0`. */ - private[mutable] def ensureAdditionalSize(n: Int): Unit = { - // `.toLong` to ensure `Long` arithmetic is used and prevent `Int` overflow - array = ArrayBuffer.ensureSize(array, size0, size0.toLong + n) - } - - def sizeHint(size: Int): Unit = - if(size > length && size >= 1) ensureSize(size) - - /** Reduce length to `n`, nulling out all dropped elements */ - private def reduceToSize(n: Int): Unit = { - mutationCount += 1 - Arrays.fill(array, n, size0, null) - size0 = n - } - - /** Trims the ArrayBuffer to an appropriate size for the current - * number of elements (rounding up to the next natural size), - * which may replace the array by a shorter one. - * This allows releasing some unused memory. - */ - def trimToSize(): Unit = { - resize(length) - } - - /** Trims the `array` buffer size down to either a power of 2 - * or Int.MaxValue while keeping first `requiredLength` elements. 
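 * For example (a sketch of the public entry point, which delegates here):
 * {{{
 *   val buf = new ArrayBuffer[Int](1024)   // generous initial capacity
 *   buf ++= (1 to 10)
 *   buf.trimToSize()   // shrinks the backing array via resize(length)
 * }}}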
- */ - private def resize(requiredLength: Int): Unit = - array = ArrayBuffer.downsize(array, requiredLength) - - @inline private def checkWithinBounds(lo: Int, hi: Int) = { - if (lo < 0) throw new IndexOutOfBoundsException(s"$lo is out of bounds (min 0, max ${size0 - 1})") - if (hi > size0) throw new IndexOutOfBoundsException(s"${hi - 1} is out of bounds (min 0, max ${size0 - 1})") - } - - def apply(n: Int): A = { - checkWithinBounds(n, n + 1) - array(n).asInstanceOf[A] - } - - def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { - checkWithinBounds(index, index + 1) - mutationCount += 1 - array(index) = elem.asInstanceOf[AnyRef] - } - - def length = size0 - - // TODO: return `IndexedSeqView` rather than `ArrayBufferView` - override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount) - - override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer - - /** Note: This does not actually resize the internal representation. - * See clearAndShrink if you want to also resize internally - */ - def clear(): Unit = reduceToSize(0) - - /** - * Clears this buffer and shrinks to @param size (rounding up to the next - * natural size) - * @param size - */ - def clearAndShrink(size: Int = ArrayBuffer.DefaultInitialSize): this.type = { - clear() - resize(size) - this - } - - def addOne(elem: A): this.type = { - mutationCount += 1 - ensureAdditionalSize(1) - val oldSize = size0 - size0 = oldSize + 1 - this(oldSize) = elem - this - } - - // Overridden to use array copying for efficiency where possible. - override def addAll(elems: IterableOnce[A]^): this.type = { - elems match { - case elems: ArrayBuffer[_] => - val elemsLength = elems.size0 - if (elemsLength > 0) { - mutationCount += 1 - ensureAdditionalSize(elemsLength) - Array.copy(elems.array, 0, array, length, elemsLength) - size0 = length + elemsLength - } - case _ => super.addAll(elems) - } - this - } - - def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { - checkWithinBounds(index, index) - mutationCount += 1 - ensureAdditionalSize(1) - Array.copy(array, index, array, index + 1, size0 - index) - size0 += 1 - this(index) = elem - } - - def prepend(elem: A): this.type = { - insert(0, elem) - this - } - - def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]^): Unit = { - checkWithinBounds(index, index) - elems match { - case elems: collection.Iterable[A] => - val elemsLength = elems.size - if (elemsLength > 0) { - mutationCount += 1 - ensureAdditionalSize(elemsLength) - val len = size0 - Array.copy(array, index, array, index + elemsLength, len - index) - // if `elems eq this`, this copy is safe because - // - `elems.array eq this.array` - // - we didn't overwrite the values being inserted after moving them in - // the previous line - // - `copyElemsToArray` will call `System.arraycopy` - // - `System.arraycopy` will effectively "read" all the values before - // overwriting any of them when two arrays are the the same reference - val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) - if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength") - size0 = len + elemsLength // update size AFTER the copy, in case we're inserting a proxy - } - case _ => insertAll(index, ArrayBuffer.from(elems)) - } - } - - /** Note: This does not actually resize the internal representation. 
- * See trimToSize if you want to also resize internally - */ - def remove(@deprecatedName("n", "2.13.0") index: Int): A = { - checkWithinBounds(index, index + 1) - val res = this(index) - Array.copy(array, index + 1, array, index, size0 - (index + 1)) - reduceToSize(size0 - 1) - res - } - - /** Note: This does not actually resize the internal representation. - * See trimToSize if you want to also resize internally - */ - def remove(@deprecatedName("n", "2.13.0") index: Int, count: Int): Unit = - if (count > 0) { - checkWithinBounds(index, index + count) - Array.copy(array, index + count, array, index, size0 - (index + count)) - reduceToSize(size0 - count) - } else if (count < 0) { - throw new IllegalArgumentException("removing negative number of elements: " + count) - } - - @deprecated("Use 'this' instance instead", "2.13.0") - @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") - @inline def result(): this.type = this - - @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") - @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") - @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo]^{f} = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "ArrayBuffer" - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) - if(copied > 0) { - Array.copy(array, 0, xs, start, copied) - } - copied - } - - /** Sorts this $coll in place according to an Ordering. - * - * @see [[scala.collection.mutable.IndexedSeqOps.sortInPlace]] - * @param ord the ordering to be used to compare elements. - * @return modified input $coll sorted according to the ordering `ord`. - */ - override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { - if (length > 1) { - mutationCount += 1 - scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]], 0, length) - } - this - } - - @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = - if (start == end) z - else foldl(start + 1, end, op(z, array(start).asInstanceOf[A]), op) - - @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = - if (start == end) z - else foldr(start, end - 1, op(array(end - 1).asInstanceOf[A], z), op) - - override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op) - - override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op) - - override def reduceLeft[B >: A](op: (B, A) => B): B = if (length > 0) foldl(1, length, array(0).asInstanceOf[B], op) else super.reduceLeft(op) - - override def reduceRight[B >: A](op: (A, B) => B): B = if (length > 0) foldr(0, length - 1, array(length - 1).asInstanceOf[B], op) else super.reduceRight(op) -} - -/** - * Factory object for the `ArrayBuffer` class. 
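 * For example (sketch):
 * {{{
 *   val buf = ArrayBuffer.from(1 to 5)   // knownSize >= 0: sized in one allocation
 *   buf += 6                             // subsequent growth doubles as needed
 * }}}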
- * - * $factoryInfo - * - * @define coll array buffer - * @define Coll `mutable.ArrayBuffer` - */ -@SerialVersionUID(3L) -object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { - final val DefaultInitialSize = 16 - private[this] val emptyArray = new Array[AnyRef](0) - - def from[sealed B](coll: collection.IterableOnce[B]^): ArrayBuffer[B] = { - val k = coll.knownSize - if (k >= 0) { - // Avoid reallocation of buffer if length is known - val array = ensureSize(emptyArray, 0, k) // don't duplicate sizing logic, and check VM array size limit - val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) - if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") - new ArrayBuffer[B](array, k) - } - else new ArrayBuffer[B] ++= coll - } - - def newBuilder[sealed A]: Builder[A, ArrayBuffer[A]] = - new GrowableBuilder[A, ArrayBuffer[A]](empty) { - override def sizeHint(size: Int): Unit = elems.ensureSize(size) - } - - def empty[sealed A]: ArrayBuffer[A] = new ArrayBuffer[A]() - - /** - * @param arrayLen the length of the backing array - * @param targetLen the minimum length to resize up to - * @return -1 if no resizing is needed, or the size for the new array otherwise - */ - private def resizeUp(arrayLen: Long, targetLen: Long): Int = { - if (targetLen <= arrayLen) -1 - else { - if (targetLen > Int.MaxValue) throw new Exception(s"Collections cannot have more than ${Int.MaxValue} elements") - IterableOnce.checkArraySizeWithinVMLimit(targetLen.toInt) // safe because `targetSize <= Int.MaxValue` - - val newLen = math.max(targetLen, math.max(arrayLen * 2, DefaultInitialSize)) - math.min(newLen, scala.runtime.PStatics.VM_MaxArraySize).toInt - } - } - // if necessary, copy (curSize elements of) the array to a new array of capacity n. - // Should use Array.copyOf(array, resizeEnsuring(array.length))? - private def ensureSize(array: Array[AnyRef], curSize: Int, targetSize: Long): Array[AnyRef] = { - val newLen = resizeUp(array.length, targetSize) - if (newLen < 0) array - else { - val res = new Array[AnyRef](newLen) - System.arraycopy(array, 0, res, 0, curSize) - res - } - } - - /** - * @param arrayLen the length of the backing array - * @param targetLen the length to resize down to, if smaller than `arrayLen` - * @return -1 if no resizing is needed, or the size for the new array otherwise - */ - private def resizeDown(arrayLen: Int, targetLen: Int): Int = - if (targetLen >= arrayLen) -1 else math.max(targetLen, 0) - private def downsize(array: Array[AnyRef], targetSize: Int): Array[AnyRef] = { - val newLen = resizeDown(array.length, targetSize) - if (newLen < 0) array - else if (newLen == 0) emptyArray - else { - val res = new Array[AnyRef](newLen) - System.arraycopy(array, 0, res, 0, targetSize) - res - } - } -} - -// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` -final class ArrayBufferView[sealed A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () -> Int) - extends AbstractIndexedSeqView[A], Pure { - /* Removed since it poses problems for capture checking - @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") - def this(array: Array[AnyRef], length: Int) = { - // this won't actually track mutation, but it would be a pain to have the implementation - // check if we have a method to get the current mutation count or not on every method and - // change what it does based on that. hopefully no one ever calls this. 
- this({ - val _array: Array[Object] = array - val _length = length - new ArrayBuffer[A](0) { - this.array = _array - this.size0 = _length - }: ArrayBuffer[A] - }, () => 0) - }*/ - - @deprecated("never intended to be public", since = "2.13.7") - def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] - - @throws[IndexOutOfBoundsException] - def apply(n: Int): A = underlying(n) - def length: Int = underlying.length - override protected[this] def className = "ArrayBufferView" - - // we could inherit all these from `CheckedIndexedSeqView`, except this class is public - override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) - override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) - - override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) - override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) - override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) - override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) - override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) - override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) - override def map[B](f: A => B): IndexedSeqView[B]^{f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) - override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) - override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) - override def tapEach[U](f: A => U): IndexedSeqView[A]^{f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) - - override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) -} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala deleted file mode 100644 index 0620d3d23061..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala +++ /dev/null @@ -1,523 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import language.experimental.captureChecking -import scala.reflect.ClassTag - -/** A builder class for arrays. - * - * @tparam T the type of the elements for the builder. 
- */ -@SerialVersionUID(3L) -sealed abstract class ArrayBuilder[sealed T] - extends ReusableBuilder[T, Array[T]] - with Serializable { - protected[this] var capacity: Int = 0 - protected[this] def elems: Array[T] - protected var size: Int = 0 - - def length: Int = size - - override def knownSize: Int = size - - protected[this] final def ensureSize(size: Int): Unit = { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - override final def sizeHint(size: Int): Unit = - if (capacity < size) resize(size) - - def clear(): Unit = size = 0 - - protected[this] def resize(size: Int): Unit - - /** Add all elements of an array */ - def addAll(xs: Array[_ <: T]): this.type = addAll(xs, 0, xs.length) - - /** Add a slice of an array */ - def addAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = { - ensureSize(this.size + length) - Array.copy(xs, offset, elems, this.size, length) - size += length - this - } - - override def addAll(xs: IterableOnce[T]^): this.type = { - val k = xs.knownSize - if (k > 0) { - ensureSize(this.size + k) - val actual = IterableOnce.copyElemsToArray(xs, elems, this.size) - if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") - size += k - } else if (k < 0) super.addAll(xs) - this - } -} - -/** A companion object for array builders. - */ -object ArrayBuilder { - - /** Creates a new arraybuilder of type `T`. - * - * @tparam T type of the elements for the array builder, with a `ClassTag` context bound. - * @return a new empty array builder. - */ - @inline def make[T: ClassTag]: ArrayBuilder[T] = { - val tag = implicitly[ClassTag[T]] - tag.runtimeClass match { - case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]] - case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]] - case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]] - case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]] - case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]] - case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]] - case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]] - case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]] - case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]] - case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] - } - } - - /** A class for array builders for arrays of reference types. - * - * This builder can be reused. - * - * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound. 
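 * For example (sketch): `make` above falls through to this class for
 * reference element types:
 * {{{
 *   val b = ArrayBuilder.make[String]   // an ofRef[String] under the hood
 *   b += "x"; b += "y"
 *   b.result()                          // Array("x", "y")
 * }}}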
- */ - @SerialVersionUID(3L) - final class ofRef[T <: AnyRef](implicit ct: ClassTag[T]) extends ArrayBuilder[T] { - - protected var elems: Array[T] = _ - - private def mkArray(size: Int): Array[T] = { - if (capacity == size && capacity > 0) elems - else if (elems eq null) new Array[T](size) - else java.util.Arrays.copyOf[T](elems, size) - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: T): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[T] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def clear(): Unit = { - super.clear() - if(elems ne null) java.util.Arrays.fill(elems.asInstanceOf[Array[AnyRef]], null) - } - - override def equals(other: Any): Boolean = other match { - case x: ofRef[_] => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofRef" - } - - /** A class for array builders for arrays of `byte`s. It can be reused. */ - @SerialVersionUID(3L) - final class ofByte extends ArrayBuilder[Byte] { - - protected var elems: Array[Byte] = _ - - private def mkArray(size: Int): Array[Byte] = { - val newelems = new Array[Byte](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Byte): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Byte] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofByte => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofByte" - } - - /** A class for array builders for arrays of `short`s. It can be reused. */ - @SerialVersionUID(3L) - final class ofShort extends ArrayBuilder[Short] { - - protected var elems: Array[Short] = _ - - private def mkArray(size: Int): Array[Short] = { - val newelems = new Array[Short](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Short): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Short] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofShort => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofShort" - } - - /** A class for array builders for arrays of `char`s. It can be reused. 
*/ - @SerialVersionUID(3L) - final class ofChar extends ArrayBuilder[Char] { - - protected var elems: Array[Char] = _ - - private def mkArray(size: Int): Array[Char] = { - val newelems = new Array[Char](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Char): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Char] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofChar => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofChar" - } - - /** A class for array builders for arrays of `int`s. It can be reused. */ - @SerialVersionUID(3L) - final class ofInt extends ArrayBuilder[Int] { - - protected var elems: Array[Int] = _ - - private def mkArray(size: Int): Array[Int] = { - val newelems = new Array[Int](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Int): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Int] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofInt => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofInt" - } - - /** A class for array builders for arrays of `long`s. It can be reused. */ - @SerialVersionUID(3L) - final class ofLong extends ArrayBuilder[Long] { - - protected var elems: Array[Long] = _ - - private def mkArray(size: Int): Array[Long] = { - val newelems = new Array[Long](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Long): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Long] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofLong => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofLong" - } - - /** A class for array builders for arrays of `float`s. It can be reused. 
*/ - @SerialVersionUID(3L) - final class ofFloat extends ArrayBuilder[Float] { - - protected var elems: Array[Float] = _ - - private def mkArray(size: Int): Array[Float] = { - val newelems = new Array[Float](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Float): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Float] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofFloat => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofFloat" - } - - /** A class for array builders for arrays of `double`s. It can be reused. */ - @SerialVersionUID(3L) - final class ofDouble extends ArrayBuilder[Double] { - - protected var elems: Array[Double] = _ - - private def mkArray(size: Int): Array[Double] = { - val newelems = new Array[Double](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Double): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Double] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofDouble => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofDouble" - } - - /** A class for array builders for arrays of `boolean`s. It can be reused. */ - @SerialVersionUID(3L) - class ofBoolean extends ArrayBuilder[Boolean] { - - protected var elems: Array[Boolean] = _ - - private def mkArray(size: Int): Array[Boolean] = { - val newelems = new Array[Boolean](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Boolean): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Boolean] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofBoolean => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofBoolean" - } - - /** A class for array builders for arrays of `Unit` type. It can be reused. 
*/ - @SerialVersionUID(3L) - final class ofUnit extends ArrayBuilder[Unit] { - - protected def elems: Array[Unit] = throw new UnsupportedOperationException() - - def addOne(elem: Unit): this.type = { - size += 1 - this - } - - override def addAll(xs: IterableOnce[Unit]^): this.type = { - size += xs.iterator.size - this - } - - override def addAll(xs: Array[_ <: Unit], offset: Int, length: Int): this.type = { - size += length - this - } - - def result() = { - val ans = new Array[Unit](size) - var i = 0 - while (i < size) { ans(i) = (); i += 1 } - ans - } - - override def equals(other: Any): Boolean = other match { - case x: ofUnit => (size == x.size) - case _ => false - } - - protected[this] def resize(size: Int): Unit = () - - override def toString = "ArrayBuilder.ofUnit" - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala deleted file mode 100644 index f22aacec65c5..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala +++ /dev/null @@ -1,646 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.nowarn -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.reflect.ClassTag -import language.experimental.captureChecking - -/** An implementation of a double-ended queue that internally uses a resizable circular buffer. - * - * Append, prepend, removeHead, removeLast and random-access (indexed-lookup and indexed-replacement) - * take amortized constant time. In general, removals and insertions at i-th index are O(min(i, n-i)) - * and thus insertions and removals from end/beginning are fast. - * - * @note Subclasses ''must'' override the `ofArray` protected method to return a more specific type. - * - * @tparam A the type of this ArrayDeque's elements. - * - * @define Coll `mutable.ArrayDeque` - * @define coll array deque - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class ArrayDeque[sealed A] protected ( - protected var array: Array[AnyRef], - private[ArrayDeque] var start: Int, - private[ArrayDeque] var end: Int -) extends AbstractBuffer[A] - with IndexedBuffer[A] - with IndexedSeqOps[A, ArrayDeque, ArrayDeque[A]] - with StrictOptimizedSeqOps[A, ArrayDeque, ArrayDeque[A]] - with IterableFactoryDefaults[A, ArrayDeque] - with ArrayDequeOps[A, ArrayDeque, ArrayDeque[A]] - with Cloneable[ArrayDeque[A]] - with DefaultSerializable { - - reset(array, start, end) - - private[this] def reset(array: Array[AnyRef], start: Int, end: Int) = { - assert((array.length & (array.length - 1)) == 0, s"Array.length must be power of 2") - requireBounds(idx = start, until = array.length) - requireBounds(idx = end, until = array.length) - this.array = array - this.start = start - this.end = end - } - - def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = this(ArrayDeque.alloc(initialSize), start = 0, end = 0) - - override def knownSize: Int = super[IndexedSeqOps].knownSize - - // No-Op override to allow for more efficient stepper in a minor release. 
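For orientation, a minimal usage sketch (not part of the patch) of the circular buffer documented above: amortized O(1) at both ends, O(min(i, n-i)) for insertion or removal in the middle.

import scala.collection.mutable.ArrayDeque

object ArrayDequeDemo {
  def main(args: Array[String]): Unit = {
    val dq = ArrayDeque(2, 3)
    dq.prepend(1)             // amortized O(1): writes just before `start`
    dq.append(4)              // amortized O(1): writes at `end`
    println(dq)               // ArrayDeque(1, 2, 3, 4)
    println(dq.removeHead())  // 1
    println(dq.removeLast())  // 4
  }
}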
- override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = super.stepper(shape) - - def apply(idx: Int): A = { - requireBounds(idx) - _get(idx) - } - - def update(idx: Int, elem: A): Unit = { - requireBounds(idx) - _set(idx, elem) - } - - def addOne(elem: A): this.type = { - ensureSize(length + 1) - appendAssumingCapacity(elem) - } - - def prepend(elem: A): this.type = { - ensureSize(length + 1) - prependAssumingCapacity(elem) - } - - @inline private[ArrayDeque] def appendAssumingCapacity(elem: A): this.type = { - array(end) = elem.asInstanceOf[AnyRef] - end = end_+(1) - this - } - - @inline private[ArrayDeque] def prependAssumingCapacity(elem: A): this.type = { - start = start_-(1) - array(start) = elem.asInstanceOf[AnyRef] - this - } - - override def prependAll(elems: IterableOnce[A]^): this.type = { - val it = elems.iterator - if (it.nonEmpty) { - val n = length - // The following code resizes the current collection at most once and traverses elems at most twice - elems.knownSize match { - // Size is too expensive to compute AND we can traverse it only once - can't do much but retry with an IndexedSeq - case srcLength if srcLength < 0 => prependAll(it.to(IndexedSeq: Factory[A, IndexedSeq[A]] /* type ascription needed by Dotty */)) - - // We know for sure we need to resize to hold everything, might as well resize and memcopy upfront - case srcLength if mustGrow(srcLength + n) => - val finalLength = srcLength + n - val array2 = ArrayDeque.alloc(finalLength) - it.copyToArray(array2.asInstanceOf[Array[A]]) - copySliceToArray(srcStart = 0, dest = array2, destStart = srcLength, maxItems = n) - reset(array = array2, start = 0, end = finalLength) - - // Just fill up from (start - srcLength) to (start - 1) and move back start - case srcLength => - // Optimized version of `elems.zipWithIndex.foreach((elem, i) => _set(i - srcLength, elem))` - var i = 0 - while(i < srcLength) { - _set(i - srcLength, it.next()) - i += 1 - } - start = start_-(srcLength) - } - } - this - } - - override def addAll(elems: IterableOnce[A]^): this.type = { - elems.knownSize match { - case srcLength if srcLength > 0 => - ensureSize(srcLength + length) - elems.iterator.foreach(appendAssumingCapacity) - case _ => elems.iterator.foreach(+=) - } - this - } - - def insert(idx: Int, elem: A): Unit = { - requireBounds(idx, length+1) - val n = length - if (idx == 0) { - prepend(elem) - } else if (idx == n) { - addOne(elem) - } else { - val finalLength = n + 1 - if (mustGrow(finalLength)) { - val array2 = ArrayDeque.alloc(finalLength) - copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) - array2(idx) = elem.asInstanceOf[AnyRef] - copySliceToArray(srcStart = idx, dest = array2, destStart = idx + 1, maxItems = n) - reset(array = array2, start = 0, end = finalLength) - } else if (n <= idx * 2) { - var i = n - 1 - while(i >= idx) { - _set(i + 1, _get(i)) - i -= 1 - } - end = end_+(1) - i += 1 - _set(i, elem) - } else { - var i = 0 - while(i < idx) { - _set(i - 1, _get(i)) - i += 1 - } - start = start_-(1) - _set(i, elem) - } - } - } - - def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { - requireBounds(idx, length+1) - val n = length - if (idx == 0) { - prependAll(elems) - } else if (idx == n) { - addAll(elems) - } else { - // Get both an iterator and the length of the source (by copying the source to an IndexedSeq if needed) - val (it, srcLength) = { - val _srcLength = elems.knownSize - if (_srcLength >= 0) (elems.iterator, _srcLength) - else { - val indexed = 
IndexedSeq.from(elems) - (indexed.iterator, indexed.size) - } - } - if (it.nonEmpty) { - val finalLength = srcLength + n - // Either we resize right away or move prefix left or suffix right - if (mustGrow(finalLength)) { - val array2 = ArrayDeque.alloc(finalLength) - copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) - it.copyToArray(array2.asInstanceOf[Array[A]], idx) - copySliceToArray(srcStart = idx, dest = array2, destStart = idx + srcLength, maxItems = n) - reset(array = array2, start = 0, end = finalLength) - } else if (2*idx >= n) { // Cheaper to shift the suffix right - var i = n - 1 - while(i >= idx) { - _set(i + srcLength, _get(i)) - i -= 1 - } - end = end_+(srcLength) - while(it.hasNext) { - i += 1 - _set(i, it.next()) - } - } else { // Cheaper to shift prefix left - var i = 0 - while(i < idx) { - _set(i - srcLength, _get(i)) - i += 1 - } - start = start_-(srcLength) - while(it.hasNext) { - _set(i, it.next()) - i += 1 - } - } - } - } - } - - def remove(idx: Int, count: Int): Unit = { - if (count > 0) { - requireBounds(idx) - val n = length - val removals = Math.min(n - idx, count) - val finalLength = n - removals - val suffixStart = idx + removals - // If we know we can resize after removing, do it right away using arrayCopy - // Else, choose the shorter: either move the prefix (0 until idx) right OR the suffix (idx+removals until n) left - if (shouldShrink(finalLength)) { - val array2 = ArrayDeque.alloc(finalLength) - copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) - copySliceToArray(srcStart = suffixStart, dest = array2, destStart = idx, maxItems = n) - reset(array = array2, start = 0, end = finalLength) - } else if (2*idx <= finalLength) { // Cheaper to move the prefix right - var i = suffixStart - 1 - while(i >= removals) { - _set(i, _get(i - removals)) - i -= 1 - } - while(i >= 0) { - _set(i, null.asInstanceOf[A]) - i -= 1 - } - start = start_+(removals) - } else { // Cheaper to move the suffix left - var i = idx - while(i < finalLength) { - _set(i, _get(i + removals)) - i += 1 - } - while(i < n) { - _set(i, null.asInstanceOf[A]) - i += 1 - } - end = end_-(removals) - } - } else { - require(count == 0, s"removing negative number of elements: $count") - } - } - - def remove(idx: Int): A = { - val elem = this(idx) - remove(idx, 1) - elem - } - - override def subtractOne(elem: A): this.type = { - val idx = indexOf(elem) - if (idx >= 0) remove(idx, 1) //TODO: SeqOps should be fluent API - this - } - - /** - * - * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while - * @return - */ - def removeHeadOption(resizeInternalRepr: Boolean = false): Option[A] = - if (isEmpty) None else Some(removeHeadAssumingNonEmpty(resizeInternalRepr)) - - /** - * Unsafely remove the first element (throws exception when empty) - * See also removeHeadOption() - * - * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while - * @throws NoSuchElementException when empty - * @return - */ - def removeHead(resizeInternalRepr: Boolean = false): A = - if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeHeadAssumingNonEmpty(resizeInternalRepr) - - @inline private[this] def removeHeadAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { - val elem = array(start) - array(start) = null - start = start_+(1) - if (resizeInternalRepr) resize(length) - elem.asInstanceOf[A] - } - - /** - * - * @param resizeInternalRepr If 
this is set, resize the internal representation to reclaim space once in a while - * @return - */ - def removeLastOption(resizeInternalRepr: Boolean = false): Option[A] = - if (isEmpty) None else Some(removeLastAssumingNonEmpty(resizeInternalRepr)) - - /** - * Unsafely remove the last element (throws exception when empty) - * See also removeLastOption() - * - * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while - * @throws NoSuchElementException when empty - * @return - */ - def removeLast(resizeInternalRepr: Boolean = false): A = - if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeLastAssumingNonEmpty(resizeInternalRepr) - - @`inline` private[this] def removeLastAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { - end = end_-(1) - val elem = array(end) - array(end) = null - if (resizeInternalRepr) resize(length) - elem.asInstanceOf[A] - } - - /** - * Remove all elements from this collection and return the elements while emptying this data structure - * @return - */ - def removeAll(): scala.collection.immutable.Seq[A] = { - val elems = scala.collection.immutable.Seq.newBuilder[A] - elems.sizeHint(length) - while(nonEmpty) { - elems += removeHeadAssumingNonEmpty() - } - elems.result() - } - - /** - * Remove all elements from this collection and return the elements in reverse while emptying this data structure - * @return - */ - def removeAllReverse(): scala.collection.immutable.Seq[A] = { - val elems = scala.collection.immutable.Seq.newBuilder[A] - elems.sizeHint(length) - while(nonEmpty) { - elems += removeLastAssumingNonEmpty() - } - elems.result() - } - - /** - * Returns and removes all elements from the left of this queue which satisfy the given predicate - * - * @param f the predicate used for choosing elements - * @return - */ - def removeHeadWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { - val elems = scala.collection.immutable.Seq.newBuilder[A] - while(headOption.exists(f)) { - elems += removeHeadAssumingNonEmpty() - } - elems.result() - } - - /** - * Returns and removes all elements from the right of this queue which satisfy the given predicate - * - * @param f the predicate used for choosing elements - * @return - */ - def removeLastWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { - val elems = scala.collection.immutable.Seq.newBuilder[A] - while(lastOption.exists(f)) { - elems += removeLastAssumingNonEmpty() - } - elems.result() - } - - /** Returns the first element which satisfies the given predicate after or at some start index - * and removes this element from the collections - * - * @param p the predicate used for choosing the first element - * @param from the start index - * @return the first element of the queue for which p yields true - */ - def removeFirst(p: A => Boolean, from: Int = 0): Option[A] = { - val i = indexWhere(p, from) - if (i < 0) None else Some(remove(i)) - } - - /** Returns all elements in this collection which satisfy the given predicate - * and removes those elements from this collections. - * - * @param p the predicate used for choosing elements - * @return a sequence of all elements in the queue for which - * p yields true. 
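A usage sketch of the draining operations above (the body of the predicate overload of `removeAll` follows below); all three mutate the deque in place and return the removed elements:

import scala.collection.mutable.ArrayDeque

object ArrayDequeDrainDemo {
  def main(args: Array[String]): Unit = {
    val dq = ArrayDeque(1, 2, 3, 4, 5, 6)
    println(dq.removeHeadWhile(_ < 3))   // List(1, 2): drained from the front
    println(dq.removeFirst(_ % 2 == 0))  // Some(4): first matching element
    println(dq.removeAll(_ > 4))         // List(5, 6): every remaining match
    println(dq)                          // ArrayDeque(3)
  }
}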
- */ - def removeAll(p: A => Boolean): scala.collection.immutable.Seq[A] = { - val res = scala.collection.immutable.Seq.newBuilder[A] - var i, j = 0 - while (i < size) { - if (p(this(i))) { - res += this(i) - } else { - if (i != j) { - this(j) = this(i) - } - j += 1 - } - i += 1 - } - if (i != j) takeInPlace(j) - res.result() - } - - @inline def ensureSize(hint: Int) = if (hint > length && mustGrow(hint)) resize(hint) - - def length = end_-(start) - - override def isEmpty = start == end - - override protected def klone(): ArrayDeque[A] = new ArrayDeque(array.clone(), start = start, end = end) - - override def iterableFactory: SeqFactory[ArrayDeque] = ArrayDeque - - /** - * Note: This does not actually resize the internal representation. - * See clearAndShrink if you want to also resize internally - */ - def clear(): Unit = { - while(nonEmpty) { - removeHeadAssumingNonEmpty() - } - } - - /** - * Clears this buffer and shrinks to @param size - * - * @param size - * @return - */ - def clearAndShrink(size: Int = ArrayDeque.DefaultInitialSize): this.type = { - reset(array = ArrayDeque.alloc(size), start = 0, end = 0) - this - } - - protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] = - new ArrayDeque[A](array, start = 0, end) - - override def copyToArray[sealed B >: A](dest: Array[B], destStart: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len) - if (copied > 0) { - copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len) - } - copied - } - - override def toArray[sealed B >: A: ClassTag]: Array[B] = - copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length) - - /** - * Trims the capacity of this ArrayDeque's instance to be the current size - */ - def trimToSize(): Unit = resize(length) - - // Utils for common modular arithmetic: - @inline protected def start_+(idx: Int) = (start + idx) & (array.length - 1) - @inline private[this] def start_-(idx: Int) = (start - idx) & (array.length - 1) - @inline private[this] def end_+(idx: Int) = (end + idx) & (array.length - 1) - @inline private[this] def end_-(idx: Int) = (end - idx) & (array.length - 1) - - // Note: here be overflow dragons! This is used for int overflow - // assumptions in resize(). Use caution changing. - @inline private[this] def mustGrow(len: Int) = { - len >= array.length - } - - // Assumes that 0 <= len < array.length! - @inline private[this] def shouldShrink(len: Int) = { - // To avoid allocation churn, only shrink when array is large - // and less than 2/5 filled. - array.length > ArrayDeque.StableSize && array.length - len - (len >> 1) > len - } - - // Assumes that 0 <= len < array.length! - @inline private[this] def canShrink(len: Int) = { - array.length > ArrayDeque.DefaultInitialSize && array.length - len > len - } - - @inline private[this] def _get(idx: Int): A = array(start_+(idx)).asInstanceOf[A] - - @inline private[this] def _set(idx: Int, elem: A) = array(start_+(idx)) = elem.asInstanceOf[AnyRef] - - // Assumes that 0 <= len. 
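The index helpers above work because the backing array length is always a power of two, so wrapping reduces to a single bitwise AND and handles negative offsets for free. A standalone sketch with hypothetical names, mirroring `start_+`/`start_-`:

object WrapIndexDemo {
  def main(args: Array[String]): Unit = {
    val len  = 16                     // invariant: always a power of two
    val mask = len - 1
    def wrap(i: Int): Int = i & mask  // same trick as start_+ / start_- above
    println(wrap(5 + 13))             // 2  : 18 modulo 16
    println(wrap(0 - 1))              // 15 : stepping back from index 0 wraps around
  }
}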
- private[this] def resize(len: Int) = if (mustGrow(len) || canShrink(len)) { - val n = length - val array2 = copySliceToArray(srcStart = 0, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = n) - reset(array = array2, start = 0, end = n) - } - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "ArrayDeque" -} - -/** - * $factoryInfo - * @define coll array deque - * @define Coll `ArrayDeque` - */ -@SerialVersionUID(3L) -object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { - - def from[sealed B](coll: collection.IterableOnce[B]^): ArrayDeque[B] = { - val s = coll.knownSize - if (s >= 0) { - val array = alloc(s) - val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) - if (actual != s) throw new IllegalStateException(s"Copied $actual of $s") - new ArrayDeque[B](array, start = 0, end = s) - } else new ArrayDeque[B]() ++= coll - } - - def newBuilder[sealed A]: Builder[A, ArrayDeque[A]] = - new GrowableBuilder[A, ArrayDeque[A]](empty) { - override def sizeHint(size: Int): Unit = { - elems.ensureSize(size) - } - } - - def empty[sealed A]: ArrayDeque[A] = new ArrayDeque[A]() - - final val DefaultInitialSize = 16 - - /** - * We try to not repeatedly resize arrays smaller than this - */ - private[ArrayDeque] final val StableSize = 128 - - /** - * Allocates an array whose size is next power of 2 > `len` - * Largest possible len is 1<<30 - 1 - * - * @param len - * @return - */ - private[mutable] def alloc(len: Int) = { - require(len >= 0, s"Non-negative array size required") - val size = (1 << 31) >>> java.lang.Integer.numberOfLeadingZeros(len) << 1 - require(size >= 0, s"ArrayDeque too big - cannot allocate ArrayDeque of length $len") - new Array[AnyRef](Math.max(size, DefaultInitialSize)) - } -} - -trait ArrayDequeOps[A, +CC[_], +C <: AnyRef] extends StrictOptimizedSeqOps[A, CC, C] { - protected def array: Array[AnyRef] - - final override def clone(): C = klone() - - protected def klone(): C - - protected def ofArray(array: Array[AnyRef], end: Int): C - - protected def start_+(idx: Int): Int - - @inline protected final def requireBounds(idx: Int, until: Int = length): Unit = - if (idx < 0 || idx >= until) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${until-1})") - - /** - * This is a more general version of copyToArray - this also accepts a srcStart unlike copyToArray - * This copies maxItems elements from this collections srcStart to dest's destStart - * If we reach the end of either collections before we could copy maxItems, we simply stop copying - * - * @param dest - * @param srcStart - * @param destStart - * @param maxItems - */ - def copySliceToArray(srcStart: Int, dest: Array[_], destStart: Int, maxItems: Int): dest.type = { - requireBounds(destStart, dest.length+1) - val toCopy = Math.min(maxItems, Math.min(length - srcStart, dest.length - destStart)) - if (toCopy > 0) { - requireBounds(srcStart) - val startIdx = start_+(srcStart) - val block1 = Math.min(toCopy, array.length - startIdx) - Array.copy(src = array, srcPos = startIdx, dest = dest, destPos = destStart, length = block1) - val block2 = toCopy - block1 - if (block2 > 0) Array.copy(src = array, srcPos = 0, dest = dest, destPos = destStart + block1, length = block2) - } - dest - } - - override def reverse: C = { - val n = length - val arr = ArrayDeque.alloc(n) - var i = 0 - while(i < n) { - arr(i) = this(n - i - 1).asInstanceOf[AnyRef] - i += 1 - } - ofArray(arr, n) - } - - override def 
slice(from: Int, until: Int): C = { - val n = length - val left = Math.max(0, Math.min(n, from)) - val right = Math.max(0, Math.min(n, until)) - val len = right - left - if (len <= 0) { - empty - } else if (len >= n) { - klone() - } else { - val array2 = copySliceToArray(srcStart = left, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = len) - ofArray(array2, len) - } - } - - override def sliding(window: Int, step: Int): Iterator[C] = { - require(window > 0 && step > 0, s"window=$window and step=$step, but both must be positive") - length match { - case 0 => Iterator.empty - case n if n <= window => Iterator.single(slice(0, length)) - case n => - val lag = if (window > step) window - step else 0 - Iterator.range(start = 0, end = n - lag, step = step).map(i => slice(i, i + window)) - } - } - - override def grouped(n: Int): Iterator[C] = sliding(n, n) -} diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala deleted file mode 100644 index bd3a208a94c0..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import java.util.Arrays - -import scala.collection.Stepper.EfficientSplit -import scala.collection.convert.impl._ -import scala.reflect.ClassTag -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking -import annotation.unchecked.uncheckedCaptures - -/** - * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same - * underlying `Array`, therefore it is not growable or shrinkable. - * - * @tparam T type of the elements in this wrapped array. - * - * @define Coll `ArraySeq` - * @define coll wrapped array - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(3L) -sealed abstract class ArraySeq[sealed T] - extends AbstractSeq[T] - with IndexedSeq[T] - with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] - with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] - with Serializable - with Pure { - - override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged - - override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = { - val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] - val s = coll.knownSize - if(s > 0) b.sizeHint(s) - b ++= coll - ArraySeq.make(b.result()) - } - override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = - ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] - override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) - - /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive - * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype - * or subtype of the element type. */ - def elemTag: ClassTag[_] - - /** Update element at given index */ - def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit - - /** The underlying array. 
Its element type does not have to be equal to the element type of this ArraySeq. A primitive - * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype - * or subtype of the element type. */ - def array: Array[_] - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit - - override protected[this] def className = "ArraySeq" - - /** Clones this object, including the underlying Array. */ - override def clone(): ArraySeq[T] = ArraySeq.make[T](array.clone().asInstanceOf[Array[T]]) - - override def copyToArray[sealed B >: T](xs: Array[B], start: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) - if(copied > 0) { - Array.copy(array, 0, xs, start, copied) - } - copied - } - - override def equals(other: Any): Boolean = other match { - case that: ArraySeq[_] if this.array.length != that.array.length => - false - case _ => - super.equals(other) - } - - override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = - ArraySeq.make(array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] - - override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { - if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]]) - this - } -} - -/** A companion object used to create instances of `ArraySeq`. - */ -@SerialVersionUID(3L) -object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => - val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) - - // This is reused for all calls to empty. - private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) - def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] - - def from[sealed A : ClassTag](it: scala.collection.IterableOnce[A]^): ArraySeq[A] = make(Array.from[A](it)) - - def newBuilder[sealed A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) - - /** - * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type - * without copying. - * - * Note that an array containing boxed primitives can be converted to a `ArraySeq` without - * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, - * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: - * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still - * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing - * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` - * at runtime. 
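The caveat documented above, made concrete; a small sketch, not part of the patch:

import scala.collection.mutable.ArraySeq

object ArraySeqMakeDemo {
  def main(args: Array[String]): Unit = {
    val a: Array[Any] = Array(1)    // an Array[Object] at runtime, holding a boxed Integer
    val s = ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]
    println(s.head + 1)             // 2: the values stay boxed, the instance is an ofRef
    // ArraySeq.make(a.asInstanceOf[Array[Int]])  // would throw ClassCastException
  }
}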
- */ - def make[sealed T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { - case null => null - case x: Array[AnyRef] => new ofRef[AnyRef](x) - case x: Array[Int] => new ofInt(x) - case x: Array[Double] => new ofDouble(x) - case x: Array[Long] => new ofLong(x) - case x: Array[Float] => new ofFloat(x) - case x: Array[Char] => new ofChar(x) - case x: Array[Byte] => new ofByte(x) - case x: Array[Short] => new ofShort(x) - case x: Array[Boolean] => new ofBoolean(x) - case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[ArraySeq[T]] - - @SerialVersionUID(3L) - final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { - def elemTag = ClassTag[T](array.getClass.getComponentType) - def length: Int = array.length - def apply(index: Int): T = array(index) - def update(index: Int, elem: T): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofRef[_] => - Array.equals( - this.array.asInstanceOf[Array[AnyRef]], - that.array.asInstanceOf[Array[AnyRef]]) - case _ => super.equals(that) - } - override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - new ObjectArrayStepper(array, 0, array.length) - else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { - def elemTag = ClassTag.Byte - def length: Int = array.length - def apply(index: Int): Byte = array(index) - def update(index: Int, elem: Byte): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofByte => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) - else new WidenedByteArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { - def elemTag = ClassTag.Short - def length: Int = array.length - def apply(index: Int): Short = array(index) - def update(index: Int, elem: Short): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofShort => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) - else new WidenedShortArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { - def elemTag = ClassTag.Char - def length: Int = array.length - def apply(index: Int): 
Char = array(index) - def update(index: Int, elem: Char): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofChar => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) - else new WidenedCharArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - - override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { - val jsb = sb.underlying - if (start.length != 0) jsb.append(start) - val len = array.length - if (len != 0) { - if (sep.isEmpty) jsb.append(array) - else { - jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) - jsb.append(array(0)) - var i = 1 - while (i < len) { - jsb.append(sep) - jsb.append(array(i)) - i += 1 - } - } - } - if (end.length != 0) jsb.append(end) - sb - } - } - - @SerialVersionUID(3L) - final class ofInt(val array: Array[Int]) extends ArraySeq[Int] { - def elemTag = ClassTag.Int - def length: Int = array.length - def apply(index: Int): Int = array(index) - def update(index: Int, elem: Int): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofInt => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) - else new IntArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofLong(val array: Array[Long]) extends ArraySeq[Long] { - def elemTag = ClassTag.Long - def length: Int = array.length - def apply(index: Int): Long = array(index) - def update(index: Int, elem: Long): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofLong => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParLongStepper(new LongArrayStepper(array, 0, array.length)) - else new LongArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] { - def elemTag = ClassTag.Float - def length: Int = array.length - def apply(index: Int): Float = array(index) - def update(index: Int, elem: Float): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofFloat => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) - override def stepper[S <: 
Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) - else new WidenedFloatArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { - def elemTag = ClassTag.Double - def length: Int = array.length - def apply(index: Int): Double = array(index) - def update(index: Int, elem: Double): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofDouble => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) - else new DoubleArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { - def elemTag = ClassTag.Boolean - def length: Int = array.length - def apply(index: Int): Boolean = array(index) - def update(index: Int, elem: Boolean): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofBoolean => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = - new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { - def elemTag = ClassTag.Unit - def length: Int = array.length - def apply(index: Int): Unit = array(index) - def update(index: Int, elem: Unit): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofUnit => array.length == that.array.length - case _ => super.equals(that) - } - override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = - new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S with EfficientSplit] - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala deleted file mode 100644 index dcb8a157389b..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/BitSet.scala +++ /dev/null @@ -1,393 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
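The specialized steppers defined above let a primitive `ArraySeq` feed a Java stream without boxing; a usage sketch via `scala.jdk.StreamConverters` (not used in this file):

import scala.collection.mutable.ArraySeq
import scala.jdk.StreamConverters._

object ArraySeqStepperDemo {
  def main(args: Array[String]): Unit = {
    val xs = ArraySeq.make(Array(1, 2, 3))  // an ofInt, backed by the unboxed array
    println(xs.asJavaSeqStream.sum())       // 6, computed over a java.util.stream.IntStream
  }
}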
- */ - -package scala -package collection -package mutable - -import scala.collection.immutable.Range -import BitSetOps.{LogWL, MaxSize} -import scala.annotation.implicitNotFound -import language.experimental.captureChecking - -/** - * A class for mutable bitsets. - * - * $bitsetinfo - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] - * section on `Mutable Bitsets` for more information. - * - * @define Coll `BitSet` - * @define coll bitset - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class BitSet(protected[collection] final var elems: Array[Long]) - extends AbstractSet[Int] - with SortedSet[Int] - with SortedSetOps[Int, SortedSet, BitSet] - with StrictOptimizedIterableOps[Int, Set, BitSet] - with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] - with collection.BitSet - with collection.BitSetOps[BitSet] - with Serializable { - - def this(initSize: Int) = this(new Array[Long](math.max((initSize + 63) >> 6, 1))) - - def this() = this(0) - - override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder - override def empty: BitSet = bitSetFactory.empty - - def bitSetFactory = BitSet - - override def unsorted: Set[Int] = this - - protected[collection] final def nwords: Int = elems.length - - protected[collection] final def word(idx: Int): Long = - if (idx < nwords) elems(idx) else 0L - - protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = - if (elems.length == 0) empty - else new BitSet(elems) - - def addOne(elem: Int): this.type = { - require(elem >= 0) - if (!contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) | (1L << elem)) - } - this - } - - def subtractOne(elem: Int): this.type = { - require(elem >= 0) - if (contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) & ~(1L << elem)) - } - this - } - - def clear(): Unit = { - elems = new Array[Long](elems.length) - } - - protected final def updateWord(idx: Int, w: Long): Unit = { - ensureCapacity(idx) - elems(idx) = w - } - - protected final def ensureCapacity(idx: Int): Unit = { - require(idx < MaxSize) - if (idx >= nwords) { - var newlen = nwords - while (idx >= newlen) newlen = math.min(newlen * 2, MaxSize) - val elems1 = new Array[Long](newlen) - Array.copy(elems, 0, elems1, 0, nwords) - elems = elems1 - } - } - - def unconstrained: collection.Set[Int] = this - - /** Updates this bitset to the union with another bitset by performing a bitwise "or". - * - * @param other the bitset to form the union with. - * @return the bitset itself. - */ - def |= (other: collection.BitSet): this.type = { - ensureCapacity(other.nwords - 1) - var i = 0 - val othernwords = other.nwords - while (i < othernwords) { - elems(i) = elems(i) | other.word(i) - i += 1 - } - this - } - /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". - * - * @param other the bitset to form the intersection with. - * @return the bitset itself. - */ - def &= (other: collection.BitSet): this.type = { - // Different from other operations: no need to ensure capacity because - // anything beyond the capacity is 0. Since we use other.word which is 0 - // off the end, we also don't need to make sure we stay in bounds there. 
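For reference, the word arithmetic used throughout this class: bit `n` lives in word `n >> LogWL` (LogWL = 6), and `1L << n` addresses it directly because Long shifts use only the low six bits of the shift count. A standalone check:

object BitWordDemo {
  def main(args: Array[String]): Unit = {
    val elem = 131
    println(elem >> 6)    // 2: the word index
    val bit = 1L << elem  // shift count is implicitly masked to 131 & 63 = 3
    println(java.lang.Long.numberOfTrailingZeros(bit))  // 3, i.e. 131 % 64
  }
}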
- var i = 0 - val thisnwords = nwords - while (i < thisnwords) { - elems(i) = elems(i) & other.word(i) - i += 1 - } - this - } - /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". - * - * @param other the bitset to form the symmetric difference with. - * @return the bitset itself. - */ - def ^= (other: collection.BitSet): this.type = { - ensureCapacity(other.nwords - 1) - var i = 0 - val othernwords = other.nwords - while (i < othernwords) { - - elems(i) = elems(i) ^ other.word(i) - i += 1 - } - this - } - /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". - * - * @param other the bitset to form the difference with. - * @return the bitset itself. - */ - def &~= (other: collection.BitSet): this.type = { - var i = 0 - val max = Math.min(nwords, other.nwords) - while (i < max) { - elems(i) = elems(i) & ~other.word(i) - i += 1 - } - this - } - - override def clone(): BitSet = new BitSet(java.util.Arrays.copyOf(elems, elems.length)) - - def toImmutable: immutable.BitSet = immutable.BitSet.fromBitMask(elems) - - override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) - override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].map(f) - - override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) - override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].flatMap(f) - - override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) - override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].collect(pf) - - // necessary for disambiguation - override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = - super.zip(that) - - override def addAll(xs: IterableOnce[Int]^): this.type = xs match { - case bs: collection.BitSet => - this |= bs - case range: Range => - if (range.nonEmpty) { - val start = range.min - if (start >= 0) { - val end = range.max - val endIdx = end >> LogWL - ensureCapacity(endIdx) - - if (range.step == 1 || range.step == -1) { - val startIdx = start >> LogWL - val wordStart = startIdx * BitSetOps.WordLength - val wordMask = -1L << (start - wordStart) - - if (endIdx > startIdx) { - elems(startIdx) |= wordMask - java.util.Arrays.fill(elems, startIdx + 1, endIdx, -1L) - elems(endIdx) |= -1L >>> (BitSetOps.WordLength - (end - endIdx * BitSetOps.WordLength) - 1) - } else elems(endIdx) |= (wordMask & (-1L >>> (BitSetOps.WordLength - (end - wordStart) - 1))) - } else super.addAll(range) - } else super.addAll(range) - } - this - - case sorted: collection.SortedSet[Int] => - // if `sorted` is using the regular Int ordering, ensure capacity for the largest - // element up front to avoid multiple resizing allocations - if (sorted.nonEmpty) { - val ord = sorted.ordering - if (ord eq Ordering.Int) { - ensureCapacity(sorted.lastKey >> LogWL) - } else if (ord eq Ordering.Int.reverse) { - ensureCapacity(sorted.firstKey >> LogWL) - } - val iter = sorted.iterator - while (iter.hasNext) { - addOne(iter.next()) - } - } - - this - - case other => - super.addAll(other) - } - 
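A usage sketch of the in-place bitwise operations defined above:

import scala.collection.mutable.BitSet

object BitSetOpsDemo {
  def main(args: Array[String]): Unit = {
    val a = BitSet(1, 2, 3)
    a |= BitSet(3, 4)       // union:                {1, 2, 3, 4}
    a &~= BitSet(2)         // difference (and-not): {1, 3, 4}
    a ^= BitSet(1, 5)       // symmetric difference: {3, 4, 5}
    println(a)              // BitSet(3, 4, 5)
    println(a.toImmutable)  // a copy; later mutation of `a` is not visible
  }
}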
- override def subsetOf(that: collection.Set[Int]): Boolean = that match { - case bs: collection.BitSet => - val thisnwords = this.nwords - val bsnwords = bs.nwords - val minWords = Math.min(thisnwords, bsnwords) - - // if any bits are set to `1` in words out of range of `bs`, then this is not a subset. Start there - var i = bsnwords - while (i < thisnwords) { - if (word(i) != 0L) return false - i += 1 - } - - // the higher range of `this` is all `0`s, fall back to lower range - var j = 0 - while (j < minWords) { - if ((word(j) & ~bs.word(j)) != 0L) return false - j += 1 - } - - true - case other => - super.subsetOf(other) - } - - override def subtractAll(xs: IterableOnce[Int]^): this.type = xs match { - case bs: collection.BitSet => this &~= bs - case other => super.subtractAll(other) - } - - protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) - - override def diff(that: collection.Set[Int]): BitSet = that match { - case bs: collection.BitSet => - /* - * Algorithm: - * - * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with - * the fewer words. - * - * Array Shrinking: - * If `this` is not longer than `bs`, then since we must iterate through the full array of words, - * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new - * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` - */ - - val bsnwords = bs.nwords - val thisnwords = nwords - if (bsnwords >= thisnwords) { - // here, we may have opportunity to shrink the size of the array - // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length - var i = thisnwords - 1 - var currentWord = 0L - - while (i >= 0 && currentWord == 0L) { - val oldWord = word(i) - currentWord = oldWord & ~bs.word(i) - i -= 1 - } - - if (i < 0) { - fromBitMaskNoCopy(Array(currentWord)) - } else { - val minimumNonZeroIndex: Int = i + 1 - val newArray = elems.take(minimumNonZeroIndex + 1) - newArray(i + 1) = currentWord - while (i >= 0) { - newArray(i) = word(i) & ~bs.word(i) - i -= 1 - } - fromBitMaskNoCopy(newArray) - } - } else { - // here, there is no opportunity to shrink the array size, no use in tracking highest non-zero index - val newElems = elems.clone() - var i = bsnwords - 1 - while (i >= 0) { - newElems(i) = word(i) & ~bs.word(i) - i -= 1 - } - fromBitMaskNoCopy(newElems) - } - case _ => super.diff(that) - } - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - // We filter the BitSet from highest to lowest, so we can determine exactly the highest non-zero word - // index which lets us avoid: - // * over-allocating -- the resulting array will be exactly the right size - // * multiple resizing allocations -- the array is allocated one time, not log(n) times. 
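The word-wise reasoning used by `subsetOf` and `diff` above, in miniature: one set is a subset of another iff every word AND-NOT the other set's corresponding word is zero. A sketch over raw bit masks:

object WordSubsetDemo {
  def main(args: Array[String]): Unit = {
    val a = Array(0x0AL, 0x00L)  // bits {1, 3}
    val b = Array(0x0FL, 0x10L)  // bits {0, 1, 2, 3, 68}
    val subset = a.indices.forall(i => (a(i) & ~b(i)) == 0L)
    println(subset)              // true
  }
}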
- var i = nwords - 1 - var newArray: Array[Long] = null - while (i >= 0) { - val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) - if (w != 0L) { - if (newArray eq null) { - newArray = new Array(i + 1) - } - newArray(i) = w - } - i -= 1 - } - if (newArray eq null) { - empty - } else { - fromBitMaskNoCopy(newArray) - } - } - - override def filterInPlace(p: Int => Boolean): this.type = { - val thisnwords = nwords - var i = 0 - while (i < thisnwords) { - elems(i) = BitSetOps.computeWordForFilter(p, isFlipped = false, elems(i), i) - i += 1 - } - this - } - - override def toBitMask: Array[Long] = elems.clone() -} - -@SerialVersionUID(3L) -object BitSet extends SpecificIterableFactory[Int, BitSet] { - - def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = Growable.from(empty, it) - - def empty: BitSet = new BitSet() - - def newBuilder: Builder[Int, BitSet] = new GrowableBuilder(empty) - - /** A bitset containing all the bits in an array */ - def fromBitMask(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else { - val a = java.util.Arrays.copyOf(elems, len) - new BitSet(a) - } - } - - /** A bitset containing all the bits in an array, wrapping the existing - * array without copying. - */ - def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else new BitSet(elems) - } - - @SerialVersionUID(3L) - private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { - protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala index 0f472dc9ac82..0a70c75bac0c 100644 --- a/tests/pos-special/stdlib/collection/mutable/Buffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala @@ -15,11 +15,10 @@ package mutable import scala.annotation.nowarn import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures /** A `Buffer` is a growable and shrinkable `Seq`. */ -trait Buffer[sealed A] +trait Buffer[A] extends Seq[A] with SeqOps[A, Buffer, Buffer[A]] with Growable[A] @@ -186,7 +185,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] // There's scope for a better implementation which copies elements in place. var i = 0 val s = size - val newElems = new Array[(IterableOnce[A]^) @uncheckedCaptures](s) + val newElems = new Array[IterableOnce[A]^](s) while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 diff --git a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala deleted file mode 100644 index 152b6cc9ffc7..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
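A usage sketch of the `BitSet` companion constructors above: `fromBitMask` copies the word array, `fromBitMaskNoCopy` wraps it.

import scala.collection.mutable.BitSet

object BitMaskDemo {
  def main(args: Array[String]): Unit = {
    val words  = Array(5L)                   // binary 101: bits {0, 2}
    val copied = BitSet.fromBitMask(words)
    val shared = BitSet.fromBitMaskNoCopy(words)
    words(0) = 0L                            // mutate the original array
    println(copied)                          // BitSet(0, 2): unaffected
    println(shared)                          // BitSet(): sees the mutation
  }
}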
- */ - -package scala -package collection -package mutable -import language.experimental.captureChecking - -private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { - this: CheckedIndexedSeqView[A]^ => - - protected val mutationCount: () => Int - - override def iterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) - override def reverseIterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) - - override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) - override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) - override def take(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Take(this, n)(mutationCount) - override def takeRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) - override def drop(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) - override def dropRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) - override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) - override def reverse: IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Reverse(this)(mutationCount) - override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) - override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) - - override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) -} - -private[mutable] object CheckedIndexedSeqView { - import IndexedSeqView.SomeIndexedSeqOps - - @SerialVersionUID(3L) - private[mutable] class CheckedIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) - extends IndexedSeqView.IndexedSeqViewIterator[A](self) { - private[this] val expectedCount = mutationCount - override def hasNext: Boolean = { - MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) - super.hasNext - } - } - - @SerialVersionUID(3L) - private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) - extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { - private[this] val expectedCount = mutationCount - override def hasNext: Boolean = { - MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) - super.hasNext - } - } - - @SerialVersionUID(3L) - class Id[+A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) - extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A)(protected val mutationCount: () => Int) - extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: 
SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) - extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) - extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) - extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) - extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) - extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) - extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B)(protected val mutationCount: () => Int) - extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] - - @SerialVersionUID(3L) - class Reverse[A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) - extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { - override def reverse: IndexedSeqView[A] = underlying match { - case x: IndexedSeqView[A] => x - case _ => super.reverse - } - } - - @SerialVersionUID(3L) - class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int)(protected val mutationCount: () => Int) - extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { - protected val lo = from max 0 - protected val hi = (until max 0) min underlying.length - protected val len = (hi - lo) max 0 - @throws[IndexOutOfBoundsException] - def apply(i: Int): A = underlying(lo + i) - def length: Int = len - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala deleted file mode 100644 index 39149e98cbf0..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.mutable -import language.experimental.captureChecking - -/** A trait for cloneable collections. - * - * @tparam C Type of the collection, covariant and with reference types as upperbound. - */ -trait Cloneable[+C <: AnyRef] extends scala.Cloneable { - override def clone(): C = super.clone().asInstanceOf[C] -} diff --git a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala deleted file mode 100644 index 2b27efb6eac1..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala +++ /dev/null @@ -1,889 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
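Stepping back: the checked views deleted above guard iteration by capturing a mutation count when an iterator is created and re-checking it on every step. A behavioural sketch, assuming Scala 2.13's fail-fast `ArrayBuffer` iterators, which are built from these checked views:

import scala.collection.mutable.ArrayBuffer

object MutationCheckDemo {
  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer(1, 2, 3)
    val it  = buf.iterator   // captures the current mutation count
    buf += 4                 // a structural change bumps the count
    try it.next()            // the re-check fails on the next access
    catch { case _: java.util.ConcurrentModificationException => println("detected") }
  }
}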
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.{unchecked => uc} -import scala.annotation.{implicitNotFound, tailrec, unused} -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.generic.DefaultSerializationProxy -import scala.runtime.Statics -import language.experimental.captureChecking - -/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good - * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality - * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. Universal equality - * of numeric types is not supported (similar to `AnyRefMap`). - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. - * - * @define Coll `mutable.CollisionProofHashMap` - * @define coll mutable collision-proof hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class CollisionProofHashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) - extends AbstractMap[K, V] - with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- - with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] - with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //-- - - private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap - - def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering) - - import CollisionProofHashMap.Node - private[this] type RBNode = CollisionProofHashMap.RBNode[K, V] - private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] - - /** The actual hash table. */ - private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) - - /** The next size value at which to resize (capacity * load factor). 
*/ - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def size: Int = contentSize - - @`inline` private[this] final def computeHash(o: K): Int = { - val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode - h ^ (h >>> 16) - } - - @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) - - override protected def fromSpecific(coll: (IterableOnce[(K, V)]^) @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) - override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] - - override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] - - override def contains(key: K): Boolean = findNode(key) ne null - - def get(key: K): Option[V] = findNode(key) match { - case null => None - case nd => Some(nd match { - case nd: LLNode @uc => nd.value - case nd: RBNode @uc => nd.value - }) - } - - @throws[NoSuchElementException] - override def apply(key: K): V = findNode(key) match { - case null => default(key) - case nd => nd match { - case nd: LLNode @uc => nd.value - case nd: RBNode @uc => nd.value - } - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - val nd = findNode(key) - if (nd eq null) default else nd match { - case nd: LLNode @uc => nd.value - case n => n.asInstanceOf[RBNode].value - } - } - - @`inline` private[this] def findNode(elem: K): Node = { - val hash = computeHash(elem) - table(index(hash)) match { - case null => null - case n: LLNode @uc => n.getNode(elem, hash) - case n => n.asInstanceOf[RBNode].getNode(elem, hash) - } - } - - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) - if(target > table.length) { - if(size == 0) reallocTable(target) - else growTable(target) - } - } - - override def update(key: K, value: V): Unit = put0(key, value, false) - - override def put(key: K, value: V): Option[V] = put0(key, value, true) match { - case null => None - case sm => sm - } - - def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } - - @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { - if(contentSize + 1 >= threshold) growTable(table.length * 2) - val hash = computeHash(key) - val idx = index(hash) - put0(key, value, getOld, hash, idx) - } - - private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { - val res = table(idx) match { - case n: RBNode @uc => - insert(n, idx, key, hash, value) - case _old => - val old: LLNode = _old.asInstanceOf[LLNode] - if(old eq null) { - table(idx) = new LLNode(key, hash, value, null) - } else { - var remaining = CollisionProofHashMap.treeifyThreshold - var prev: LLNode = null - var n = old - while((n ne null) && n.hash <= hash && remaining > 0) { - if(n.hash == hash && key == n.key) { - val old = n.value - n.value = value - return (if(getOld) Some(old) else null) - } - prev = n - n = n.next - remaining -= 1 - } - if(remaining == 0) { - treeify(old, idx) - return put0(key, value, getOld, hash, idx) - } - if(prev eq null) table(idx) = new LLNode(key, hash, value, old) - else prev.next = new LLNode(key, hash, value, prev.next) - } - true - } - if(res) contentSize += 1 - if(res) Some(null.asInstanceOf[V]) else null //TODO - } - - private[this] def treeify(old: LLNode, idx: Int): Unit = { - table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, 
old.value, red = false, null) - var n: LLNode = old.next - while(n ne null) { - val root = table(idx).asInstanceOf[RBNode] - insertIntoExisting(root, idx, n.key, n.hash, n.value, root) - n = n.next - } - } - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = { - val k = xs.knownSize - if(k > 0) sizeHint(contentSize + k) - super.addAll(xs) - } - - // returns the old value or Statics.pfMarker if not found - private[this] def remove0(elem: K) : Any = { - val hash = computeHash(elem) - val idx = index(hash) - table(idx) match { - case null => Statics.pfMarker - case t: RBNode @uc => - val v = delete(t, idx, elem, hash) - if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 - v - case nd: LLNode @uc if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - contentSize -= 1 - nd.value - case nd: LLNode @uc => - // find an element that matches - var prev = nd - var next = nd.next - while((next ne null) && next.hash <= hash) { - if(next.hash == hash && next.key == elem) { - prev.next = next.next - contentSize -= 1 - return next.value - } - prev = next - next = next.next - } - Statics.pfMarker - } - } - - private[this] abstract class MapIterator[R] extends AbstractIterator[R] { - protected[this] def extract(node: LLNode): R - protected[this] def extract(node: RBNode): R - - private[this] var i = 0 - private[this] var node: Node = null - private[this] val len = table.length - - def hasNext: Boolean = { - if(node ne null) true - else { - while(i < len) { - val n = table(i) - i += 1 - n match { - case null => - case n: RBNode @uc => - node = CollisionProofHashMap.minNodeNonNull(n) - return true - case n: LLNode @uc => - node = n - return true - } - } - false - } - } - - def next(): R = - if(!hasNext) Iterator.empty.next() - else node match { - case n: RBNode @uc => - val r = extract(n) - node = CollisionProofHashMap.successor(n ) - r - case n: LLNode @uc => - val r = extract(n) - node = n.next - r - } - } - - override def keysIterator: Iterator[K] = { - if (isEmpty) Iterator.empty - else new MapIterator[K] { - protected[this] def extract(node: LLNode) = node.key - protected[this] def extract(node: RBNode) = node.key - } - } - - override def iterator: Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else new MapIterator[(K, V)] { - protected[this] def extract(node: LLNode) = (node.key, node.value) - protected[this] def extract(node: RBNode) = (node.key, node.value) - } - } - - private[this] def growTable(newlen: Int) = { - var oldlen = table.length - table = java.util.Arrays.copyOf(table, newlen) - threshold = newThreshold(table.length) - while(oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if(old ne null) splitBucket(old, i, i + oldlen, oldlen) - i += 1 - } - oldlen *= 2 - } - } - - @`inline` private[this] def reallocTable(newlen: Int) = { - table = new Array(newlen) - threshold = newThreshold(table.length) - } - - @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { - case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) - case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) - } - - private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { - val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) - val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) - //preLow.next = null - //preHigh.next = null - var lastLow: LLNode = 
preLow - var lastHigh: LLNode = preHigh - var n = list - while(n ne null) { - val next = n.next - if((n.hash & mask) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if(list ne preLow.next) table(lowBucket) = preLow.next - if(preHigh.next ne null) { - table(highBucket) = preHigh.next - lastHigh.next = null - } - } - - private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { - var lowCount, highCount = 0 - tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) - if(highCount != 0) { - if(lowCount == 0) { - table(lowBucket) = null - table(highBucket) = tree - } else { - table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), lowCount) - table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) - } - } - } - - private[this] def tableSizeFor(capacity: Int) = - (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt - - override def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - } - - override def remove(key: K): Option[V] = { - val v = remove0(key) - if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) - } - - def subtractOne(elem: K): this.type = { remove0(elem); this } - - override def knownSize: Int = size - - override def isEmpty: Boolean = size == 0 - - override def foreach[U](f: ((K, V)) => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n match { - case n: LLNode @uc => n.foreach(f) - case n: RBNode @uc => n.foreach(f) - } - i += 1 - } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n match { - case n: LLNode @uc => n.foreachEntry(f) - case n: RBNode @uc => n.foreachEntry(f) - } - i += 1 - } - } - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) - - override protected[this] def className = "CollisionProofHashMap" - - override def getOrElseUpdate(key: K, defaultValue: => V): V = { - val hash = computeHash(key) - val idx = index(hash) - table(idx) match { - case null => () - case n: LLNode @uc => - val nd = n.getNode(key, hash) - if(nd != null) return nd.value - case n => - val nd = n.asInstanceOf[RBNode].getNode(key, hash) - if(nd != null) return nd.value - } - val table0 = table - val default = defaultValue - if(contentSize + 1 >= threshold) growTable(table.length * 2) - // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. - val newIdx = if (table0 eq table) idx else index(hash) - put0(key, default, false, hash, newIdx) - default - } - - ///////////////////// Overrides code from SortedMapOps - - /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results. 
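-   * For instance, an illustrative sketch (entry order in the result is unspecified):
-   * {{{
-   *   val m = CollisionProofHashMap(1 -> "one", 2 -> "two")
-   *   m.map { case (k, v) => (v, k) } // CollisionProofHashMap(one -> 1, two -> 2)
-   * }}}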
- */ - def map[K2, V2](f: ((K, V)) => (K2, V2)) - (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) - - /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @return a new $coll resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. - */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) - (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.FlatMap(this, f)) - - /** Builds a new sorted map by applying a partial function to all elements of this $coll - * on which the function is defined. - * - * @param pf the partial function which filters and maps the $coll. - * @return a new $coll resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. - */ - def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) - (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.Collect(this, pf)) - - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(this, it) - case _ => iterator.concat(suffix.iterator) - }) - - /** Alias for `concat` */ - @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = concat(xs) - - @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = - sortedMapFactory.from(new View.Appended(this, kv)) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = - sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) - - ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: - - @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red - @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red - - @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { - val i = hash - node.hash - if(i != 0) i else ordering.compare(key, node.key) - } - - @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { - /*val i = hash - node.hash - if(i != 0) i else*/ ordering.compare(key, node.key) - } - - // ---- insertion ---- - - @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { - val cmp = compare(key, hash, x) - if(cmp == 0) { - x.value = value - false - } else { - val next = if(cmp < 0) x.left else x.right - if(next eq null) { - val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) - if (cmp < 0) x.left = z else x.right = z - table(bucket) = fixAfterInsert(_root, z) - return true - } - else insertIntoExisting(_root, bucket, key, hash, value, next) - } - } - - private[this] final 
def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { - if(tree eq null) { - table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) - true - } else insertIntoExisting(tree, bucket, key, hash, value, tree) - } - - private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { - var root = _root - var z = node - while (isRed(z.parent)) { - if (z.parent eq z.parent.parent.left) { - val y = z.parent.parent.right - if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.right) { - z = z.parent - root = rotateLeft(root, z) - } - z.parent.red = false - z.parent.parent.red = true - root = rotateRight(root, z.parent.parent) - } - } else { // symmetric cases - val y = z.parent.parent.left - if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.left) { - z = z.parent - root = rotateRight(root, z) - } - z.parent.red = false - z.parent.parent.red = true - root = rotateLeft(root, z.parent.parent) - } - } - } - root.red = false - root - } - - // ---- deletion ---- - - // returns the old value or Statics.pfMarker if not found - private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { - var root = _root - val z = root.getNode(key, hash: Int) - if (z ne null) { - val oldValue = z.value - var y = z - var yIsRed = y.red - var x: RBNode = null - var xParent: RBNode = null - - if (z.left eq null) { - x = z.right - root = transplant(root, z, z.right) - xParent = z.parent - } - else if (z.right eq null) { - x = z.left - root = transplant(root, z, z.left) - xParent = z.parent - } - else { - y = CollisionProofHashMap.minNodeNonNull(z.right) - yIsRed = y.red - x = y.right - - if (y.parent eq z) xParent = y - else { - xParent = y.parent - root = transplant(root, y, y.right) - y.right = z.right - y.right.parent = y - } - root = transplant(root, z, y) - y.left = z.left - y.left.parent = y - y.red = z.red - } - - if (!yIsRed) root = fixAfterDelete(root, x, xParent) - if(root ne _root) table(bucket) = root - oldValue - } else Statics.pfMarker - } - - private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { - var root = _root - var x = node - var xParent = parent - while ((x ne root) && isBlack(x)) { - if (x eq xParent.left) { - var w = xParent.right - // assert(w ne null) - - if (w.red) { - w.red = false - xParent.red = true - root = rotateLeft(root, xParent) - w = xParent.right - } - if (isBlack(w.left) && isBlack(w.right)) { - w.red = true - x = xParent - } else { - if (isBlack(w.right)) { - w.left.red = false - w.red = true - root = rotateRight(root, w) - w = xParent.right - } - w.red = xParent.red - xParent.red = false - w.right.red = false - root = rotateLeft(root, xParent) - x = root - } - } else { // symmetric cases - var w = xParent.left - // assert(w ne null) - - if (w.red) { - w.red = false - xParent.red = true - root = rotateRight(root, xParent) - w = xParent.left - } - if (isBlack(w.right) && isBlack(w.left)) { - w.red = true - x = xParent - } else { - if (isBlack(w.left)) { - w.right.red = false - w.red = true - root = rotateLeft(root, w) - w = xParent.left - } - w.red = xParent.red - xParent.red = false - w.left.red = false - root = rotateRight(root, xParent) - x = root - } - } - xParent = x.parent - } - if (x ne null) x.red = false - root - } - - // ---- helpers ---- - - @`inline` private[this] def rotateLeft(_root: RBNode, x: 
RBNode): RBNode = { - var root = _root - val y = x.right - x.right = y.left - - val xp = x.parent - if (y.left ne null) y.left.parent = x - y.parent = xp - - if (xp eq null) root = y - else if (x eq xp.left) xp.left = y - else xp.right = y - - y.left = x - x.parent = y - root - } - - @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = { - var root = _root - val y = x.left - x.left = y.right - - val xp = x.parent - if (y.right ne null) y.right.parent = x - y.parent = xp - - if (xp eq null) root = y - else if (x eq xp.right) xp.right = y - else xp.left = y - - y.right = x - x.parent = y - root - } - - /** - * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous - * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. - */ - private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { - var root = _root - if (to.parent eq null) root = from - else if (to eq to.parent.left) to.parent.left = from - else to.parent.right = from - if (from ne null) from.parent = to.parent - root - } - - // building - - def fromNodes(xs: Iterator[Node], size: Int): RBNode = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): RBNode = size match { - case 0 => null - case 1 => - val nn = xs.next() - val (key, hash, value) = nn match { - case nn: LLNode @uc => (nn.key, nn.hash, nn.value) - case nn: RBNode @uc => (nn.key, nn.hash, nn.value) - } - new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val nn = xs.next() - val right = f(level+1, size-1-leftSize) - val (key, hash, value) = nn match { - case nn: LLNode @uc => (nn.key, nn.hash, nn.value) - case nn: RBNode @uc => (nn.key, nn.hash, nn.value) - } - val n = new RBNode(key, hash, value, false, left, right, null) - if(left ne null) left.parent = n - right.parent = n - n - } - f(1, size) - } -} - -/** - * $factoryInfo - * @define Coll `mutable.CollisionProofHashMap` - * @define coll mutable collision-proof hash map - */ -@SerialVersionUID(3L) -object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { - private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." 
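-  // Sizing sketch (hypothetical numbers): `from` pre-sizes the table so that copying
-  // `it` cannot trigger a resize. With knownSize 100 and the default load factor 0.75,
-  // the requested capacity is ((100 + 1) / 0.75).toInt = 134, which `tableSizeFor`
-  // rounds up to the next power of two, 256 buckets, giving a threshold of 192 entries.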
- - def from[sealed K : Ordering, sealed V](it: scala.collection.IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = { - val k = it.knownSize - val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity - new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it - } - - def empty[sealed K : Ordering, sealed V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] - - def newBuilder[sealed K : Ordering, sealed V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - - def newBuilder[sealed K : Ordering, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = - new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { - override def sizeHint(size: Int) = elems.sizeHint(size) - } - - /** The default load factor for the hash table */ - final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - final def defaultInitialCapacity: Int = 16 - - @SerialVersionUID(3L) - private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it - def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) - } - - @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { - val i = hash - node.hash - if(i != 0) i else ord.compare(key, node.key) - } - - @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { - /*val i = hash - node.hash - if(i != 0) i else*/ ord.compare(key, node.key) - } - - private final val treeifyThreshold = 8 - - // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. - // Keeping calls monomorphic where possible and dispatching manually where needed is faster. 
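-  // A sketch of the resulting pattern, as used by `CollisionProofHashMap.foreach` above:
-  //   table(i) match {
-  //     case n: LLNode @uc => n.foreach(f) // receiver type statically known: monomorphic
-  //     case n: RBNode @uc => n.foreach(f) // likewise
-  //   }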
- sealed abstract class Node - - /////////////////////////// Red-Black Tree Node - - final class RBNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { - override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" - - @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { - val cmp = compare(k, h, this) - if (cmp < 0) { - if(left ne null) left.getNode(k, h) else null - } else if (cmp > 0) { - if(right ne null) right.getNode(k, h) else null - } else this - } - - def foreach[U](f: ((K, V)) => U): Unit = { - if(left ne null) left.foreach(f) - f((key, value)) - if(right ne null) right.foreach(f) - } - - def foreachEntry[U](f: (K, V) => U): Unit = { - if(left ne null) left.foreachEntry(f) - f(key, value) - if(right ne null) right.foreachEntry(f) - } - - def foreachNode[U](f: RBNode[K, V] => U): Unit = { - if(left ne null) left.foreachNode(f) - f(this) - if(right ne null) right.foreachNode(f) - } - } - - @`inline` private def leaf[sealed A, sealed B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = - new RBNode(key, hash, value, red, null, null, parent) - - @tailrec private def minNodeNonNull[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = - if (node.left eq null) node else minNodeNonNull(node.left) - - /** - * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, - * therefore, the last node), this method returns `null`. - */ - private def successor[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = { - if (node.right ne null) minNodeNonNull(node.right) - else { - var x = node - var y = x.parent - while ((y ne null) && (x eq y.right)) { - x = y - y = y.parent - } - y - } - } - - private final class RBNodesIterator[sealed A, sealed B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { - private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) - - def hasNext: Boolean = nextNode ne null - - @throws[NoSuchElementException] - def next(): RBNode[A, B] = nextNode match { - case null => Iterator.empty.next() - case node => - nextNode = successor(node) - node - } - } - - /////////////////////////// Linked List Node - - private final class LLNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { - override def toString = s"LLNode($key, $value, $hash) -> $next" - - private[this] def eq(a: Any, b: Any): Boolean = - if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) - - @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { - if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this - else if((next eq null) || (hash > h)) null - else next.getNode(k, h) - } - - @tailrec def foreach[U](f: ((K, V)) => U): Unit = { - f((key, value)) - if(next ne null) next.foreach(f) - } - - @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { - f(key, value) - if(next ne null) next.foreachEntry(f) - } - - @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { - f(this) - if(next ne null) next.foreachNode(f) - } - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala deleted file mode 100644 index 
4d6f989e6f3d..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.mutable -import language.experimental.captureChecking - -/** The canonical builder for collections that are growable, i.e. that support an - * efficient `+=` method which adds an element to the collection. - * - * GrowableBuilders can produce only a single instance of the collection they are growing. - * - * @define Coll `GrowingBuilder` - * @define coll growing builder - */ -class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To) - extends Builder[Elem, To] { - - def clear(): Unit = elems.clear() - - def result(): To = elems - - def addOne(elem: Elem): this.type = { elems += elem; this } - - override def addAll(xs: IterableOnce[Elem]^): this.type = { elems.addAll(xs); this } - - override def knownSize: Int = elems.knownSize -} diff --git a/tests/pos-special/stdlib/collection/mutable/HashMap.scala b/tests/pos-special/stdlib/collection/mutable/HashMap.scala deleted file mode 100644 index ab45e7ffc73d..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/HashMap.scala +++ /dev/null @@ -1,655 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.{nowarn, tailrec} -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializationProxy -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - -/** This class implements mutable maps using a hashtable. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. - * - * @tparam K the type of the keys contained in this hash map. - * @tparam V the type of the values assigned to keys in this hash map. - * - * @define Coll `mutable.HashMap` - * @define coll mutable hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0") -class HashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double) - extends AbstractMap[K, V] - with MapOps[K, V, HashMap, HashMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]] - with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] - with MapFactoryDefaults[K, V, HashMap, Iterable] - with Serializable { - - /* The HashMap class holds the following invariant: - * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. - * - Every bucket is sorted in ascendent hash order - * - The sum of the lengths of all buckets is equal to contentSize. 
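-   * (Hence a lookup scans a single bucket and, because buckets are hash-sorted,
-   * `findNode` can stop as soon as a node's hash exceeds the probed hash.)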
- */ - def this() = this(HashMap.defaultInitialCapacity, HashMap.defaultLoadFactor) - - import HashMap.Node - - /** The actual hash table. */ - private[this] var table = new Array[Node[K, V]](tableSizeFor(initialCapacity)) - - /** The next size value at which to resize (capacity * load factor). */ - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def size: Int = contentSize - - /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ - @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - - /** Computes the improved hash of an original (`any.##`) hash. */ - @`inline` private[this] def improveHash(originalHash: Int): Int = { - // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the - // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement - // algorithm as in java.util.HashMap. - // - // This function is also its own inverse. That is, for all ints i, improveHash(improveHash(i)) = i - // this allows us to retrieve the original hash when we need it, for instance when appending to an immutable.HashMap - // and that is why unimproveHash simply forwards to this method - originalHash ^ (originalHash >>> 16) - } - - /** Computes the improved hash of this key */ - @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) - - @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - - override def contains(key: K): Boolean = findNode(key) ne null - - @`inline` private[this] def findNode(key: K): Node[K, V] = { - val hash = computeHash(key) - table(index(hash)) match { - case null => null - case nd => nd.findNode(key, hash) - } - } - - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) - if(target > table.length) growTable(target) - } - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = { - sizeHint(xs.knownSize) - - xs match { - case hm: immutable.HashMap[K, V] => - hm.foreachWithHash((k, v, h) => put0(k, v, improveHash(h), getOld = false)) - this - case hm: mutable.HashMap[K, V] => - val iter = hm.nodeIterator - while (iter.hasNext) { - val next = iter.next() - put0(next.key, next.value, next.hash, getOld = false) - } - this - case lhm: mutable.LinkedHashMap[K, V] => - val iter = lhm.entryIterator - while (iter.hasNext) { - val entry = iter.next() - put0(entry.key, entry.value, entry.hash, getOld = false) - } - this - case thatMap: Map[K, V] => - thatMap.foreachEntry { (key: K, value: V) => - put0(key, value, improveHash(key.##), getOld = false) - } - this - case _ => - super.addAll(xs) - } - } - - // Override updateWith for performance, so we can do the update while hashing - // the input key only once and performing one lookup into the hash table - override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - if (getClass != classOf[HashMap[_, _]]) { - // subclasses of HashMap might customise `get` ... 
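-      // or `put`, so take the generic implementation to preserve their behaviour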
- super.updateWith(key)(remappingFunction) - } else { - val hash = computeHash(key) - val indexedHash = index(hash) - - var foundNode: Node[K, V] = null - var previousNode: Node[K, V] = null - table(indexedHash) match { - case null => - case nd => - @tailrec - def findNode(prev: Node[K, V], nd: Node[K, V], k: K, h: Int): Unit = { - if (h == nd.hash && k == nd.key) { - previousNode = prev - foundNode = nd - } - else if ((nd.next eq null) || (nd.hash > h)) () - else findNode(nd, nd.next, k, h) - } - - findNode(null, nd, key, hash) - } - - val previousValue = foundNode match { - case null => None - case nd => Some(nd.value) - } - - val nextValue = remappingFunction(previousValue) - - (previousValue, nextValue) match { - case (None, None) => // do nothing - - case (Some(_), None) => - if (previousNode != null) previousNode.next = foundNode.next - else table(indexedHash) = foundNode.next - contentSize -= 1 - - case (None, Some(value)) => - val newIndexedHash = - if (contentSize + 1 >= threshold) { - growTable(table.length * 2) - index(hash) - } else indexedHash - put0(key, value, false, hash, newIndexedHash) - - case (Some(_), Some(newValue)) => foundNode.value = newValue - } - nextValue - } - } - - override def subtractAll(xs: IterableOnce[K]^): this.type = { - if (size == 0) { - return this - } - - xs match { - case hs: immutable.HashSet[K] => - hs.foreachWithHashWhile { (k, h) => - remove0(k, improveHash(h)) - size > 0 - } - this - case hs: mutable.HashSet[K] => - val iter = hs.nodeIterator - while (iter.hasNext) { - val next = iter.next() - remove0(next.key, next.hash) - if (size == 0) return this - } - this - case lhs: mutable.LinkedHashSet[K] => - val iter = lhs.entryIterator - while (iter.hasNext) { - val next = iter.next() - remove0(next.key, next.hash) - if (size == 0) return this - } - this - case _ => super.subtractAll(xs) - } - } - - /** Adds a key-value pair to this map - * - * @param key the key to add - * @param value the value to add - * @param hash the **improved** hashcode of `key` (see computeHash) - * @param getOld if true, then the previous value for `key` will be returned, otherwise, false - */ - private[this] def put0(key: K, value: V, hash: Int, getOld: Boolean): Some[V] = { - if(contentSize + 1 >= threshold) growTable(table.length * 2) - val idx = index(hash) - put0(key, value, getOld, hash, idx) - } - - private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { - if(contentSize + 1 >= threshold) growTable(table.length * 2) - val hash = computeHash(key) - val idx = index(hash) - put0(key, value, getOld, hash, idx) - } - - - private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { - table(idx) match { - case null => - table(idx) = new Node[K, V](key, hash, value, null) - case old => - var prev: Node[K, V] = null - var n = old - while((n ne null) && n.hash <= hash) { - if(n.hash == hash && key == n.key) { - val old = n.value - n.value = value - return if(getOld) Some(old) else null - } - prev = n - n = n.next - } - if(prev eq null) table(idx) = new Node(key, hash, value, old) - else prev.next = new Node(key, hash, value, prev.next) - } - contentSize += 1 - null - } - - private def remove0(elem: K) : Node[K, V] = remove0(elem, computeHash(elem)) - - /** Removes a key from this map if it exists - * - * @param elem the element to remove - * @param hash the **improved** hashcode of `element` (see computeHash) - * @return the node that contained element if it was present, otherwise null - */ - private[this] def remove0(elem: K, 
hash: Int) : Node[K, V] = { - val idx = index(hash) - table(idx) match { - case null => null - case nd if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - contentSize -= 1 - nd - case nd => - // find an element that matches - var prev = nd - var next = nd.next - while((next ne null) && next.hash <= hash) { - if(next.hash == hash && next.key == elem) { - prev.next = next.next - contentSize -= 1 - return next - } - prev = next - next = next.next - } - null - } - } - - private[this] abstract class HashMapIterator[A] extends AbstractIterator[A] { - private[this] var i = 0 - private[this] var node: Node[K, V] = null - private[this] val len = table.length - - protected[this] def extract(nd: Node[K, V]): A - - def hasNext: Boolean = { - if(node ne null) true - else { - while(i < len) { - val n = table(i) - i += 1 - if(n ne null) { node = n; return true } - } - false - } - } - - def next(): A = - if(!hasNext) Iterator.empty.next() - else { - val r = extract(node) - node = node.next - r - } - } - - override def iterator: Iterator[(K, V)] = - if(size == 0) Iterator.empty - else new HashMapIterator[(K, V)] { - protected[this] def extract(nd: Node[K, V]) = (nd.key, nd.value) - } - - override def keysIterator: Iterator[K] = - if(size == 0) Iterator.empty - else new HashMapIterator[K] { - protected[this] def extract(nd: Node[K, V]) = nd.key - } - - override def valuesIterator: Iterator[V] = - if(size == 0) Iterator.empty - else new HashMapIterator[V] { - protected[this] def extract(nd: Node[K, V]) = nd.value - } - - - /** Returns an iterator over the nodes stored in this HashMap */ - private[collection] def nodeIterator: Iterator[Node[K, V]] = - if(size == 0) Iterator.empty - else new HashMapIterator[Node[K, V]] { - protected[this] def extract(nd: Node[K, V]) = nd - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = - shape. - parUnbox(new convert.impl.AnyTableStepper[(K, V), Node[K, V]](size, table, _.next, node => (node.key, node.value), 0, table.length)). 
- asInstanceOf[S with EfficientSplit] - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { - import convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) - case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) - case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) - case _ => shape.parUnbox(new AnyTableStepper[K, Node[K, V]](size, table, _.next, _.key, 0, table.length)) - } - s.asInstanceOf[S with EfficientSplit] - } - - override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { - import convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Int], 0, table.length) - case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Long], 0, table.length) - case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.value.asInstanceOf[Double], 0, table.length) - case _ => shape.parUnbox(new AnyTableStepper[V, Node[K, V]](size, table, _.next, _.value, 0, table.length)) - } - s.asInstanceOf[S with EfficientSplit] - } - - private[this] def growTable(newlen: Int) = { - if (newlen < 0) - throw new RuntimeException(s"new HashMap table size $newlen exceeds maximum") - var oldlen = table.length - threshold = newThreshold(newlen) - if(size == 0) table = new Array(newlen) - else { - table = java.util.Arrays.copyOf(table, newlen) - val preLow: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) - val preHigh: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) - // Split buckets until the new length has been reached. This could be done more - // efficiently when growing an already filled table to more than double the size. - while(oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if(old ne null) { - preLow.next = null - preHigh.next = null - var lastLow: Node[K, V] = preLow - var lastHigh: Node[K, V] = preHigh - var n = old - while(n ne null) { - val next = n.next - if((n.hash & oldlen) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if(old ne preLow.next) table(i) = preLow.next - if(preHigh.next ne null) { - table(i + oldlen) = preHigh.next - lastHigh.next = null - } - } - i += 1 - } - oldlen *= 2 - } - } - } - - private[this] def tableSizeFor(capacity: Int) = - (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt - - override def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - } - - def get(key: K): Option[V] = findNode(key) match { - case null => None - case nd => Some(nd.value) - } - - @throws[NoSuchElementException] - override def apply(key: K): V = findNode(key) match { - case null => default(key) - case nd => nd.value - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - if (getClass != classOf[HashMap[_, _]]) { - // subclasses of HashMap might customise `get` ... - super.getOrElse(key, default) - } else { - // .. 
but in the common case, we can avoid the Option boxing. - val nd = findNode(key) - if (nd eq null) default else nd.value - } - } - - override def getOrElseUpdate(key: K, defaultValue: => V): V = { - if (getClass != classOf[HashMap[_, _]]) { - // subclasses of HashMap might customise `get` ... - super.getOrElseUpdate(key, defaultValue) - } else { - val hash = computeHash(key) - val idx = index(hash) - val nd = table(idx) match { - case null => null - case nd => nd.findNode(key, hash) - } - if(nd != null) nd.value - else { - val table0 = table - val default = defaultValue - if(contentSize + 1 >= threshold) growTable(table.length * 2) - // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. - val newIdx = if (table0 eq table) idx else index(hash) - put0(key, default, false, hash, newIdx) - default - } - } - } - - override def put(key: K, value: V): Option[V] = put0(key, value, true) match { - case null => None - case sm => sm - } - - override def remove(key: K): Option[V] = remove0(key) match { - case null => None - case nd => Some(nd.value) - } - - override def update(key: K, value: V): Unit = put0(key, value, false) - - def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } - - def subtractOne(elem: K): this.type = { remove0(elem); this } - - override def knownSize: Int = size - - override def isEmpty: Boolean = size == 0 - - override def foreach[U](f: ((K, V)) => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n.foreach(f) - i += 1 - } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n.foreachEntry(f) - i += 1 - } - } - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new mutable.HashMap.DeserializationFactory[K, V](table.length, loadFactor), this) - - override def filterInPlace(p: (K, V) => Boolean): this.type = { - if (nonEmpty) { - var bucket = 0 - - while (bucket < table.length) { - var head = table(bucket) - - while ((head ne null) && !p(head.key, head.value)) { - head = head.next - contentSize -= 1 - } - - if (head ne null) { - var prev = head - var next = head.next - - while (next ne null) { - if (p(next.key, next.value)) { - prev = next - } else { - prev.next = next.next - contentSize -= 1 - } - next = next.next - } - } - - table(bucket) = head - bucket += 1 - } - } - this - } - - // TODO: rename to `mapValuesInPlace` and override the base version (not binary compatible) - private[mutable] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { - val len = table.length - var i = 0 - while (i < len) { - var n = table(i) - while (n ne null) { - n.value = f(n.key, n.value) - n = n.next - } - i += 1 - } - this - } - - override def mapFactory: MapFactory[HashMap] = HashMap - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "HashMap" - - override def hashCode: Int = { - if (isEmpty) MurmurHash3.emptyMapHash - else { - val tupleHashIterator = new HashMapIterator[Any] { - var hash: Int = 0 - override def hashCode: Int = hash - override protected[this] def extract(nd: Node[K, V]): Any = { - hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) - this - } - } - MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) - } - } -} - -/** - * $factoryInfo - * @define Coll `mutable.HashMap` - * @define coll mutable hash map - */ 
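-// An illustrative sketch of the by-name default in `getOrElseUpdate` (names hypothetical):
-//   val cache = HashMap.empty[String, Int]
-//   cache.getOrElseUpdate("a", 1) // miss: stores "a" -> 1 and returns 1
-//   cache.getOrElseUpdate("a", 2) // hit: returns the stored 1; the default is not evaluated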
-@SerialVersionUID(3L) -object HashMap extends MapFactory[HashMap] { - - def empty[sealed K, sealed V]: HashMap[K, V] = new HashMap[K, V] - - def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): HashMap[K, V] = { - val k = it.knownSize - val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity - new HashMap[K, V](cap, defaultLoadFactor).addAll(it) - } - - def newBuilder[sealed K, sealed V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - - def newBuilder[sealed K, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = - new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) { - override def sizeHint(size: Int) = elems.sizeHint(size) - } - - /** The default load factor for the hash table */ - final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - final def defaultInitialCapacity: Int = 16 - - @SerialVersionUID(3L) - private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]^): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) - def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor) - } - - private[collection] final class Node[K, V](_key: K, _hash: Int, private[this] var _value: V, private[this] var _next: Node[K, V]) { - def key: K = _key - def hash: Int = _hash - def value: V = _value - def value_= (v: V): Unit = _value = v - def next: Node[K, V] = _next - def next_= (n: Node[K, V]): Unit = _next = n - - @tailrec - def findNode(k: K, h: Int): Node[K, V] = - if(h == _hash && k == _key) this - else if((_next eq null) || (_hash > h)) null - else _next.findNode(k, h) - - @tailrec - def foreach[U](f: ((K, V)) => U): Unit = { - f((_key, _value)) - if(_next ne null) _next.foreach(f) - } - - @tailrec - def foreachEntry[U](f: (K, V) => U): Unit = { - f(_key, _value) - if(_next ne null) _next.foreachEntry(f) - } - - override def toString = s"Node($key, $value, $hash) -> $next" - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/HashSet.scala b/tests/pos-special/stdlib/collection/mutable/HashSet.scala deleted file mode 100644 index e8c055ff15ef..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/HashSet.scala +++ /dev/null @@ -1,457 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.tailrec -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializationProxy -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - -/** This class implements mutable sets using a hashtable. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. 
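- *
- * For example, an illustrative sketch:
- * {{{
- *   val s = HashSet(1, 2, 3)
- *   s.add(2)    // false: 2 is already present
- *   s.add(4)    // true: 4 was newly added
- *   s.remove(1) // true
- * }}}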
- * - * @define Coll `mutable.HashSet` - * @define coll mutable hash set - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class HashSet[sealed A](initialCapacity: Int, loadFactor: Double) - extends AbstractSet[A] - with SetOps[A, HashSet, HashSet[A]] - with StrictOptimizedIterableOps[A, HashSet, HashSet[A]] - with IterableFactoryDefaults[A, HashSet] - with Serializable { - - def this() = this(HashSet.defaultInitialCapacity, HashSet.defaultLoadFactor) - - import HashSet.Node - - /* The Hashset class holds the following invariant: - * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i. - * - Every bucket is sorted in ascendent hash order - * - The sum of the lengths of all buckets is equal to contentSize. - */ - /** The actual hash table. */ - private[this] var table = new Array[Node[A]](tableSizeFor(initialCapacity)) - - /** The next size value at which to resize (capacity * load factor). */ - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def size: Int = contentSize - - /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ - @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - - /** Computes the improved hash of an original (`any.##`) hash. */ - private[this] def improveHash(originalHash: Int): Int = { - // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the - // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement - // algorithm as in java.util.HashMap. - originalHash ^ (originalHash >>> 16) - } - - /** Computes the improved hash of this element */ - @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) - - @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - - override def contains(elem: A): Boolean = findNode(elem) ne null - - @`inline` private[this] def findNode(elem: A): Node[A] = { - val hash = computeHash(elem) - table(index(hash)) match { - case null => null - case nd => nd.findNode(elem, hash) - } - } - - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) - if(target > table.length) growTable(target) - } - - override def add(elem: A) : Boolean = { - if(contentSize + 1 >= threshold) growTable(table.length * 2) - addElem(elem, computeHash(elem)) - } - - override def addAll(xs: IterableOnce[A]^): this.type = { - sizeHint(xs.knownSize) - xs match { - case hs: immutable.HashSet[A] => - hs.foreachWithHash((k, h) => addElem(k, improveHash(h))) - this - case hs: mutable.HashSet[A] => - val iter = hs.nodeIterator - while (iter.hasNext) { - val next = iter.next() - addElem(next.key, next.hash) - } - this - case lhs: mutable.LinkedHashSet[A] => - val iter = lhs.entryIterator - while (iter.hasNext) { - val next = iter.next() - addElem(next.key, next.hash) - } - this - case _ => super.addAll(xs) - } - } - - override def subtractAll(xs: IterableOnce[A]^): this.type = { - if (size == 0) { - return this - } - - xs match { - case hs: immutable.HashSet[A] => - hs.foreachWithHashWhile { (k, h) => - remove(k, improveHash(h)) - size > 0 - } - this - case hs: mutable.HashSet[A] => - val iter = hs.nodeIterator - while (iter.hasNext) { - val next = iter.next() - remove(next.key, next.hash) - if (size == 0) return this - } - this - case lhs: 
mutable.LinkedHashSet[A] => - val iter = lhs.entryIterator - while (iter.hasNext) { - val next = iter.next() - remove(next.key, next.hash) - if (size == 0) return this - } - this - case _ => super.subtractAll(xs) - } - } - - /** Adds an element to this set - * @param elem element to add - * @param hash the **improved** hash of `elem` (see computeHash) - */ - private[this] def addElem(elem: A, hash: Int) : Boolean = { - val idx = index(hash) - table(idx) match { - case null => - table(idx) = new Node(elem, hash, null) - case old => - var prev: Node[A] = null - var n = old - while((n ne null) && n.hash <= hash) { - if(n.hash == hash && elem == n.key) return false - prev = n - n = n.next - } - if(prev eq null) - table(idx) = new Node(elem, hash, old) - else - prev.next = new Node(elem, hash, prev.next) - } - contentSize += 1 - true - } - - private[this] def remove(elem: A, hash: Int): Boolean = { - val idx = index(hash) - table(idx) match { - case null => false - case nd if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - contentSize -= 1 - true - case nd => - // find an element that matches - var prev = nd - var next = nd.next - while((next ne null) && next.hash <= hash) { - if(next.hash == hash && next.key == elem) { - prev.next = next.next - contentSize -= 1 - return true - } - prev = next - next = next.next - } - false - } - } - - override def remove(elem: A) : Boolean = remove(elem, computeHash(elem)) - - private[this] abstract class HashSetIterator[B] extends AbstractIterator[B] { - private[this] var i = 0 - private[this] var node: Node[A] = null - private[this] val len = table.length - - protected[this] def extract(nd: Node[A]): B - - def hasNext: Boolean = { - if(node ne null) true - else { - while(i < len) { - val n = table(i) - i += 1 - if(n ne null) { node = n; return true } - } - false - } - } - - def next(): B = - if(!hasNext) Iterator.empty.next() - else { - val r = extract(node) - node = node.next - r - } - } - - override def iterator: Iterator[A] = new HashSetIterator[A] { - override protected[this] def extract(nd: Node[A]): A = nd.key - } - - /** Returns an iterator over the nodes stored in this HashSet */ - private[collection] def nodeIterator: Iterator[Node[A]] = new HashSetIterator[Node[A]] { - override protected[this] def extract(nd: Node[A]): Node[A] = nd - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => new IntTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) - case StepperShape.LongShape => new LongTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) - case StepperShape.DoubleShape => new DoubleTableStepper[Node[A]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) - case _ => shape.parUnbox(new AnyTableStepper[A, Node[A]](size, table, _.next, _.key, 0, table.length)) - } - s.asInstanceOf[S with EfficientSplit] - } - - private[this] def growTable(newlen: Int) = { - var oldlen = table.length - threshold = newThreshold(newlen) - if(size == 0) table = new Array(newlen) - else { - table = java.util.Arrays.copyOf(table, newlen) - val preLow: Node[A] = new Node(null.asInstanceOf[A], 0, null) - val preHigh: Node[A] = new Node(null.asInstanceOf[A], 0, null) - // Split buckets until the new length has been reached. 
This could be done more - // efficiently when growing an already filled table to more than double the size. - while(oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if(old ne null) { - preLow.next = null - preHigh.next = null - var lastLow: Node[A] = preLow - var lastHigh: Node[A] = preHigh - var n = old - while(n ne null) { - val next = n.next - if((n.hash & oldlen) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if(old ne preLow.next) table(i) = preLow.next - if(preHigh.next ne null) { - table(i + oldlen) = preHigh.next - lastHigh.next = null - } - } - i += 1 - } - oldlen *= 2 - } - } - } - - override def filterInPlace(p: A => Boolean): this.type = { - if (nonEmpty) { - var bucket = 0 - - while (bucket < table.length) { - var head = table(bucket) - - while ((head ne null) && !p(head.key)) { - head = head.next - contentSize -= 1 - } - - if (head ne null) { - var prev = head - var next = head.next - - while (next ne null) { - if (p(next.key)) { - prev = next - } else { - prev.next = next.next - contentSize -= 1 - } - next = next.next - } - } - - table(bucket) = head - bucket += 1 - } - } - this - } - - /* - private[mutable] def checkTable(): Unit = { - var i = 0 - var count = 0 - var prev: Node[A] = null - while(i < table.length) { - var n = table(i) - prev = null - while(n != null) { - count += 1 - assert(index(n.hash) == i) - if(prev ne null) assert(prev.hash <= n.hash) - prev = n - n = n.next - } - i += 1 - } - assert(contentSize == count) - } - */ - - private[this] def tableSizeFor(capacity: Int) = - (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt - - def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - } - - override def iterableFactory: IterableFactory[HashSet] = HashSet - - @`inline` def addOne(elem: A): this.type = { add(elem); this } - - @`inline` def subtractOne(elem: A): this.type = { remove(elem); this } - - override def knownSize: Int = size - - override def isEmpty: Boolean = size == 0 - - override def foreach[U](f: A => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n.foreach(f) - i += 1 - } - } - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new HashSet.DeserializationFactory[A](table.length, loadFactor), this) - - override protected[this] def className = "HashSet" - - override def hashCode: Int = { - val setIterator = this.iterator - val hashIterator: Iterator[Any] = - if (setIterator.isEmpty) setIterator - else new HashSetIterator[Any] { - var hash: Int = 0 - override def hashCode: Int = hash - override protected[this] def extract(nd: Node[A]): Any = { - hash = unimproveHash(nd.hash) - this - } - } - MurmurHash3.unorderedHash(hashIterator, MurmurHash3.setSeed) - } -} - -/** - * $factoryInfo - * @define Coll `mutable.HashSet` - * @define coll mutable hash set - */ -@SerialVersionUID(3L) -object HashSet extends IterableFactory[HashSet] { - - def from[sealed B](it: scala.collection.IterableOnce[B]^): HashSet[B] = { - val k = it.knownSize - val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity - new HashSet[B](cap, defaultLoadFactor) ++= it - } - - def empty[sealed A]: HashSet[A] = new HashSet[A] - - def newBuilder[sealed A]: Builder[A, HashSet[A]] = 
newBuilder(defaultInitialCapacity, defaultLoadFactor) - - def newBuilder[sealed A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = - new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) { - override def sizeHint(size: Int) = elems.sizeHint(size) - } - - /** The default load factor for the hash table */ - final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - final def defaultInitialCapacity: Int = 16 - - @SerialVersionUID(3L) - private final class DeserializationFactory[sealed A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]^): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it - def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor) - } - - private[collection] final class Node[K](_key: K, _hash: Int, private[this] var _next: Node[K]) { - def key: K = _key - def hash: Int = _hash - def next: Node[K] = _next - def next_= (n: Node[K]): Unit = _next = n - - @tailrec - def findNode(k: K, h: Int): Node[K] = - if(h == _hash && k == _key) this - else if((_next eq null) || (_hash > h)) null - else _next.findNode(k, h) - - @tailrec - def foreach[U](f: K => U): Unit = { - f(_key) - if(_next ne null) _next.foreach(f) - } - - override def toString = s"Node($key, $hash) -> $next" - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/HashTable.scala b/tests/pos-special/stdlib/collection/mutable/HashTable.scala deleted file mode 100644 index a3534e322cf3..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/HashTable.scala +++ /dev/null @@ -1,418 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.mutable - -import collection.{AbstractIterator, Iterator} - -import java.lang.Integer.{numberOfLeadingZeros, rotateRight} -import scala.util.hashing.byteswap32 - -import java.lang.Integer -import language.experimental.captureChecking - -/** This class can be used to construct data structures that are based - * on hashtables. Class `HashTable[A]` implements a hashtable - * that maps keys of type `A` to values of the fully abstract - * member type `Entry`. Classes that make use of `HashTable` - * have to provide an implementation for `Entry`. - * - * There are mainly two parameters that affect the performance of a hashtable: - * the initial size and the load factor. The size - * refers to the number of buckets in the hashtable, and the load - * factor is a measure of how full the hashtable is allowed to get before - * its size is automatically doubled. Both parameters may be changed by - * overriding the corresponding values in class `HashTable`. - * - * @tparam A type of the elements contained in this hash table. - */ -// Not used in the standard library, but used in scala-parallel-collections -private[collection] trait HashTable[sealed A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { - // Replacing Entry type parameter by abstract type member here allows to not expose to public - // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. - // However, I'm afraid it's too late now for such breaking change. 
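// [Editor's aside; not part of the patch.] The capacity and threshold
// arithmetic used by these tables is easy to check in isolation. The
// sketch below is illustrative only: `CapacityDemo` is an invented name,
// while `tableSizeFor` and the threshold formula are copied verbatim from
// the HashSet code above.
object CapacityDemo {
  // Round a requested capacity up to a power of two (minimum 8, capped at 2^30).
  def tableSizeFor(capacity: Int): Int =
    (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30)

  def main(args: Array[String]): Unit = {
    val loadFactor = 0.75                                  // HashSet.defaultLoadFactor
    val capacity = tableSizeFor(100)                       // -> 128
    val threshold = (capacity.toDouble * loadFactor).toInt // -> 96
    println(s"capacity=$capacity, grows after $threshold elements")
  }
}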
- import HashTable._ - - protected var _loadFactor = defaultLoadFactor - - /** The actual hash table. - */ - protected[collection] var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity) - - /** The number of mappings contained in this hash table. - */ - protected[collection] var tableSize: Int = 0 - - final def size: Int = tableSize - - /** The next size value at which to resize (capacity * load factor). - */ - protected[collection] var threshold: Int = initialThreshold(_loadFactor) - - /** The array keeping track of the number of elements in 32 element blocks. - */ - protected var sizemap: Array[Int] = null - - protected var seedvalue: Int = tableSizeSeed - - protected def tableSizeSeed = Integer.bitCount(table.length - 1) - - /** The initial size of the hash table. - */ - protected def initialSize: Int = 16 - - /** The initial threshold. - */ - private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity) - - private def initialCapacity = capacity(initialSize) - - private def lastPopulatedIndex = { - var idx = table.length - 1 - while (table(idx) == null && idx > 0) - idx -= 1 - - idx - } - - /** - * Initializes the collection from the input stream. `readEntry` will be called for each - * entry to be read from the input stream. - */ - private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry): Unit = { - _loadFactor = in.readInt() - assert(_loadFactor > 0) - - val size = in.readInt() - tableSize = 0 - assert(size >= 0) - - seedvalue = in.readInt() - - val smDefined = in.readBoolean() - - table = new Array(capacity(sizeForThreshold(_loadFactor, size))) - threshold = newThreshold(_loadFactor, table.length) - - if (smDefined) sizeMapInit(table.length) else sizemap = null - - var index = 0 - while (index < size) { - addEntry(readEntry) - index += 1 - } - } - - /** - * Serializes the collection to the output stream by saving the load factor, collection - * size and collection entries. `writeEntry` is responsible for writing an entry to the stream. - * - * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To - * deserialize, `init` should be used. - */ - private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit): Unit = { - out.writeInt(_loadFactor) - out.writeInt(tableSize) - out.writeInt(seedvalue) - out.writeBoolean(isSizeMapDefined) - - foreachEntry(writeEntry) - } - - /** Find entry with given key in table, null if not found. - */ - final def findEntry(key: A): Entry = - findEntry0(key, index(elemHashCode(key))) - - protected[collection] final def findEntry0(key: A, h: Int): Entry = { - var e = table(h).asInstanceOf[Entry] - while (e != null && !elemEquals(e.key, key)) e = e.next - e - } - - /** Add entry to table - * pre: no entry with same key exists - */ - protected[collection] final def addEntry(e: Entry): Unit = { - addEntry0(e, index(elemHashCode(e.key))) - } - - protected[collection] final def addEntry0(e: Entry, h: Int): Unit = { - e.next = table(h).asInstanceOf[Entry] - table(h) = e - tableSize = tableSize + 1 - nnSizeMapAdd(h) - if (tableSize > threshold) - resize(2 * table.length) - } - - /** Find entry with given key in table, or add new one if not found. - * May be somewhat faster then `findEntry`/`addEntry` pair as it - * computes entry's hash index only once. - * Returns entry found in table or null. - * New entries are created by calling `createNewEntry` method. 
- */ - def findOrAddEntry(key: A, value: B): Entry = { - val h = index(elemHashCode(key)) - val e = findEntry0(key, h) - if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null } - } - - /** Creates new entry to be immediately inserted into the hashtable. - * This method is guaranteed to be called only once and in case that the entry - * will be added. In other words, an implementation may be side-effecting. - */ - def createNewEntry(key: A, value: B): Entry - - /** Remove entry from table if present. - */ - final def removeEntry(key: A) : Entry = { - removeEntry0(key, index(elemHashCode(key))) - } - /** Remove entry from table if present. - */ - private[collection] final def removeEntry0(key: A, h: Int) : Entry = { - var e = table(h).asInstanceOf[Entry] - if (e != null) { - if (elemEquals(e.key, key)) { - table(h) = e.next - tableSize = tableSize - 1 - nnSizeMapRemove(h) - e.next = null - return e - } else { - var e1 = e.next - while (e1 != null && !elemEquals(e1.key, key)) { - e = e1 - e1 = e1.next - } - if (e1 != null) { - e.next = e1.next - tableSize = tableSize - 1 - nnSizeMapRemove(h) - e1.next = null - return e1 - } - } - } - null - } - - /** An iterator returning all entries. - */ - def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] { - val iterTable = table - var idx = lastPopulatedIndex - var es = iterTable(idx) - - def hasNext = es != null - def next() = { - val res = es - es = es.next - while (es == null && idx > 0) { - idx = idx - 1 - es = iterTable(idx) - } - res.asInstanceOf[Entry] - } - } - - /** Avoid iterator for a 2x faster traversal. */ - def foreachEntry[U](f: Entry => U): Unit = { - val iterTable = table - var idx = lastPopulatedIndex - var es = iterTable(idx) - - while (es != null) { - val next = es.next // Cache next in case f removes es. - f(es.asInstanceOf[Entry]) - es = next - - while (es == null && idx > 0) { - idx -= 1 - es = iterTable(idx) - } - } - } - - /** Remove all entries from table - */ - def clearTable(): Unit = { - var i = table.length - 1 - while (i >= 0) { table(i) = null; i = i - 1 } - tableSize = 0 - nnSizeMapReset(0) - } - - private def resize(newSize: Int): Unit = { - val oldTable = table - table = new Array(newSize) - nnSizeMapReset(table.length) - var i = oldTable.length - 1 - while (i >= 0) { - var e = oldTable(i) - while (e != null) { - val h = index(elemHashCode(e.key)) - val e1 = e.next - e.next = table(h).asInstanceOf[Entry] - table(h) = e - e = e1 - nnSizeMapAdd(h) - } - i = i - 1 - } - threshold = newThreshold(_loadFactor, newSize) - } - - /* Size map handling code */ - - /* - * The following three sizeMap* functions (Add, Remove, Reset) - * are used to update the size map of the hash table. - * - * The size map logically divides the hash table into `sizeMapBucketSize` element buckets - * by keeping an integer entry for each such bucket. Each integer entry simply denotes - * the number of elements in the corresponding bucket. - * Best understood through an example, see: - * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) - * sizemap = [ 2 | 3 ] (2 entries) - * where sizeMapBucketSize == 4. - * - * By default the size map is not initialized, so these methods don't do anything, thus, - * their impact on hash table performance is negligible. However, if the hash table - * is converted into a parallel hash table, the size map is initialized, as it will be needed - * there. 
- */ - protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) += 1 - } - - protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) -= 1 - } - - protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { - val nsize = calcSizeMapSize(tableLength) - if (sizemap.length != nsize) sizemap = new Array[Int](nsize) - else java.util.Arrays.fill(sizemap, 0) - } - - private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize - - protected final def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 - - // discards the previous sizemap and only allocates a new one - protected def sizeMapInit(tableLength: Int): Unit = { - sizemap = new Array[Int](calcSizeMapSize(tableLength)) - } - - // discards the previous sizemap and populates the new one - protected final def sizeMapInitAndRebuild() = { - sizeMapInit(table.length) - - // go through the buckets, count elements - var tableidx = 0 - var bucketidx = 0 - val tbl = table - var tableuntil = 0 - if (tbl.length < sizeMapBucketSize) tableuntil = tbl.length else tableuntil = sizeMapBucketSize - val totalbuckets = totalSizeMapBuckets - while (bucketidx < totalbuckets) { - var currbucketsize = 0 - while (tableidx < tableuntil) { - var e = tbl(tableidx) - while (e ne null) { - currbucketsize += 1 - e = e.next - } - tableidx += 1 - } - sizemap(bucketidx) = currbucketsize - tableuntil += sizeMapBucketSize - bucketidx += 1 - } - } - - private[collection] def printSizeMap() = { - println(sizemap.to(collection.immutable.List)) - } - - protected final def sizeMapDisable() = sizemap = null - - protected final def isSizeMapDefined = sizemap ne null - - // override to automatically initialize the size map - protected def alwaysInitSizeMap = false - - /* End of size map handling code */ - - protected def elemEquals(key1: A, key2: A): Boolean = (key1 == key2) - - /** - * Note: we take the most significant bits of the hashcode, not the lower ones - * this is of crucial importance when populating the table in parallel - */ - protected[collection] final def index(hcode: Int): Int = { - val ones = table.length - 1 - val exponent = Integer.numberOfLeadingZeros(ones) - (improve(hcode, seedvalue) >>> exponent) & ones - } -} - -private[collection] object HashTable { - /** The load factor for the hash table (in 0.001 step). - */ - private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75% - private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible - - private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt - - private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt - - private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize) - - trait HashUtils[KeyType] { - protected final def sizeMapBucketBitSize = 5 - // so that: - protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize - - protected[collection] def elemHashCode(key: KeyType) = key.## - - /** - * Defer to a high-quality hash in [[scala.util.hashing]]. - * The goal is to distribute across bins as well as possible even if a hash code has low entropy at some bits. - *
- * OLD VERSION - quick, but bad for sequence 0-10000 - little entropy in higher bits - since 2003 - * {{{ - * var h: Int = hcode + ~(hcode << 9) - * h = h ^ (h >>> 14) - * h = h + (h << 4) - * h ^ (h >>> 10) - * }}} - * the rest of the computation is due to SI-5293 - */ - protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed) - } - - /** - * Returns a power of two >= `target`. - */ - private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) -} - -/** Class used internally. - */ -private[collection] trait HashEntry[A, sealed E <: HashEntry[A, E]] { - val key: A - var next: E = _ -} diff --git a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala deleted file mode 100644 index 1af98162e9f3..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable -import language.experimental.captureChecking - - -/** - * Reusable builder for immutable collections - */ -abstract class ImmutableBuilder[-A, C <: IterableOnce[_]](empty: C) - extends ReusableBuilder[A, C] { - - protected var elems: C = empty - - def clear(): Unit = { elems = empty } - - def result(): C = elems - - override def knownSize: Int = elems.knownSize -} diff --git a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala deleted file mode 100644 index 022970b4c56f..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable -import language.experimental.captureChecking - -trait IndexedSeq[T] extends Seq[T] - with scala.collection.IndexedSeq[T] - with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] - with IterableFactoryDefaults[T, IndexedSeq] { - - override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq -} - -@SerialVersionUID(3L) -object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](ArrayBuffer) - -trait IndexedSeqOps[A, +CC[_], +C <: AnyRef] - extends scala.collection.IndexedSeqOps[A, CC, C] - with SeqOps[A, CC, C] { - - /** Modifies this $coll by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. - * @return this $coll modified by replacing all elements with the - * result of applying the given function `f` to each element - * of this $coll. - */ - def mapInPlace(f: A => A): this.type = { - var i = 0 - val siz = size - while (i < siz) { this(i) = f(this(i)); i += 1 } - this - } - - /** Sorts this $coll in place according to an Ordering. - * - * @see [[scala.collection.SeqOps.sorted]] - * @param ord the ordering to be used to compare elements. - * @return modified input $coll sorted according to the ordering `ord`. 
- */ - def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { - val len = this.length - if (len > 1) { - val arr = new Array[AnyRef](len) - var i = 0 - for (x <- this) { - arr(i) = x.asInstanceOf[AnyRef] - i += 1 - } - java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) - i = 0 - while (i < arr.length) { - update(i, arr(i).asInstanceOf[A]) - i += 1 - } - } - this - } - - /** Sorts this $coll in place according to a comparison function. - * - * @see [[scala.collection.SeqOps.sortWith]] - */ - def sortInPlaceWith(lt: (A, A) => Boolean): this.type = sortInPlace()(Ordering.fromLessThan(lt)) - - /** Sorts this $coll in place according to the Ordering which results from transforming - * an implicitly given Ordering with a transformation function. - * - * @see [[scala.collection.SeqOps.sortBy]] - */ - def sortInPlaceBy[B](f: A => B)(implicit ord: Ordering[B]): this.type = sortInPlace()(ord on f) - -} diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala deleted file mode 100644 index a253e8738b26..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala +++ /dev/null @@ -1,510 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.{nowarn, tailrec} -import scala.collection.generic.DefaultSerializable -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - - -/** This class implements mutable maps using a hashtable. - * The iterator and all traversal methods of this class visit elements in the order they were inserted. - * - * @tparam K the type of the keys contained in this hash map. - * @tparam V the type of the values assigned to keys in this hash map. - * - * @define Coll `LinkedHashMap` - * @define coll linked hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -@deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11") -class LinkedHashMap[sealed K, sealed V] - extends AbstractMap[K, V] - with SeqMap[K, V] - with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]] - with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] - with MapFactoryDefaults[K, V, LinkedHashMap, Iterable] - with DefaultSerializable { - - override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap - - // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper - // would not return the elements in insertion order - - private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] - - private[collection] def _firstEntry: Entry = firstEntry - - protected var firstEntry: Entry = null - - protected var lastEntry: Entry = null - - /* Uses the same implementation as mutable.HashMap. The hashtable holds the following invariant: - * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. 
- * - Every bucket is sorted in ascendant hash order - * - The sum of the lengths of all buckets is equal to contentSize. - */ - private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize)) - - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def last: (K, V) = - if (size > 0) (lastEntry.key, lastEntry.value) - else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") - - override def lastOption: Option[(K, V)] = - if (size > 0) Some((lastEntry.key, lastEntry.value)) - else None - - override def head: (K, V) = - if (size > 0) (firstEntry.key, firstEntry.value) - else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") - - override def headOption: Option[(K, V)] = - if (size > 0) Some((firstEntry.key, firstEntry.value)) - else None - - override def size = contentSize - override def knownSize: Int = size - override def isEmpty: Boolean = size == 0 - - def get(key: K): Option[V] = { - val e = findEntry(key) - if (e == null) None - else Some(e.value) - } - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / LinkedHashMap.defaultLoadFactor).toInt) - if (target > table.length) growTable(target) - } - - override def contains(key: K): Boolean = { - if (getClass eq classOf[LinkedHashMap[_, _]]) - findEntry(key) != null - else - super.contains(key) // A subclass might override `get`, use the default implementation `contains`. - } - - override def put(key: K, value: V): Option[V] = put0(key, value, true) match { - case null => None - case sm => sm - } - - override def update(key: K, value: V): Unit = put0(key, value, false) - - override def remove(key: K): Option[V] = removeEntry0(key) match { - case null => None - case nd => Some(nd.value) - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - if (getClass != classOf[LinkedHashMap[_, _]]) { - // subclasses of LinkedHashMap might customise `get` ... - super.getOrElse(key, default) - } else { - // .. but in the common case, we can avoid the Option boxing. - val nd = findEntry(key) - if (nd eq null) default else nd.value - } - } - - override def getOrElseUpdate(key: K, defaultValue: => V): V = { - if (getClass != classOf[LinkedHashMap[_, _]]) { - // subclasses of LinkedHashMap might customise `get` ... - super.getOrElseUpdate(key, defaultValue) - } else { - val hash = computeHash(key) - val idx = index(hash) - val nd = table(idx) match { - case null => null - case nd => nd.findEntry(key, hash) - } - if (nd != null) nd.value - else { - val table0 = table - val default = defaultValue - if (contentSize + 1 >= threshold) growTable(table.length * 2) - // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. 
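// [Editor's note; not part of the original source.] A concrete case this
// guards against: a by-name defaultValue such as { m ++= other; compute() }
// can itself insert entries, and the growTable call above can also replace
// the backing array; either way `idx` would be stale. The `table0 eq table`
// test detects that the array object changed and recomputes the index.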
- val newIdx = if (table0 eq table) idx else index(hash) - put0(key, default, false, hash, newIdx) - default - } - } - } - - private[this] def removeEntry0(elem: K): Entry = removeEntry0(elem, computeHash(elem)) - - /** Removes a key from this map if it exists - * - * @param elem the element to remove - * @param hash the **improved** hashcode of `element` (see computeHash) - * @return the node that contained element if it was present, otherwise null - */ - private[this] def removeEntry0(elem: K, hash: Int): Entry = { - val idx = index(hash) - table(idx) match { - case null => null - case nd if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - deleteEntry(nd) - contentSize -= 1 - nd - case nd => - // find an element that matches - var prev = nd - var next = nd.next - while ((next ne null) && next.hash <= hash) { - if (next.hash == hash && next.key == elem) { - prev.next = next.next - deleteEntry(next) - contentSize -= 1 - return next - } - prev = next - next = next.next - } - null - } - } - - /** Computes the improved hash of an original (`any.##`) hash. */ - @`inline` private[this] def improveHash(originalHash: Int): Int = { - originalHash ^ (originalHash >>> 16) - } - @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - - /** Computes the improved hash of this key */ - @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) - - @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - - @`inline` private[this] def findEntry(key: K): Entry = { - val hash = computeHash(key) - table(index(hash)) match { - case null => null - case nd => nd.findEntry(key, hash) - } - } - - def addOne(kv: (K, V)): this.type = { - put(kv._1, kv._2) - this - } - - def subtractOne(key: K): this.type = { - remove(key) - this - } - - private[this] abstract class LinkedHashMapIterator[T] extends AbstractIterator[T] { - private[this] var cur = firstEntry - def extract(nd: Entry): T - def hasNext: Boolean = cur ne null - def next(): T = - if (hasNext) { val r = extract(cur); cur = cur.later; r } - else Iterator.empty.next() - } - - def iterator: Iterator[(K, V)] = - if (size == 0) Iterator.empty - else new LinkedHashMapIterator[(K, V)] { - def extract(nd: Entry): (K, V) = (nd.key, nd.value) - } - - protected class LinkedKeySet extends KeySet { - override def iterableFactory: IterableFactory[collection.Set] = LinkedHashSet - } - - override def keySet: collection.Set[K] = new LinkedKeySet - - override def keysIterator: Iterator[K] = - if (size == 0) Iterator.empty - else new LinkedHashMapIterator[K] { - def extract(nd: Entry): K = nd.key - } - - private[collection] def entryIterator: Iterator[Entry] = - if (size == 0) Iterator.empty - else new LinkedHashMapIterator[Entry] { - def extract(nd: Entry): Entry = nd - } - - - // Override updateWith for performance, so we can do the update while hashing - // the input key only once and performing one lookup into the hash table - override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - if (getClass != classOf[LinkedHashMap[_, _]]) { - // subclasses of LinkedHashMap might customise `get` ... 
- super.updateWith(key)(remappingFunction) - } else { - val hash = computeHash(key) - val indexedHash = index(hash) - - var foundEntry: Entry = null - var previousEntry: Entry = null - table(indexedHash) match { - case null => - case nd => - @tailrec - def findEntry(prev: Entry, nd: Entry, k: K, h: Int): Unit = { - if (h == nd.hash && k == nd.key) { - previousEntry = prev - foundEntry = nd - } - else if ((nd.next eq null) || (nd.hash > h)) () - else findEntry(nd, nd.next, k, h) - } - - findEntry(null, nd, key, hash) - } - - val previousValue = foundEntry match { - case null => None - case nd => Some(nd.value) - } - - val nextValue = remappingFunction(previousValue) - - (previousValue, nextValue) match { - case (None, None) => // do nothing - - case (Some(_), None) => - if (previousEntry != null) previousEntry.next = foundEntry.next - else table(indexedHash) = foundEntry.next - deleteEntry(foundEntry) - contentSize -= 1 - - case (None, Some(value)) => - val newIndexedHash = - if (contentSize + 1 >= threshold) { - growTable(table.length * 2) - index(hash) - } else indexedHash - put0(key, value, false, hash, newIndexedHash) - - case (Some(_), Some(newValue)) => foundEntry.value = newValue - } - nextValue - } - } - - override def valuesIterator: Iterator[V] = - if (size == 0) Iterator.empty - else new LinkedHashMapIterator[V] { - def extract(nd: Entry): V = nd.value - } - - - override def foreach[U](f: ((K, V)) => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f((cur.key, cur.value)) - cur = cur.later - } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur.key, cur.value) - cur = cur.later - } - } - - override def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - firstEntry = null - lastEntry = null - } - - private[this] def tableSizeFor(capacity: Int) = - (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt - - /*create a new entry. If table is empty(firstEntry is null), then the - * new entry will be the firstEntry. If not, just set the new entry to - * be the lastEntry. 
- * */ - private[this] def createNewEntry(key: K, hash: Int, value: V): Entry = { - val e = new Entry(key, hash, value) - if (firstEntry eq null) firstEntry = e - else { - lastEntry.later = e - e.earlier = lastEntry - } - lastEntry = e - e - } - - /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */ - private[this] def deleteEntry(e: Entry): Unit = { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null - e.later = null - e.next = null - } - - private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { - if (contentSize + 1 >= threshold) growTable(table.length * 2) - val hash = computeHash(key) - val idx = index(hash) - put0(key, value, getOld, hash, idx) - } - - private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { - table(idx) match { - case null => - table(idx) = createNewEntry(key, hash, value) - case old => - var prev: Entry = null - var n = old - while ((n ne null) && n.hash <= hash) { - if (n.hash == hash && key == n.key) { - val old = n.value - n.value = value - return if (getOld) Some(old) else null - } - prev = n - n = n.next - } - val nnode = createNewEntry(key, hash, value) - if (prev eq null) { - nnode.next = old - table(idx) = nnode - } else { - nnode.next = prev.next - prev.next = nnode - } - } - contentSize += 1 - null - } - - private[this] def growTable(newlen: Int): Unit = { - if (newlen < 0) - throw new RuntimeException(s"new hash table size $newlen exceeds maximum") - var oldlen = table.length - threshold = newThreshold(newlen) - if (size == 0) table = new Array(newlen) - else { - table = java.util.Arrays.copyOf(table, newlen) - val preLow = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) - val preHigh = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) - // Split buckets until the new length has been reached. This could be done more - // efficiently when growing an already filled table to more than double the size. 
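// [Editor's sketch; not part of the original source.] Why splitting works:
// with a power-of-two table, index(hash) = hash & (table.length - 1), so
// doubling the table consults exactly one extra hash bit. An entry in
// bucket i therefore either stays at i (hash & oldlen == 0) or moves to
// i + oldlen. For oldlen = 8: hash 3 (binary 00011) stays in bucket 3;
// hash 11 (binary 01011) moves to bucket 3 + 8 = 11.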
- while (oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if (old ne null) { - preLow.next = null - preHigh.next = null - var lastLow = preLow - var lastHigh = preHigh - var n = old - while (n ne null) { - val next = n.next - if ((n.hash & oldlen) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if (old ne preLow.next) table(i) = preLow.next - if (preHigh.next ne null) { - table(i + oldlen) = preHigh.next - lastHigh.next = null - } - } - i += 1 - } - oldlen *= 2 - } - } - } - - override def hashCode: Int = { - if (isEmpty) MurmurHash3.emptyMapHash - else { - val tupleHashIterator = new LinkedHashMapIterator[Any] { - var hash: Int = 0 - override def hashCode: Int = hash - override def extract(nd: Entry): Any = { - hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) - this - } - } - MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) - } - } - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "LinkedHashMap" -} - -/** $factoryInfo - * @define Coll `LinkedHashMap` - * @define coll linked hash map - */ -@SerialVersionUID(3L) -object LinkedHashMap extends MapFactory[LinkedHashMap] { - - def empty[sealed K, sealed V] = new LinkedHashMap[K, V] - - def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^) = { - val newlhm = empty[K, V] - newlhm.sizeHint(it.knownSize) - newlhm.addAll(it) - newlhm - } - - def newBuilder[sealed K, sealed V] = new GrowableBuilder(empty[K, V]) - - /** Class for the linked hash map entry, used internally. - */ - private[mutable] final class LinkedEntry[sealed K, sealed V](val key: K, val hash: Int, var value: V) { - var earlier: LinkedEntry[K, V] = null - var later: LinkedEntry[K, V] = null - var next: LinkedEntry[K, V] = null - - @tailrec - final def findEntry(k: K, h: Int): LinkedEntry[K, V] = - if (h == hash && k == key) this - else if ((next eq null) || (hash > h)) null - else next.findEntry(k, h) - } - - /** The default load factor for the hash table */ - private[collection] final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - private[collection] final def defaultinitialSize: Int = 16 -} diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala deleted file mode 100644 index a895034a852c..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala +++ /dev/null @@ -1,349 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.{nowarn, tailrec} -import scala.collection.generic.DefaultSerializable -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - -/** This class implements mutable sets using a hashtable. - * The iterator and all traversal methods of this class visit elements in the order they were inserted. - * - * @tparam A the type of the elements contained in this set. 
- * - * @define Coll `LinkedHashSet` - * @define coll linked hash set - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -@deprecatedInheritance("LinkedHashSet will be made final", "2.13.11") -class LinkedHashSet[sealed A] - extends AbstractSet[A] - with SetOps[A, LinkedHashSet, LinkedHashSet[A]] - with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] - with IterableFactoryDefaults[A, LinkedHashSet] - with DefaultSerializable { - - override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet - - // stepper is not overridden to use XTableStepper because that stepper would not return the - // elements in insertion order - - /*private*/ type Entry = LinkedHashSet.Entry[A] - - protected var firstEntry: Entry = null - - protected var lastEntry: Entry = null - - /* Uses the same implementation as mutable.HashSet. The hashtable holds the following invariant: - * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. - * - Every bucket is sorted in ascendant hash order - * - The sum of the lengths of all buckets is equal to contentSize. - */ - private[this] var table = new Array[Entry](tableSizeFor(LinkedHashSet.defaultinitialSize)) - - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def last: A = - if (size > 0) lastEntry.key - else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") - - override def lastOption: Option[A] = - if (size > 0) Some(lastEntry.key) - else None - - override def head: A = - if (size > 0) firstEntry.key - else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") - - override def headOption: Option[A] = - if (size > 0) Some(firstEntry.key) - else None - - override def size: Int = contentSize - override def knownSize: Int = size - override def isEmpty: Boolean = size == 0 - - def contains(elem: A): Boolean = findEntry(elem) ne null - - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / LinkedHashSet.defaultLoadFactor).toInt) - if (target > table.length) growTable(target) - } - - override def add(elem: A): Boolean = { - if (contentSize + 1 >= threshold) growTable(table.length * 2) - val hash = computeHash(elem) - put0(elem, hash, index(hash)) - } - - def addOne(elem: A): this.type = { - add(elem) - this - } - - def subtractOne(elem: A): this.type = { - remove(elem) - this - } - - override def remove(elem: A): Boolean = remove0(elem, computeHash(elem)) - - private[this] abstract class LinkedHashSetIterator[T] extends AbstractIterator[T] { - private[this] var cur = firstEntry - def extract(nd: Entry): T - def hasNext: Boolean = cur ne null - def next(): T = - if (hasNext) { val r = extract(cur); cur = cur.later; r } - else Iterator.empty.next() - } - - def iterator: Iterator[A] = new LinkedHashSetIterator[A] { - override def extract(nd: Entry): A = nd.key - } - - private[collection] def entryIterator: Iterator[Entry] = new LinkedHashSetIterator[Entry] { - override def extract(nd: Entry): Entry = nd - } - - override def foreach[U](f: A => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur.key) - cur = cur.later - } - } - - override def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - firstEntry = null - lastEntry = null - } - - private[this] def tableSizeFor(capacity: Int) = - 
(Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashSet.defaultLoadFactor).toInt - - @`inline` private[this] def improveHash(originalHash: Int): Int = { - originalHash ^ (originalHash >>> 16) - } - - @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - - /** Computes the improved hash of this key */ - @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) - - @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - - @`inline` private[this] def findEntry(key: A): Entry = { - val hash = computeHash(key) - table(index(hash)) match { - case null => null - case nd => nd.findEntry(key, hash) - } - } - - /*create a new entry. If table is empty(firstEntry is null), then the - * new entry will be the firstEntry. If not, just set the new entry to - * be the lastEntry. - * */ - private[this] def createNewEntry(key: A, hash: Int): Entry = { - val e = new Entry(key, hash) - if (firstEntry eq null) firstEntry = e - else { - lastEntry.later = e - e.earlier = lastEntry - } - lastEntry = e - e - } - - /** Delete the entry from the LinkedHashSet, set the `earlier` and `later` pointers correctly */ - private[this] def deleteEntry(e: Entry): Unit = { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null - e.later = null - e.next = null - } - - private[this] def put0(elem: A, hash: Int, idx: Int): Boolean = { - table(idx) match { - case null => - table(idx) = createNewEntry(elem, hash) - case old => - var prev: Entry = null - var n = old - while ((n ne null) && n.hash <= hash) { - if (n.hash == hash && elem == n.key) return false - prev = n - n = n.next - } - val nnode = createNewEntry(elem, hash) - if (prev eq null) { - nnode.next = old - table(idx) = nnode - } else { - nnode.next = prev.next - prev.next = nnode - } - } - contentSize += 1 - true - } - - private[this] def remove0(elem: A, hash: Int): Boolean = { - val idx = index(hash) - table(idx) match { - case null => false - case nd if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - deleteEntry(nd) - contentSize -= 1 - true - case nd => - // find an element that matches - var prev = nd - var next = nd.next - while ((next ne null) && next.hash <= hash) { - if (next.hash == hash && next.key == elem) { - prev.next = next.next - deleteEntry(next) - contentSize -= 1 - return true - } - prev = next - next = next.next - } - false - } - } - - private[this] def growTable(newlen: Int): Unit = { - if (newlen < 0) - throw new RuntimeException(s"new hash table size $newlen exceeds maximum") - var oldlen = table.length - threshold = newThreshold(newlen) - if (size == 0) table = new Array(newlen) - else { - table = java.util.Arrays.copyOf(table, newlen) - val preLow = new Entry(null.asInstanceOf[A], 0) - val preHigh = new Entry(null.asInstanceOf[A], 0) - // Split buckets until the new length has been reached. This could be done more - // efficiently when growing an already filled table to more than double the size. 
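// [Editor's note; not part of the original source.] preLow and preHigh act
// as sentinel list heads: appending through lastLow.next / lastHigh.next
// needs no special case for the first entry of each split chain, and the
// `old ne preLow.next` test below skips the write-back when every entry
// stayed in the low half.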
- while (oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if (old ne null) { - preLow.next = null - preHigh.next = null - var lastLow = preLow - var lastHigh = preHigh - var n = old - while (n ne null) { - val next = n.next - if ((n.hash & oldlen) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if (old ne preLow.next) table(i) = preLow.next - if (preHigh.next ne null) { - table(i + oldlen) = preHigh.next - lastHigh.next = null - } - } - i += 1 - } - oldlen *= 2 - } - } - } - - override def hashCode: Int = { - val setHashIterator = - if (isEmpty) this.iterator - else { - new LinkedHashSetIterator[Any] { - var hash: Int = 0 - override def hashCode: Int = hash - override def extract(nd: Entry): Any = { - hash = unimproveHash(nd.hash) - this - } - } - } - MurmurHash3.unorderedHash(setHashIterator, MurmurHash3.setSeed) - } - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "LinkedHashSet" -} - -/** $factoryInfo - * @define Coll `LinkedHashSet` - * @define coll linked hash set - */ -@SerialVersionUID(3L) -object LinkedHashSet extends IterableFactory[LinkedHashSet] { - - override def empty[sealed A]: LinkedHashSet[A] = new LinkedHashSet[A] - - def from[sealed E](it: collection.IterableOnce[E]^) = { - val newlhs = empty[E] - newlhs.sizeHint(it.knownSize) - newlhs.addAll(it) - newlhs - } - - def newBuilder[sealed A] = new GrowableBuilder(empty[A]) - - /** Class for the linked hash set entry, used internally. - */ - private[mutable] final class Entry[sealed A](val key: A, val hash: Int) { - var earlier: Entry[A] = null - var later: Entry[A] = null - var next: Entry[A] = null - - @tailrec - final def findEntry(k: A, h: Int): Entry[A] = - if (h == hash && k == key) this - else if ((next eq null) || (hash > h)) null - else next.findEntry(k, h) - } - - /** The default load factor for the hash table */ - private[collection] final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - private[collection] final def defaultinitialSize: Int = 16 -} - diff --git a/tests/pos-special/stdlib/collection/mutable/ListMap.scala b/tests/pos-special/stdlib/collection/mutable/ListMap.scala deleted file mode 100644 index 8ddbc264e47b..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ListMap.scala +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.tailrec -import scala.collection.generic.DefaultSerializable -import scala.collection.immutable.List -import language.experimental.captureChecking - -/** A simple mutable map backed by a list, so it preserves insertion order. - * - * @tparam K the type of the keys contained in this list map. - * @tparam V the type of the values assigned to keys in this list map. 
- * - * @define Coll `mutable.ListMap` - * @define coll mutable list map - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") -class ListMap[sealed K, sealed V] - extends AbstractMap[K, V] - with MapOps[K, V, ListMap, ListMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]] - with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] - with MapFactoryDefaults[K, V, ListMap, Iterable] - with DefaultSerializable { - - override def mapFactory: MapFactory[ListMap] = ListMap - - private[this] var elems: List[(K, V)] = List() - private[this] var siz: Int = 0 - - def get(key: K): Option[V] = elems find (_._1 == key) map (_._2) - def iterator: Iterator[(K, V)] = elems.iterator - - final override def addOne(kv: (K, V)) = { - val (e, key0) = remove(kv._1, elems, List()) - elems = (key0, kv._2) :: e - siz += 1; this - } - - final override def subtractOne(key: K) = { elems = remove(key, elems, List())._1; this } - - @tailrec - private def remove(key: K, elems: List[(K, V)], acc: List[(K, V)]): (List[(K, V)], K) = { - if (elems.isEmpty) (acc, key) - else if (elems.head._1 == key) { siz -= 1; (acc ::: elems.tail, elems.head._1) } - else remove(key, elems.tail, elems.head :: acc) - } - - final override def clear(): Unit = { elems = List(); siz = 0 } - - final override def size: Int = siz - override def knownSize: Int = size - override def isEmpty: Boolean = size == 0 - override protected[this] def stringPrefix = "ListMap" -} - -/** $factoryInfo - * @define Coll `mutable.ListMap` - * @define coll mutable list map - */ -@SerialVersionUID(3L) -@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") -object ListMap extends MapFactory[ListMap] { - def empty[sealed K, sealed V]: ListMap[K, V] = new ListMap[K, V] - def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): ListMap[K,V] = Growable.from(empty[K, V], it) - def newBuilder[sealed K, sealed V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V]) -} diff --git a/tests/pos-special/stdlib/collection/mutable/LongMap.scala b/tests/pos-special/stdlib/collection/mutable/LongMap.scala deleted file mode 100644 index 2c757160ec77..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/LongMap.scala +++ /dev/null @@ -1,674 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.collection.generic.DefaultSerializationProxy -import scala.language.implicitConversions -import language.experimental.captureChecking - -/** This class implements mutable maps with `Long` keys based on a hash table with open addressing. - * - * Basic map operations on single entries, including `contains` and `get`, - * are typically substantially faster with `LongMap` than [[HashMap]]. Methods - * that act on the whole map, including `foreach` and `map` are not in - * general expected to be faster than with a generic map, save for those - * that take particular advantage of the internal structure of the map: - * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. 
- * - * Maps with open addressing may become less efficient at lookup after - * repeated addition/removal of elements. Although `LongMap` makes a - * decent attempt to remain efficient regardless, calling `repack` - * on a map that will no longer have elements removed but will be - * used heavily may save both time and storage space. - * - * This map is not intended to contain more than 2^29 entries (approximately - * 500 million). The maximum capacity is 2^30, but performance will degrade - * rapidly as 2^30 is approached. - * - */ -final class LongMap[sealed V] private[collection] (defaultEntry: Long -> V, initialBufferSize: Int, initBlank: Boolean) - extends AbstractMap[Long, V] - with MapOps[Long, V, Map, LongMap[V]] - with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]] - with Serializable { - import LongMap._ - - def this() = this(LongMap.exceptionDefault, 16, true) - - // TODO: override clear() with an optimization more tailored for efficiency. - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]^): LongMap[V] = { - //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? - val b = newSpecificBuilder - b.sizeHint(coll) - b.addAll(coll) - b.result() - } - override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V]) - - /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: Long -> V) = this(defaultEntry, 16, true) - - /** Creates a new `LongMap` with an initial buffer of specified size. - * - * A LongMap can typically contain half as many elements as its buffer size - * before it requires resizing. - */ - def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) - - /** Creates a new `LongMap` with specified default values and initial buffer size. 
*/ - def this(defaultEntry: Long -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) - - private[this] var mask = 0 - private[this] var extraKeys: Int = 0 - private[this] var zeroValue: AnyRef = null - private[this] var minValue: AnyRef = null - private[this] var _size = 0 - private[this] var _vacant = 0 - private[this] var _keys: Array[Long] = null - private[this] var _values: Array[AnyRef] = null - - if (initBlank) defaultInitialize(initialBufferSize) - - private[this] def defaultInitialize(n: Int) = { - mask = - if (n<0) 0x7 - else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 - _keys = new Array[Long](mask+1) - _values = new Array[AnyRef](mask+1) - } - - private[collection] def initializeTo( - m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] - ): Unit = { - mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz - } - - override def size: Int = _size + (extraKeys+1)/2 - override def knownSize: Int = size - override def isEmpty: Boolean = size == 0 - override def empty: LongMap[V] = new LongMap() - - private def imbalanced: Boolean = - (_size + _vacant) > 0.5*mask || _vacant > _size - - private def toIndex(k: Long): Int = { - // Part of the MurmurHash3 32 bit finalizer - val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt - val x = (h ^ (h >>> 16)) * 0x85EBCA6B - (x ^ (x >>> 13)) & mask - } - - private def seekEmpty(k: Long): Int = { - var e = toIndex(k) - var x = 0 - while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - e - } - - private def seekEntry(k: Long): Int = { - var e = toIndex(k) - var x = 0 - var q = 0L - while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - e | MissingBit - } - - private def seekEntryOrOpen(k: Long): Int = { - var e = toIndex(k) - var x = 0 - var q = 0L - while ({ q = _keys(e); if (q==k) return e; q+q != 0}) { - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - if (q == 0) return e | MissingBit - val o = e | MissVacant - while ({ q = _keys(e); if (q==k) return e; q != 0}) { - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - o - } - - override def contains(key: Long): Boolean = { - if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0 - else seekEntry(key) >= 0 - } - - override def get(key: Long): Option[V] = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) None - else if (key == 0) Some(zeroValue.asInstanceOf[V]) - else Some(minValue.asInstanceOf[V]) - } - else { - val i = seekEntry(key) - if (i < 0) None else Some(_values(i).asInstanceOf[V]) - } - } - - override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) default - else if (key == 0) zeroValue.asInstanceOf[V1] - else minValue.asInstanceOf[V1] - } - else { - val i = seekEntry(key) - if (i < 0) default else _values(i).asInstanceOf[V1] - } - } - - override def getOrElseUpdate(key: Long, defaultValue: => V): V = { - if (key == -key) { - val kbits = (key>>>63).toInt + 1 - if ((kbits & extraKeys) == 0) { - val value = defaultValue - extraKeys |= kbits - if (key == 0) zeroValue = value.asInstanceOf[AnyRef] - else minValue = value.asInstanceOf[AnyRef] - value - } - else if (key == 0) zeroValue.asInstanceOf[V] - else minValue.asInstanceOf[V] - } - else { - var i = seekEntryOrOpen(key) - if (i < 0) { - // It is possible that the default value computation was side-effecting - // Our hash table may have resized or 
even contain what we want now
- // (but if it does, we'll replace it)
- val value = {
- val ok = _keys
- val ans = defaultValue
- if (ok ne _keys) {
- i = seekEntryOrOpen(key)
- if (i >= 0) _size -= 1
- }
- ans
- }
- _size += 1
- val j = i & IndexMask
- _keys(j) = key
- _values(j) = value.asInstanceOf[AnyRef]
- if ((i & VacantBit) != 0) _vacant -= 1
- else if (imbalanced) repack()
- value
- }
- else _values(i).asInstanceOf[V]
- }
- }
-
- /** Retrieves the value associated with a key, or the default for that type if none exists
- * (null for AnyRef, 0 for floats and integers).
- *
- * Note: this is the fastest way to retrieve a value that may or
- * may not exist, if the default null/zero is acceptable. For key/value
- * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast.
- */
- def getOrNull(key: Long): V = {
- if (key == -key) {
- if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V]
- else if (key == 0) zeroValue.asInstanceOf[V]
- else minValue.asInstanceOf[V]
- }
- else {
- val i = seekEntry(key)
- if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V]
- }
- }
-
- /** Retrieves the value associated with a key.
- * If the key does not exist in the map, the `defaultEntry` for that key
- * will be returned instead.
- */
- override def apply(key: Long): V = {
- if (key == -key) {
- if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key)
- else if (key == 0) zeroValue.asInstanceOf[V]
- else minValue.asInstanceOf[V]
- }
- else {
- val i = seekEntry(key)
- if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
- }
- }
-
- /** The user-supplied default value for the key. Throws an exception
- * if no other default behavior was specified.
- */
- override def default(key: Long) = defaultEntry(key)
-
- private def repack(newMask: Int): Unit = {
- val ok = _keys
- val ov = _values
- mask = newMask
- _keys = new Array[Long](mask+1)
- _values = new Array[AnyRef](mask+1)
- _vacant = 0
- var i = 0
- while (i < ok.length) {
- val k = ok(i)
- if (k != -k) {
- val j = seekEmpty(k)
- _keys(j) = k
- _values(j) = ov(i)
- }
- i += 1
- }
- }
-
- /** Repacks the contents of this `LongMap` for maximum efficiency of lookup.
- *
- * For maps that undergo a complex creation process with both addition and
- * removal of keys, and then are used heavily with no further removal of
- * elements, calling `repack` after the end of the creation can result in
- * improved performance. Repacking takes time proportional to the number
- * of entries in the map.
- */
- def repack(): Unit = {
- var m = mask
- if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask
- while (m > 8 && 8*_size < m) m = m >>> 1
- repack(m)
- }
-
- override def put(key: Long, value: V): Option[V] = {
- if (key == -key) {
- if (key == 0) {
- val ans = if ((extraKeys&1) == 1) Some(zeroValue.asInstanceOf[V]) else None
- zeroValue = value.asInstanceOf[AnyRef]
- extraKeys |= 1
- ans
- }
- else {
- val ans = if ((extraKeys&2) == 2) Some(minValue.asInstanceOf[V]) else None
- minValue = value.asInstanceOf[AnyRef]
- extraKeys |= 2
- ans
- }
- }
- else {
- val i = seekEntryOrOpen(key)
- if (i < 0) {
- val j = i & IndexMask
- _keys(j) = key
- _values(j) = value.asInstanceOf[AnyRef]
- _size += 1
- if ((i & VacantBit) != 0) _vacant -= 1
- else if (imbalanced) repack()
- None
- }
- else {
- val ans = Some(_values(i).asInstanceOf[V])
- _keys(i) = key
- _values(i) = value.asInstanceOf[AnyRef]
- ans
- }
- }
- }
-
- /** Updates the map to include a new key-value pair.
- * - * This is the fastest way to add an entry to a `LongMap`. - */ - override def update(key: Long, value: V): Unit = { - if (key == -key) { - if (key == 0) { - zeroValue = value.asInstanceOf[AnyRef] - extraKeys |= 1 - } - else { - minValue = value.asInstanceOf[AnyRef] - extraKeys |= 2 - } - } - else { - val i = seekEntryOrOpen(key) - if (i < 0) { - val j = i & IndexMask - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - } - else { - _keys(i) = key - _values(i) = value.asInstanceOf[AnyRef] - } - } - } - - /** Adds a new key/value pair to this map and returns the map. */ - @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") - def +=(key: Long, value: V): this.type = { update(key, value); this } - - /** Adds a new key/value pair to this map and returns the map. */ - @inline final def addOne(key: Long, value: V): this.type = { update(key, value); this } - - @inline override final def addOne(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } - - def subtractOne(key: Long): this.type = { - if (key == -key) { - if (key == 0L) { - extraKeys &= 0x2 - zeroValue = null - } - else { - extraKeys &= 0x1 - minValue = null - } - } - else { - val i = seekEntry(key) - if (i >= 0) { - _size -= 1 - _vacant += 1 - _keys(i) = Long.MinValue - _values(i) = null - } - } - this - } - - def iterator: Iterator[(Long, V)] = new AbstractIterator[(Long, V)] { - private[this] val kz = _keys - private[this] val vz = _values - - private[this] var nextPair: (Long, V) = - if (extraKeys==0) null - else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V]) - else (Long.MinValue, minValue.asInstanceOf[V]) - - private[this] var anotherPair: (Long, V) = - if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V]) - else null - - private[this] var index = 0 - - def hasNext: Boolean = nextPair != null || (index < kz.length && { - var q = kz(index) - while (q == -q) { - index += 1 - if (index >= kz.length) return false - q = kz(index) - } - nextPair = (kz(index), vz(index).asInstanceOf[V]) - index += 1 - true - }) - def next() = { - if (nextPair == null && !hasNext) throw new NoSuchElementException("next") - val ans = nextPair - if (anotherPair != null) { - nextPair = anotherPair - anotherPair = null - } - else nextPair = null - ans - } - } - - // TODO PERF override these for efficiency. See immutable.LongMap for how to organize the code. 
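The `LongMap` code above leans throughout on the identity `key == -key`: in two's-complement `Long` arithmetic, only `0L` and `Long.MinValue` equal their own negation, so exactly those two keys can be kept out of the open-addressed table and tracked in the low two bits of `extraKeys`. A minimal, self-contained sketch of that invariant (an editorial illustration, not part of this patch):

    object ExtraKeysSketch {
      // Only 0L and Long.MinValue satisfy k == -k over Long.
      def isExtraKey(k: Long): Boolean = k == -k

      def main(args: Array[String]): Unit = {
        assert(isExtraKey(0L))
        assert(isExtraKey(Long.MinValue)) // -Long.MinValue overflows back to Long.MinValue
        assert(!isExtraKey(1L) && !isExtraKey(-42L))
        // The bit selecting each extra key, as computed by the methods above:
        assert(((0L >>> 63).toInt + 1) == 1)            // bit 0 marks key 0L
        assert(((Long.MinValue >>> 63).toInt + 1) == 2) // bit 1 marks Long.MinValue
      }
    }
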
- override def keysIterator: Iterator[Long] = super.keysIterator - override def valuesIterator: Iterator[V] = super.valuesIterator - - override def foreach[U](f: ((Long,V)) => U): Unit = { - if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) - if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f((k, _values(i).asInstanceOf[V])) - } - i += 1 - } - } - - override def foreachEntry[U](f: (Long,V) => U): Unit = { - if ((extraKeys & 1) == 1) f(0L, zeroValue.asInstanceOf[V]) - if ((extraKeys & 2) == 2) f(Long.MinValue, minValue.asInstanceOf[V]) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f(k, _values(i).asInstanceOf[V]) - } - i += 1 - } - } - - override def clone(): LongMap[V] = { - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = java.util.Arrays.copyOf(_values, _values.length) - val lm = new LongMap[V](defaultEntry, 1, false) - lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) - lm - } - - @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = { - val lm = clone().asInstanceOf[LongMap[V1]] - lm += kv - lm - } - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [sealed V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { - val m = this + elem1 + elem2 - if(elems.isEmpty) m else m.concat(elems) - } - - override def concat[sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = { - val lm = clone().asInstanceOf[LongMap[V1]] - xs.iterator.foreach(kv => lm += kv) - lm - } - - override def ++ [sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(xs) - - @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") - override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = - clone().asInstanceOf[LongMap[V1]].addOne(key, value) - - /** Applies a function to all keys of this map. */ - def foreachKey[A](f: Long => A): Unit = { - if ((extraKeys & 1) == 1) f(0L) - if ((extraKeys & 2) == 2) f(Long.MinValue) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f(k) - } - i += 1 - } - } - - /** Applies a function to all values of this map. */ - def foreachValue[A](f: V => A): Unit = { - if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) - if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f(_values(i).asInstanceOf[V]) - } - i += 1 - } - } - - /** Creates a new `LongMap` with different values. - * Unlike `mapValues`, this method generates a new - * collection immediately. 
- */ - def mapValuesNow[sealed V1](f: V => V1): LongMap[V1] = { - val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null - val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null - val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = new Array[AnyRef](_values.length) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz) - lm - } - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. - */ - @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") - @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. - */ - def transformValuesInPlace(f: V => V): this.type = { - if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] - if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - this - } - - def map[sealed V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) - - def flatMap[sealed V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) - - def collect[sealed V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = - strictOptimizedCollect(LongMap.newBuilder[V2], pf) - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this) - - override protected[this] def className = "LongMap" -} - -object LongMap { - private final val IndexMask = 0x3FFFFFFF - private final val MissingBit = 0x80000000 - private final val VacantBit = 0x40000000 - private final val MissVacant = 0xC0000000 - - private val exceptionDefault: Long -> Nothing = (k: Long) => throw new NoSuchElementException(k.toString) - - /** A builder for instances of `LongMap`. - * - * This builder can be reused to create multiple instances. - */ - final class LongMapBuilder[sealed V] extends ReusableBuilder[(Long, V), LongMap[V]] { - private[collection] var elems: LongMap[V] = new LongMap[V] - override def addOne(entry: (Long, V)): this.type = { - elems += entry - this - } - def clear(): Unit = elems = new LongMap[V] - def result(): LongMap[V] = elems - override def knownSize: Int = elems.knownSize - } - - /** Creates a new `LongMap` with zero or more key/value pairs. */ - def apply[sealed V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) - - private def buildFromIterableOnce[sealed V](elems: IterableOnce[(Long, V)]^): LongMap[V] = { - var sz = elems.knownSize - if(sz < 0) sz = 4 - val lm = new LongMap[V](sz * 2) - elems.iterator.foreach{ case (k,v) => lm(k) = v } - if (lm.size < (sz>>3)) lm.repack() - lm - } - - /** Creates a new empty `LongMap`. 
*/ - def empty[sealed V]: LongMap[V] = new LongMap[V] - - /** Creates a new empty `LongMap` with the supplied default */ - def withDefault[sealed V](default: Long -> V): LongMap[V] = new LongMap[V](default) - - /** Creates a new `LongMap` from an existing source collection. A source collection - * which is already a `LongMap` gets cloned. - * - * @param source Source collection - * @tparam A the type of the collection’s elements - * @return a new `LongMap` with the elements of `source` - */ - def from[sealed V](source: IterableOnce[(Long, V)]^): LongMap[V] = source match { - case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]] - case _ => buildFromIterableOnce(source) - } - - def newBuilder[sealed V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] - - /** Creates a new `LongMap` from arrays of keys and values. - * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. - */ - def fromZip[sealed V](keys: Array[Long], values: Array[V]): LongMap[V] = { - val sz = math.min(keys.length, values.length) - val lm = new LongMap[V](sz * 2) - var i = 0 - while (i < sz) { lm(keys(i)) = values(i); i += 1 } - if (lm.size < (sz>>3)) lm.repack() - lm - } - - /** Creates a new `LongMap` from keys and values. - * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. - */ - def fromZip[sealed V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { - val sz = math.min(keys.size, values.size) - val lm = new LongMap[V](sz * 2) - val ki = keys.iterator - val vi = values.iterator - while (ki.hasNext && vi.hasNext) lm(ki.next()) = vi.next() - if (lm.size < (sz >> 3)) lm.repack() - lm - } - - implicit def toFactory[sealed V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] - - @SerialVersionUID(3L) - private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) - def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] - } - - implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] - private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) - def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] - } - - implicit def iterableFactory[sealed V]: Factory[(Long, V), LongMap[V]] = toFactory(this) - implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) -} diff --git a/tests/pos-special/stdlib/collection/mutable/Map.scala b/tests/pos-special/stdlib/collection/mutable/Map.scala deleted file mode 100644 index dab64ddb1f58..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/Map.scala +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package mutable - -import language.experimental.captureChecking - -/** Base type of mutable Maps */ -trait Map[K, V] - extends Iterable[(K, V)] - with collection.Map[K, V] - with MapOps[K, V, Map, Map[K, V]] - with Growable[(K, V)] - with Shrinkable[K] - with MapFactoryDefaults[K, V, Map, Iterable] { - - override def mapFactory: scala.collection.MapFactory[Map] = Map - - /* - //TODO consider keeping `remove` because it returns the removed entry - @deprecated("Use subtract or -= instead of remove", "2.13.0") - def remove(key: K): Option[V] = { - val old = get(key) - if(old.isDefined) subtract(key) - old - } - */ - - /** The same map with a given default function. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault(d: K -> V): Map[K, V] = new Map.WithDefault[K, V](this, d) - - /** The same map with a given default value. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue(d: V): Map[K, V] = new Map.WithDefault[K, V](this, x => d) -} - -/** - * @define coll mutable map - * @define Coll `mutable.Map` - */ -trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] - extends IterableOps[(K, V), Iterable, C] - with collection.MapOps[K, V, CC, C] - with Cloneable[C] - with Builder[(K, V), C] - with Growable[(K, V)] - with Shrinkable[K] - with Pure { - - def result(): C = coll - - @deprecated("Use - or remove on an immutable Map", "2.13.0") - final def - (key: K): C = clone() -= key - - @deprecated("Use -- or removeAll on an immutable Map", "2.13.0") - final def - (key1: K, key2: K, keys: K*): C = clone() -= key1 -= key2 --= keys - - /** Adds a new key/value pair to this map and optionally returns previously bound value. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * - * @param key the key to update - * @param value the new value - * @return an option value containing the value associated with the key - * before the `put` operation was executed, or `None` if `key` - * was not defined in the map before. - */ - def put(key: K, value: V): Option[V] = { - val r = get(key) - update(key, value) - r - } - - /** Adds a new key/value pair to this map. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * - * @param key The key to update - * @param value The new value - */ - def update(key: K, value: V): Unit = { coll += ((key, value)) } - - /** - * Update a mapping for the specified key and its current optionally-mapped value - * (`Some` if there is current mapping, `None` if not). - * - * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. - * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). 
- * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. - * - * @param key the key value - * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping - * @return the new value associated with the specified key - */ - def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - val previousValue = this.get(key) - val nextValue = remappingFunction(previousValue) - (previousValue, nextValue) match { - case (None, None) => // do nothing - case (Some(_), None) => this.remove(key) - case (_, Some(v)) => this.update(key,v) - } - nextValue - } - - /** If given key is already in this map, returns associated value. - * - * Otherwise, computes value from given expression `op`, stores with key - * in map and returns that value. - * - * Concurrent map implementations may evaluate the expression `op` - * multiple times, or may evaluate `op` without inserting the result. - * - * @param key the key to test - * @param op the computation yielding the value to associate with `key`, if - * `key` is previously unbound. - * @return the value associated with key (either previously or as a result - * of executing the method). - */ - def getOrElseUpdate(key: K, op: => V): V = - get(key) match { - case Some(v) => v - case None => val d = op; this(key) = d; d - } - - /** Removes a key from this map, returning the value associated previously - * with that key as an option. - * @param key the key to be removed - * @return an option value containing the value associated previously with `key`, - * or `None` if `key` was not defined in the map before. - */ - def remove(key: K): Option[V] = { - val r = get(key) - if (r.isDefined) this -= key - r - } - - def clear(): Unit = { keysIterator foreach -= } - - override def clone(): C = empty ++= this - - @deprecated("Use filterInPlace instead", "2.13.0") - @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p) - - /** Retains only those mappings for which the predicate - * `p` returns `true`. - * - * @param p The test predicate - */ - def filterInPlace(p: (K, V) => Boolean): this.type = { - if (!isEmpty) this match { - case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p) - case _ => - val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException - val arrayLength = array.length - var i = 0 - while (i < arrayLength) { - val (k, v) = array(i).asInstanceOf[(K, V)] - if (!p(k, v)) { - this -= k - } - i += 1 - } - } - this - } - - @deprecated("Use mapValuesInPlace instead", "2.13.0") - @inline final def transform(f: (K, V) => V): this.type = mapValuesInPlace(f) - - /** Applies a transformation function to all values contained in this map. - * The transformation function produces new values from existing keys - * associated values. - * - * @param f the transformation to apply - * @return the map itself. 
- */ - def mapValuesInPlace(f: (K, V) => V): this.type = { - if (!isEmpty) this match { - case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f) - case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f) - case _ => - val array = this.toArray[Any] - val arrayLength = array.length - var i = 0 - while (i < arrayLength) { - val (k, v) = array(i).asInstanceOf[(K, V)] - update(k, f(k, v)) - i += 1 - } - } - this - } - - @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") - def updated[V1 >: V](key: K, value: V1): CC[K, V1] = - clone().asInstanceOf[CC[K, V1]].addOne((key, value)) - - override def knownSize: Int = super[IterableOps].knownSize -} - -/** - * $factoryInfo - * @define coll mutable map - * @define Coll `mutable.Map` - */ -@SerialVersionUID(3L) -object Map extends MapFactory.Delegate[Map](HashMap) { - - @SerialVersionUID(3L) - class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K -> V) - extends AbstractMap[K, V] - with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { - - override def default(key: K): V = defaultValue(key) - - def iterator: scala.collection.Iterator[(K, V)] = underlying.iterator - override def isEmpty: Boolean = underlying.isEmpty - override def knownSize: Int = underlying.knownSize - override def mapFactory: MapFactory[Map] = underlying.mapFactory - - override def clear(): Unit = underlying.clear() - - def get(key: K): Option[V] = underlying.get(key) - - def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } - - def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } - - override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): Map[K, V2] = - underlying.concat(suffix).withDefault(defaultValue) - - override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = - new WithDefault[K, V](mapFactory.from(coll), defaultValue) - - override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = - Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) - } - -} - -/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ -abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala deleted file mode 100644 index 281631c92298..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.mutable - -import language.experimental.captureChecking - -/** A trait for mutable maps with multiple values assigned to a key. - * - * This class is typically used as a mixin. It turns maps which map `K` - * to `Set[V]` objects into multimaps that map `K` to `V` objects. 
- * - * @example {{{ - * // first import all necessary types from package `collection.mutable` - * import collection.mutable.{ HashMap, MultiMap, Set } - * - * // to create a `MultiMap` the easiest way is to mixin it into a normal - * // `Map` instance - * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String] - * - * // to add key-value pairs to a multimap it is important to use - * // the method `addBinding` because standard methods like `+` will - * // overwrite the complete key-value pair instead of adding the - * // value to the existing key - * mm.addBinding(1, "a") - * mm.addBinding(2, "b") - * mm.addBinding(1, "c") - * - * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))` - * - * // to check if the multimap contains a value there is method - * // `entryExists`, which allows to traverse the including set - * mm.entryExists(1, _ == "a") == true - * mm.entryExists(1, _ == "b") == false - * mm.entryExists(2, _ == "b") == true - * - * // to remove a previous added value there is the method `removeBinding` - * mm.removeBinding(1, "a") - * mm.entryExists(1, _ == "a") == false - * }}} - * - * @define coll multimap - * @define Coll `MultiMap` - */ -@deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0") -trait MultiMap[K, sealed V] extends Map[K, Set[V]] { - /** Creates a new set. - * - * Classes that use this trait as a mixin can override this method - * to have the desired implementation of sets assigned to new keys. - * By default this is `HashSet`. - * - * @return An empty set of values of type `V`. - */ - protected def makeSet: Set[V] = new HashSet[V] - - /** Assigns the specified `value` to a specified `key`. If the key - * already has a binding to equal to `value`, nothing is changed; - * otherwise a new binding is added for that `key`. - * - * @param key The key to which to bind the new value. - * @param value The value to bind to the key. - * @return A reference to this multimap. - */ - def addBinding(key: K, value: V): this.type = { - get(key) match { - case None => - val set = makeSet - set += value - this(key) = set - case Some(set) => - set += value - } - this - } - - /** Removes the binding of `value` to `key` if it exists, otherwise this - * operation doesn't have any effect. - * - * If this was the last value assigned to the specified key, the - * set assigned to that key will be removed as well. - * - * @param key The key of the binding. - * @param value The value to remove. - * @return A reference to this multimap. - */ - def removeBinding(key: K, value: V): this.type = { - get(key) match { - case None => - case Some(set) => - set -= value - if (set.isEmpty) this -= key - } - this - } - - /** Checks if there exists a binding to `key` such that it satisfies the predicate `p`. - * - * @param key The key for which the predicate is checked. - * @param p The predicate which a value assigned to the key must satisfy. - * @return A boolean if such a binding exists - */ - def entryExists(key: K, p: V => Boolean): Boolean = get(key) match { - case None => false - case Some(set) => set exists p - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala deleted file mode 100644 index f1deb25b6a8a..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import java.lang.Integer.numberOfLeadingZeros -import java.util.ConcurrentModificationException -import scala.collection.generic.DefaultSerializable -import language.experimental.captureChecking - -/** - * @define Coll `OpenHashMap` - * @define coll open hash map - */ -@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") -@SerialVersionUID(3L) -object OpenHashMap extends MapFactory[OpenHashMap] { - - def empty[sealed K, sealed V] = new OpenHashMap[K, V] - def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): OpenHashMap[K,V] = empty ++= it - - def newBuilder[sealed K, sealed V]: Builder[(K, V), OpenHashMap[K,V]] = - new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty) - - /** A hash table entry. - * - * The entry is occupied if and only if its `value` is a `Some`; - * deleted if and only if its `value` is `None`. - * If its `key` is not the default value of type `Key`, the entry is occupied. - * If the entry is occupied, `hash` contains the hash value of `key`. - */ - final private class OpenEntry[sealed Key, sealed Value](var key: Key, - var hash: Int, - var value: Option[Value]) - - private[mutable] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) -} - -/** A mutable hash map based on an open addressing method. The precise scheme is - * undefined, but it should make a reasonable effort to ensure that an insert - * with consecutive hash codes is not unnecessarily penalised. In particular, - * mappings of consecutive integer keys should work without significant - * performance loss. - * - * @tparam Key type of the keys in this map. - * @tparam Value type of the values in this map. - * @param initialSize the initial size of the internal hash table. - * - * @define Coll `OpenHashMap` - * @define coll open hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") -class OpenHashMap[sealed Key, sealed Value](initialSize : Int) - extends AbstractMap[Key, Value] - with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]] - with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]] - with MapFactoryDefaults[Key, Value, OpenHashMap, Iterable] - with DefaultSerializable { - - import OpenHashMap.OpenEntry - private type Entry = OpenEntry[Key, Value] - - /** A default constructor creates a hashmap with initial size `8`. - */ - def this() = this(8) - - override def mapFactory: MapFactory[OpenHashMap] = OpenHashMap - - private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize) - - private[this] var mask = actualInitialSize - 1 - - /** The hash table. - * - * The table's entries are initialized to `null`, indication of an empty slot. - * A slot is either deleted or occupied if and only if the entry is non-`null`. - */ - private[this] var table = new Array[Entry](actualInitialSize) - - private[this] var _size = 0 - private[this] var deleted = 0 - - // Used for tracking inserts so that iterators can determine if concurrent modification has occurred. 
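`nextPositivePowerOfTwo` above exploits the fact that the JVM masks an `Int` shift distance to its low five bits, so shifting by `-numberOfLeadingZeros(target - 1)` behaves like shifting by `(32 - numberOfLeadingZeros(target - 1)) & 31`. A hedged sketch of the resulting rounding (illustration only, not part of this patch):

    import java.lang.Integer.numberOfLeadingZeros

    object NextPowerOfTwoSketch {
      def nextPositivePowerOfTwo(target: Int): Int =
        1 << -numberOfLeadingZeros(target - 1)

      def main(args: Array[String]): Unit = {
        assert(nextPositivePowerOfTwo(1) == 1)  // numberOfLeadingZeros(0) == 32; a shift of -32 acts as 0
        assert(nextPositivePowerOfTwo(8) == 8)  // powers of two are preserved
        assert(nextPositivePowerOfTwo(9) == 16) // everything else rounds up
      }
    }

The `modCount` field declared next is the counter those iterators consult to detect concurrent modification.
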
- private[this] var modCount = 0 - - override def size = _size - override def knownSize: Int = size - private[this] def size_=(s : Int): Unit = _size = s - override def isEmpty: Boolean = _size == 0 - /** Returns a mangled hash code of the provided key. */ - protected def hashOf(key: Key) = { - var h = key.## - h ^= ((h >>> 20) ^ (h >>> 12)) - h ^ (h >>> 7) ^ (h >>> 4) - } - - /** Increase the size of the table. - * Copy only the occupied slots, effectively eliminating the deleted slots. - */ - private[this] def growTable() = { - val oldSize = mask + 1 - val newSize = 4 * oldSize - val oldTable = table - table = new Array[Entry](newSize) - mask = newSize - 1 - oldTable.foreach( entry => - if (entry != null && entry.value != None) - table(findIndex(entry.key, entry.hash)) = entry ) - deleted = 0 - } - - /** Return the index of the first slot in the hash table (in probe order) - * that is, in order of preference, either occupied by the given key, deleted, or empty. - * - * @param hash hash value for `key` - */ - private[this] def findIndex(key: Key, hash: Int): Int = { - var index = hash & mask - var j = 0 - - // Index of the first slot containing a deleted entry, or -1 if none found yet - var firstDeletedIndex = -1 - - var entry = table(index) - while (entry != null) { - if (entry.hash == hash && entry.key == key && entry.value != None) - return index - - if (firstDeletedIndex == -1 && entry.value == None) - firstDeletedIndex = index - - j += 1 - index = (index + j) & mask - entry = table(index) - } - - if (firstDeletedIndex == -1) index else firstDeletedIndex - } - - // TODO refactor `put` to extract `findOrAddEntry` and implement this in terms of that to avoid Some boxing. - override def update(key: Key, value: Value): Unit = put(key, value) - - @deprecatedOverriding("addOne should not be overridden in order to maintain consistency with put.", "2.11.0") - def addOne (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this } - - @deprecatedOverriding("subtractOne should not be overridden in order to maintain consistency with remove.", "2.11.0") - def subtractOne (key: Key): this.type = { remove(key); this } - - override def put(key: Key, value: Value): Option[Value] = - put(key, hashOf(key), value) - - private def put(key: Key, hash: Int, value: Value): Option[Value] = { - if (2 * (size + deleted) > mask) growTable() - val index = findIndex(key, hash) - val entry = table(index) - if (entry == null) { - table(index) = new OpenEntry(key, hash, Some(value)) - modCount += 1 - size += 1 - None - } else { - val res = entry.value - if (entry.value == None) { - entry.key = key - entry.hash = hash - size += 1 - deleted -= 1 - modCount += 1 - } - entry.value = Some(value) - res - } - } - - /** Delete the hash table slot contained in the given entry. */ - @`inline` - private[this] def deleteSlot(entry: Entry) = { - entry.key = null.asInstanceOf[Key] - entry.hash = 0 - entry.value = None - - size -= 1 - deleted += 1 - } - - override def remove(key : Key): Option[Value] = { - val entry = table(findIndex(key, hashOf(key))) - if (entry != null && entry.value != None) { - val res = entry.value - deleteSlot(entry) - res - } else None - } - - def get(key : Key) : Option[Value] = { - val hash = hashOf(key) - var index = hash & mask - var entry = table(index) - var j = 0 - while(entry != null){ - if (entry.hash == hash && - entry.key == key){ - return entry.value - } - - j += 1 - index = (index + j) & mask - entry = table(index) - } - None - } - - /** An iterator over the elements of this map. 
Use of this iterator follows - * the same contract for concurrent modification as the foreach method. - * - * @return the iterator - */ - def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] { - override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get) - } - - override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] { - override protected def nextResult(node: Entry): Key = node.key - } - override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] { - override protected def nextResult(node: Entry): Value = node.value.get - } - - private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] { - private[this] var index = 0 - private[this] val initialModCount = modCount - - private[this] def advance(): Unit = { - if (initialModCount != modCount) throw new ConcurrentModificationException - while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1 - } - - def hasNext = {advance(); index <= mask } - - def next() = { - advance() - val result = table(index) - index += 1 - nextResult(result) - } - protected def nextResult(node: Entry): A - } - - override def clone() = { - val it = new OpenHashMap[Key, Value] - foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get)) - it - } - - /** Loop over the key, value mappings of this map. - * - * The behaviour of modifying the map during an iteration is as follows: - * - Deleting a mapping is always permitted. - * - Changing the value of mapping which is already present is permitted. - * - Anything else is not permitted. It will usually, but not always, throw an exception. - * - * @tparam U The return type of the specified function `f`, return result of which is ignored. - * @param f The function to apply to each key, value mapping. - */ - override def foreach[U](f : ((Key, Value)) => U): Unit = { - val startModCount = modCount - foreachUndeletedEntry(entry => { - if (modCount != startModCount) throw new ConcurrentModificationException - f((entry.key, entry.value.get))} - ) - } - override def foreachEntry[U](f : (Key, Value) => U): Unit = { - val startModCount = modCount - foreachUndeletedEntry(entry => { - if (modCount != startModCount) throw new ConcurrentModificationException - f(entry.key, entry.value.get)} - ) - } - - private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = { - table.foreach(entry => if (entry != null && entry.value != None) f(entry)) - } - - override def mapValuesInPlace(f : (Key, Value) => Value): this.type = { - foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))) - this - } - - override def filterInPlace(f : (Key, Value) => Boolean): this.type = { - foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry)) - this - } - - override protected[this] def stringPrefix = "OpenHashMap" -} diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala deleted file mode 100644 index a395fac4a44a..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala +++ /dev/null @@ -1,403 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection -package mutable - -import scala.collection.generic.DefaultSerializationProxy -import scala.math.Ordering -import language.experimental.captureChecking - -/** A heap-based priority queue. - * - * To prioritize elements of type `A` there must be an implicit - * `Ordering[A]` available at creation. Elements are retrieved - * in priority order by using [[dequeue]] or [[dequeueAll]]. - * - * If multiple elements have the same priority as determined by the ordering for this - * `PriorityQueue`, no guarantees are made regarding the order in which those elements - * are returned by `dequeue` or `dequeueAll`. In particular, that means this - * class does not guarantee first-in-first-out behavior, as may be - * incorrectly inferred from the fact that this data structure is - * called a "queue". - * - * Only the `dequeue` and `dequeueAll` methods will return elements in priority - * order (while removing elements from the heap). Standard collection methods - * such as `drop`, `iterator`, `toList` and `toString` use an arbitrary - * iteration order: they will traverse the heap or remove elements - * in whichever order seems most convenient. - * - * Therefore, printing a `PriorityQueue` will not show elements in priority order, - * though the highest-priority element will be printed first. - * To print the elements in order, it's necessary to `dequeue` them. - * To do this non-destructively, duplicate the `PriorityQueue` first; - * the `clone` method is a suitable way to obtain a disposable copy. - * - * Client keys are assumed to be immutable. Mutating keys may violate - * the invariant of the underlying heap-ordered tree. Note that [[clone]] - * does not rebuild the underlying tree. - * - * {{{ - * scala> val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) - * val pq: scala.collection.mutable.PriorityQueue[Int] = PriorityQueue(7, 3, 5, 1, 2) - * - * scala> pq.toList // also not in order - * val res0: List[Int] = List(7, 3, 5, 1, 2) - * - * scala> pq.clone.dequeueAll - * val res1: Seq[Int] = ArraySeq(7, 5, 3, 2, 1) - * }}} - * - * @tparam A type of the elements in this priority queue. - * @param ord implicit ordering used to compare the elements of type `A`. - * - * @define Coll PriorityQueue - * @define coll priority queue - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed class PriorityQueue[sealed A](implicit val ord: Ordering[A]) - extends AbstractIterable[A] - with Iterable[A] - with IterableOps[A, Iterable, PriorityQueue[A]] - with StrictOptimizedIterableOps[A, Iterable, PriorityQueue[A]] - with Builder[A, PriorityQueue[A]] - with Cloneable[PriorityQueue[A]] - with Growable[A] - with Serializable -{ - - private class ResizableArrayAccess[sealed A0] extends ArrayBuffer[A0] { - override def mapInPlace(f: A0 => A0): this.type = { - var i = 1 // see "we do not use array(0)" comment below (???) - val siz = this.size - while (i < siz) { this(i) = f(this(i)); i += 1 } - this - } - - def p_size0 = size0 - def p_size0_=(s: Int) = size0 = s - def p_array = array - def p_ensureSize(n: Int) = super.ensureSize(n) - def p_ensureAdditionalSize(n: Int) = super.ensureAdditionalSize(n) - def p_swap(a: Int, b: Int): Unit = { - val h = array(a) - array(a) = array(b) - array(b) = h - } - } - - private val resarr = new ResizableArrayAccess[A] - - resarr.p_size0 += 1 // we do not use array(0) TODO: explain -- what is the first element even for? 
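Answering the `TODO` above in brief: slot 0 is deliberately left unused so the heap can use 1-based indexing, under which the parent of index `k` is `k / 2` and its children are `2*k` and `2*k + 1`, with no off-by-one corrections. A small sketch of that arithmetic (an illustration, not from this file):

    object HeapIndexSketch {
      // 1-based heap layout: root at index 1, slot 0 unused.
      def parent(k: Int): Int = k / 2
      def leftChild(k: Int): Int = 2 * k
      def rightChild(k: Int): Int = 2 * k + 1

      def main(args: Array[String]): Unit = {
        assert(parent(leftChild(5)) == 5 && parent(rightChild(5)) == 5)
        // This is exactly what fixUp (comparing as(k) with as(k / 2)) and
        // fixDown (visiting 2*k and 2*k + 1 while n >= 2*k) rely on below.
      }
    }
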
- def length: Int = resarr.length - 1 // adjust length accordingly - override def size: Int = length - override def knownSize: Int = length - override def isEmpty: Boolean = resarr.p_size0 < 2 - - // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) - override protected def fromSpecific(coll: scala.collection.IterableOnce[A]^): PriorityQueue[A] = PriorityQueue.from(coll) - override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder - override def empty: PriorityQueue[A] = PriorityQueue.empty - - def mapInPlace(f: A => A): this.type = { - resarr.mapInPlace(f) - heapify(1) - this - } - - def result() = this - - private def toA(x: AnyRef): A = x.asInstanceOf[A] - protected def fixUp(as: Array[AnyRef], m: Int): Unit = { - var k: Int = m - // use `ord` directly to avoid allocating `OrderingOps` - while (k > 1 && ord.lt(toA(as(k / 2)), toA(as(k)))) { - resarr.p_swap(k, k / 2) - k = k / 2 - } - } - - protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = { - // returns true if any swaps were done (used in heapify) - var k: Int = m - while (n >= 2 * k) { - var j = 2 * k - // use `ord` directly to avoid allocating `OrderingOps` - if (j < n && ord.lt(toA(as(j)), toA(as(j + 1)))) - j += 1 - if (ord.gteq(toA(as(k)), toA(as(j)))) - return k != m - else { - val h = as(k) - as(k) = as(j) - as(j) = h - k = j - } - } - k != m - } - - /** Inserts a single element into the priority queue. - * - * @param elem the element to insert. - * @return this $coll. - */ - def addOne(elem: A): this.type = { - resarr.p_ensureAdditionalSize(1) - resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] - fixUp(resarr.p_array, resarr.p_size0) - resarr.p_size0 += 1 - this - } - - override def addAll(xs: IterableOnce[A]^): this.type = { - val from = resarr.p_size0 - for (x <- xs.iterator) unsafeAdd(x) - heapify(from) - this - } - - private def unsafeAdd(elem: A): Unit = { - // like += but skips fixUp, which breaks the ordering invariant - // a series of unsafeAdds MUST be followed by heapify - resarr.p_ensureAdditionalSize(1) - resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] - resarr.p_size0 += 1 - } - - private def heapify(from: Int): Unit = { - // elements at indices 1..from-1 were already in heap order before any adds - // elements at indices from..n are newly added, their order must be fixed - val n = length - - if (from <= 2) { - // no pre-existing order to maintain, do the textbook heapify algorithm - for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n) - } - else if (n - from < 4) { - // for very small adds, doing the simplest fix is faster - for (i <- from to n) fixUp(resarr.p_array, i) - } - else { - var min = from/2 // tracks the minimum element in the queue - val queue = scala.collection.mutable.Queue[Int](min) - - // do fixDown on the parents of all the new elements - // except the parent of the first new element, which is in the queue - // (that parent is treated specially because it might be the root) - for (i <- n/2 until min by -1) { - if (fixDown(resarr.p_array, i, n)) { - // there was a swap, so also need to fixDown i's parent - val parent = i/2 - if (parent < min) { // make sure same parent isn't added twice - min = parent - queue += parent - } - } - } - - while (queue.nonEmpty) { - val i = queue.dequeue() - if (fixDown(resarr.p_array, i, n)) { - val parent = i/2 - if (parent < min && parent > 0) { - // the "parent > 0" is to avoid adding the parent of the root - min = parent - queue += 
parent - } - } - } - } - } - - /** Adds all elements provided by a `IterableOnce` object - * into the priority queue. - * - * @param xs a iterable object. - * @return a new priority queue containing elements of both `xs` and `this`. - */ - def ++(xs: IterableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - def enqueue(elems: A*): Unit = { this ++= elems } - - /** Returns the element with the highest priority in the queue, - * and removes this element from the queue. - * - * @throws NoSuchElementException - * @return the element with the highest priority. - */ - def dequeue(): A = - if (resarr.p_size0 > 1) { - resarr.p_size0 = resarr.p_size0 - 1 - val result = resarr.p_array(1) - resarr.p_array(1) = resarr.p_array(resarr.p_size0) - resarr.p_array(resarr.p_size0) = null // erase reference from array - fixDown(resarr.p_array, 1, resarr.p_size0 - 1) - toA(result) - } else - throw new NoSuchElementException("no element to remove from heap") - - def dequeueAll[A1 >: A]: immutable.Seq[A1] = { - val b = ArrayBuilder.make[Any] - b.sizeHint(size) - while (nonEmpty) { - b += dequeue() - } - immutable.ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[immutable.ArraySeq[A1]] - } - - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. - */ - override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty") - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - def clear(): Unit = { - resarr.clear() - resarr.p_size0 = 1 - } - - /** Returns an iterator which yields all the elements. - * - * Note: The order of elements returned is undefined. - * If you want to traverse the elements in priority queue - * order, use `clone().dequeueAll.iterator`. - * - * @return an iterator over all the elements. - */ - override def iterator: Iterator[A] = resarr.iterator.drop(1) - - /** Returns the reverse of this priority queue. The new priority queue has - * the same elements as the original, but the opposite ordering. - * - * For example, the element with the highest priority in `pq` has the lowest - * priority in `pq.reverse`, and vice versa. - * - * Ties are handled arbitrarily. Elements with equal priority may or - * may not be reversed with respect to each other. - * - * @return the reversed priority queue. - */ - def reverse: PriorityQueue[A] = { - val revq = new PriorityQueue[A]()(ord.reverse) - // copy the existing data into the new array backwards - // this won't put it exactly into the correct order, - // but will require less fixing than copying it in - // the original order - val n = resarr.p_size0 - revq.resarr.p_ensureSize(n) - revq.resarr.p_size0 = n - val from = resarr.p_array - val to = revq.resarr.p_array - for (i <- 1 until n) to(i) = from(n-i) - revq.heapify(1) - revq - } - - - /** Returns an iterator which yields all the elements in the reverse order - * than that returned by the method `iterator`. - * - * Note: The order of elements returned is undefined. - * - * @return an iterator over all elements sorted in descending order. 
- */ - def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private[this] var i = resarr.p_size0 - 1 - def hasNext: Boolean = i >= 1 - def next(): A = { - val n = resarr.p_array(i) - i -= 1 - toA(n) - } - } - - /** Returns a regular queue containing the same elements. - * - * Note: the order of elements is undefined. - */ - def toQueue: Queue[A] = new Queue[A] ++= this.iterator - - /** Returns a textual representation of a queue as a string. - * - * @return the string representation of this queue. - */ - override def toString() = toList.mkString("PriorityQueue(", ", ", ")") - - /** Converts this $coll to a list. - * - * Note: the order of elements is undefined. - * - * @return a list containing all elements of this $coll. - */ - override def toList: immutable.List[A] = immutable.List.from(this.iterator) - - /** This method clones the priority queue. - * - * @return a priority queue with the same elements. - */ - override def clone(): PriorityQueue[A] = { - val pq = new PriorityQueue[A] - val n = resarr.p_size0 - pq.resarr.p_ensureSize(n) - java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1) - pq.resarr.p_size0 = n - pq - } - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) - if (copied > 0) { - Array.copy(resarr.p_array, 1, xs, start, copied) - } - copied - } - - @deprecated("Use `PriorityQueue` instead", "2.13.0") - def orderedCompanion: PriorityQueue.type = PriorityQueue - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(PriorityQueue.evidenceIterableFactory[A], this) - - override protected[this] def className = "PriorityQueue" -} - - -@SerialVersionUID(3L) -object PriorityQueue extends SortedIterableFactory[PriorityQueue] { - def newBuilder[sealed A : Ordering]: Builder[A, PriorityQueue[A]] = { - new Builder[A, PriorityQueue[A]] { - val pq = new PriorityQueue[A] - def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } - def result(): PriorityQueue[A] = { pq.heapify(1); pq } - def clear(): Unit = pq.clear() - } - } - - def empty[sealed A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] - - def from[sealed E : Ordering](it: IterableOnce[E]^): PriorityQueue[E] = { - val b = newBuilder[E] - b ++= it - b.result() - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala deleted file mode 100644 index a578b0742009..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/Queue.scala +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.nowarn -import scala.collection.generic.DefaultSerializable -import language.experimental.captureChecking - - -/** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. 
- * - * @define Coll `mutable.Queue` - * @define coll mutable queue - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class Queue[sealed A] protected (array: Array[AnyRef], start: Int, end: Int) - extends ArrayDeque[A](array, start, end) - with IndexedSeqOps[A, Queue, Queue[A]] - with StrictOptimizedSeqOps[A, Queue, Queue[A]] - with IterableFactoryDefaults[A, Queue] - with ArrayDequeOps[A, Queue, Queue[A]] - with Cloneable[Queue[A]] - with DefaultSerializable { - - def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = - this(ArrayDeque.alloc(initialSize), start = 0, end = 0) - - override def iterableFactory: SeqFactory[Queue] = Queue - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "Queue" - - /** - * Add elements to the end of this queue - * - * @param elem - * @return this - */ - def enqueue(elem: A): this.type = this += elem - - /** Enqueue two or more elements at the end of the queue. The last element - * of the sequence will be on end of the queue. - * - * @param elems the element sequence. - * @return this - */ - def enqueue(elem1: A, elem2: A, elems: A*): this.type = enqueue(elem1).enqueue(elem2).enqueueAll(elems) - - /** Enqueues all elements in the given iterable object into the queue. The - * last element in the iterable object will be on front of the new queue. - * - * @param elems the iterable object. - * @return this - */ - def enqueueAll(elems: scala.collection.IterableOnce[A]): this.type = this ++= elems - - /** - * Removes the first element from this queue and returns it - * - * @return - * @throws NoSuchElementException when queue is empty - */ - def dequeue(): A = removeHead() - - /** Returns the first element in the queue which satisfies the - * given predicate, and removes this element from the queue. - * - * @param p the predicate used for choosing the first element - * @return the first element of the queue for which p yields true - */ - def dequeueFirst(p: A => Boolean): Option[A] = - removeFirst(p) - - /** Returns all elements in the queue which satisfy the - * given predicate, and removes those elements from the queue. - * - * @param p the predicate used for choosing elements - * @return a sequence of all elements in the queue for which - * p yields true. - */ - def dequeueAll(p: A => Boolean): scala.collection.immutable.Seq[A] = - removeAll(p) - - /** - * Returns and dequeues all elements from the queue which satisfy the given predicate - * - * @param f the predicate used for choosing elements - * @return The removed elements - */ - def dequeueWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f) - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @return the first element. 
- */ - @`inline` final def front: A = head - - override protected def klone(): Queue[A] = { - val bf = newSpecificBuilder - bf ++= this - bf.result() - } - - override protected def ofArray(array: Array[AnyRef], end: Int): Queue[A] = - new Queue(array, start = 0, end) - -} - -/** - * $factoryInfo - * @define coll queue - * @define Coll `Queue` - */ -@SerialVersionUID(3L) -object Queue extends StrictOptimizedSeqFactory[Queue] { - - def from[sealed A](source: IterableOnce[A]^): Queue[A] = empty ++= source - - def empty[sealed A]: Queue[A] = new Queue - - def newBuilder[sealed A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty) - -} diff --git a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala deleted file mode 100644 index 1f320f832cdf..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala +++ /dev/null @@ -1,653 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.mutable - -import scala.annotation.tailrec -import collection.{AbstractIterator, Iterator} -import java.lang.String -import language.experimental.captureChecking - -/** - * An object containing the red-black tree implementation used by mutable `TreeMaps`. - * - * The trees implemented in this object are *not* thread safe. - */ -private[collection] object RedBlackTree { - - // ---- class structure ---- - - // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node. - // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size. - // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) - // on the size of the range. 
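Because the nodes cache no subtree sizes, counting the entries inside a range really does cost time proportional to the range, as the comment above says. A hedged sketch of such a count against the `Tree`/`Node` classes defined just below (`countInRange` is a hypothetical helper, not part of this file; it counts keys in the half-open range `[from, until)`):

    def countInRange[A](tree: Tree[A, _], from: A, until: A)(implicit ord: Ordering[A]): Int = {
      def loop(node: Node[A, _]): Int =
        if (node eq null) 0
        else {
          val cmpFrom = ord.compare(node.key, from)
          val cmpUntil = ord.compare(node.key, until)
          val here = if (cmpFrom >= 0 && cmpUntil < 0) 1 else 0
          val left = if (cmpFrom > 0) loop(node.left) else 0    // left subtree only holds keys < node.key
          val right = if (cmpUntil < 0) loop(node.right) else 0 // right subtree only holds keys > node.key
          here + left + right
        }
      loop(tree.root)
    }

The walk visits only the nodes overlapping the range plus one root-to-range path, which is the behaviour the comment above describes.
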
- - final class Tree[sealed A, sealed B](var root: Node[A, B], var size: Int) { - def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size) - } - - final class Node[sealed A, sealed B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { - override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" - } - - object Tree { - def empty[sealed A, sealed B]: Tree[A, B] = new Tree(null, 0) - } - - object Node { - - @`inline` def apply[sealed A, sealed B](key: A, value: B, red: Boolean, - left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = - new Node(key, value, red, left, right, parent) - - @`inline` def leaf[sealed A, sealed B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = - new Node(key, value, red, null, null, parent) - - def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) - } - - // ---- getters ---- - - def isRed(node: Node[_, _]) = (node ne null) && node.red - def isBlack(node: Node[_, _]) = (node eq null) || !node.red - - // ---- size ---- - - def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right) - def size(tree: Tree[_, _]): Int = tree.size - def isEmpty(tree: Tree[_, _]) = tree.root eq null - def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 } - - // ---- search ---- - - def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match { - case null => None - case node => Some(node.value) - } - - @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = - if (node eq null) null - else { - val cmp = ord.compare(key, node.key) - if (cmp < 0) getNode(node.left, key) - else if (cmp > 0) getNode(node.right, key) - else node - } - - def contains[A: Ordering](tree: Tree[A, _], key: A): Boolean = getNode(tree.root, key) ne null - - def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match { - case null => None - case node => Some((node.key, node.value)) - } - - def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match { - case null => None - case node => Some(node.key) - } - - private def minNode[A, B](node: Node[A, B]): Node[A, B] = - if (node eq null) null else minNodeNonNull(node) - - @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = - if (node.left eq null) node else minNodeNonNull(node.left) - - def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match { - case null => None - case node => Some((node.key, node.value)) - } - - def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match { - case null => None - case node => Some(node.key) - } - - private def maxNode[A, B](node: Node[A, B]): Node[A, B] = - if (node eq null) null else maxNodeNonNull(node) - - @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = - if (node.right eq null) node else maxNodeNonNull(node.right) - - /** - * Returns the first (lowest) map entry with a key equal or greater than `key`. Returns `None` if there is no such - * node. 
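-   *
-   * For example, assuming a tree holding the keys 1, 3 and 5:
-   * {{{
-   * minAfter(tree, 2) // the entry with key 3
-   * minAfter(tree, 3) // the entry with key 3 (an equal key matches)
-   * minAfter(tree, 6) // None
-   * }}}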
- */ - def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = - minNodeAfter(tree.root, key) match { - case null => None - case node => Some((node.key, node.value)) - } - - def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = - minNodeAfter(tree.root, key) match { - case null => None - case node => Some(node.key) - } - - private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { - if (node eq null) null - else { - var y: Node[A, B] = null - var x = node - var cmp = 1 - while ((x ne null) && cmp != 0) { - y = x - cmp = ord.compare(key, x.key) - x = if (cmp < 0) x.left else x.right - } - if (cmp <= 0) y else successor(y) - } - } - - /** - * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node. - */ - def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = - maxNodeBefore(tree.root, key) match { - case null => None - case node => Some((node.key, node.value)) - } - - def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = - maxNodeBefore(tree.root, key) match { - case null => None - case node => Some(node.key) - } - - private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { - if (node eq null) null - else { - var y: Node[A, B] = null - var x = node - var cmp = 1 - while ((x ne null) && cmp != 0) { - y = x - cmp = ord.compare(key, x.key) - x = if (cmp < 0) x.left else x.right - } - if (cmp > 0) y else predecessor(y) - } - } - - // ---- insertion ---- - - def insert[sealed A, sealed B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { - var y: Node[A, B] = null - var x = tree.root - var cmp = 1 - while ((x ne null) && cmp != 0) { - y = x - cmp = ord.compare(key, x.key) - x = if (cmp < 0) x.left else x.right - } - - if (cmp == 0) y.value = value - else { - val z = Node.leaf(key, value, red = true, y) - - if (y eq null) tree.root = z - else if (cmp < 0) y.left = z - else y.right = z - - fixAfterInsert(tree, z) - tree.size += 1 - } - } - - private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = { - var z = node - while (isRed(z.parent)) { - if (z.parent eq z.parent.parent.left) { - val y = z.parent.parent.right - if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.right) { - z = z.parent - rotateLeft(tree, z) - } - z.parent.red = false - z.parent.parent.red = true - rotateRight(tree, z.parent.parent) - } - } else { // symmetric cases - val y = z.parent.parent.left - if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.left) { - z = z.parent - rotateRight(tree, z) - } - z.parent.red = false - z.parent.parent.red = true - rotateLeft(tree, z.parent.parent) - } - } - } - tree.root.red = false - } - - // ---- deletion ---- - - def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = { - val z = getNode(tree.root, key) - if (z ne null) { - var y = z - var yIsRed = y.red - var x: Node[A, B] = null - var xParent: Node[A, B] = null - - if (z.left eq null) { - x = z.right - transplant(tree, z, z.right) - xParent = z.parent - } - else if (z.right eq null) { - x = z.left - transplant(tree, z, z.left) - xParent = z.parent - } - else { - y = minNodeNonNull(z.right) - yIsRed = y.red - x = y.right - 
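-        // At this point z has two children: its in-order successor y (which has
-        // no left child) will take z's place; x is y's old right subtree (possibly
-        // null) and xParent tracks where a black-height deficit may arise.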
-        if (y.parent eq z) xParent = y
-        else {
-          xParent = y.parent
-          transplant(tree, y, y.right)
-          y.right = z.right
-          y.right.parent = y
-        }
-        transplant(tree, z, y)
-        y.left = z.left
-        y.left.parent = y
-        y.red = z.red
-      }
-
-      if (!yIsRed) fixAfterDelete(tree, x, xParent)
-      tree.size -= 1
-    }
-  }
-
-  private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = {
-    var x = node
-    var xParent = parent
-    while ((x ne tree.root) && isBlack(x)) {
-      if (x eq xParent.left) {
-        var w = xParent.right
-        // assert(w ne null)
-
-        if (w.red) {
-          w.red = false
-          xParent.red = true
-          rotateLeft(tree, xParent)
-          w = xParent.right
-        }
-        if (isBlack(w.left) && isBlack(w.right)) {
-          w.red = true
-          x = xParent
-        } else {
-          if (isBlack(w.right)) {
-            w.left.red = false
-            w.red = true
-            rotateRight(tree, w)
-            w = xParent.right
-          }
-          w.red = xParent.red
-          xParent.red = false
-          w.right.red = false
-          rotateLeft(tree, xParent)
-          x = tree.root
-        }
-      } else { // symmetric cases
-        var w = xParent.left
-        // assert(w ne null)
-
-        if (w.red) {
-          w.red = false
-          xParent.red = true
-          rotateRight(tree, xParent)
-          w = xParent.left
-        }
-        if (isBlack(w.right) && isBlack(w.left)) {
-          w.red = true
-          x = xParent
-        } else {
-          if (isBlack(w.left)) {
-            w.right.red = false
-            w.red = true
-            rotateLeft(tree, w)
-            w = xParent.left
-          }
-          w.red = xParent.red
-          xParent.red = false
-          w.left.red = false
-          rotateRight(tree, xParent)
-          x = tree.root
-        }
-      }
-      xParent = x.parent
-    }
-    if (x ne null) x.red = false
-  }
-
-  // ---- helpers ----
-
-  /**
-   * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is,
-   * therefore, the last node), this method returns `null`.
-   */
-  private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = {
-    if (node.right ne null) minNodeNonNull(node.right)
-    else {
-      var x = node
-      var y = x.parent
-      while ((y ne null) && (x eq y.right)) {
-        x = y
-        y = y.parent
-      }
-      y
-    }
-  }
-
-  /**
-   * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is,
-   * therefore, the first node), this method returns `null`.
-   */
-  private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = {
-    if (node.left ne null) maxNodeNonNull(node.left)
-    else {
-      var x = node
-      var y = x.parent
-      while ((y ne null) && (x eq y.left)) {
-        x = y
-        y = y.parent
-      }
-      y
-    }
-  }
-
-  private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) {
-    // assert(x.right ne null)
-    val y = x.right
-    x.right = y.left
-
-    if (y.left ne null) y.left.parent = x
-    y.parent = x.parent
-
-    if (x.parent eq null) tree.root = y
-    else if (x eq x.parent.left) x.parent.left = y
-    else x.parent.right = y
-
-    y.left = x
-    x.parent = y
-  }
-
-  private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) {
-    // assert(x.left ne null)
-    val y = x.left
-    x.left = y.right
-
-    if (y.right ne null) y.right.parent = x
-    y.parent = x.parent
-
-    if (x.parent eq null) tree.root = y
-    else if (x eq x.parent.right) x.parent.right = y
-    else x.parent.left = y
-
-    y.right = x
-    x.parent = y
-  }
-
-  /**
-   * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous
-   * parent and setting `from`'s parent to `to`'s previous parent. The children of `from` are left unchanged.
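-   *
-   * For instance, if `to` is the left child of its parent `p`, then after
-   * `transplant(tree, to, from)` we have `p.left eq from` and, when `from` is
-   * non-null, `from.parent eq p`.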
- */ - private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = { - if (to.parent eq null) tree.root = from - else if (to eq to.parent.left) to.parent.left = from - else to.parent.right = from - - if (from ne null) from.parent = to.parent - } - - // ---- tree traversal ---- - - def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f) - - private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = - if (node ne null) foreachNodeNonNull(node, f) - - private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = { - if (node.left ne null) foreachNodeNonNull(node.left, f) - f((node.key, node.value)) - if (node.right ne null) foreachNodeNonNull(node.right, f) - } - - def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { - def g(node: Node[A, _]): Unit = { - val l = node.left - if(l ne null) g(l) - f(node.key) - val r = node.right - if(r ne null) g(r) - } - val r = tree.root - if(r ne null) g(r) - } - - def foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { - def g(node: Node[A, B]): Unit = { - val l = node.left - if(l ne null) g(l) - f(node.key, node.value) - val r = node.right - if(r ne null) g(r) - } - val r = tree.root - if(r ne null) g(r) - } - - def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f) - - private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = - if (node ne null) transformNodeNonNull(node, f) - - private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = { - if (node.left ne null) transformNodeNonNull(node.left, f) - node.value = f(node.key, node.value) - if (node.right ne null) transformNodeNonNull(node.right, f) - } - - def iterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = - new EntriesIterator(tree, start, end) - - def keysIterator[sealed A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = - new KeysIterator(tree, start, end) - - def valuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = - new ValuesIterator(tree, start, end) - - private[this] abstract class TreeIterator[sealed A, sealed B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) - (implicit ord: Ordering[A]) extends AbstractIterator[R] { - - protected def nextResult(node: Node[A, B]): R - - def hasNext: Boolean = nextNode ne null - - @throws[NoSuchElementException] - def next(): R = nextNode match { - case null => throw new NoSuchElementException("next on empty iterator") - case node => - nextNode = successor(node) - setNullIfAfterEnd() - nextResult(node) - } - - private[this] var nextNode: Node[A, B] = start match { - case None => minNode(tree.root) - case Some(from) => minNodeAfter(tree.root, from) - } - - private[this] def setNullIfAfterEnd(): Unit = - if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0) - nextNode = null - - setNullIfAfterEnd() - } - - private[this] final class EntriesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) - extends TreeIterator[A, B, (A, B)](tree, start, end) { - - def nextResult(node: Node[A, B]) = (node.key, node.value) - } - - private[this] final class KeysIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) - extends TreeIterator[A, B, A](tree, 
start, end) { - - def nextResult(node: Node[A, B]) = node.key - } - - private[this] final class ValuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) - extends TreeIterator[A, B, B](tree, start, end) { - - def nextResult(node: Node[A, B]) = node.value - } - - // ---- debugging ---- - - /** - * Checks if the tree is in a valid state. That happens if: - * - It is a valid binary search tree; - * - All red-black properties are satisfied; - * - All non-null nodes have their `parent` reference correct; - * - The size variable in `tree` corresponds to the actual size of the tree. - */ - def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean = - isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size - - /** - * Returns true if all non-null nodes have their `parent` reference correct. - */ - private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = { - - def hasProperParentRefs(node: Node[A, B]): Boolean = { - if (node eq null) true - else { - if ((node.left ne null) && (node.left.parent ne node) || - (node.right ne null) && (node.right.parent ne node)) false - else hasProperParentRefs(node.left) && hasProperParentRefs(node.right) - } - } - - if(tree.root eq null) true - else (tree.root.parent eq null) && hasProperParentRefs(tree.root) - } - - /** - * Returns true if this node follows the properties of a binary search tree. - */ - private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = { - if (node eq null) true - else { - if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) || - (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false - else isValidBST(node.left) && isValidBST(node.right) - } - } - - /** - * Returns true if the tree has all the red-black tree properties: if the root node is black, if all children of red - * nodes are black and if the path from any node to any of its null children has the same number of black nodes. 
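-   *
-   * For example, a red root node, or a red node with a red child, causes this
-   * method to return `false`.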
- */ - private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = { - - def noRedAfterRed(node: Node[A, B]): Boolean = { - if (node eq null) true - else if (node.red && (isRed(node.left) || isRed(node.right))) false - else noRedAfterRed(node.left) && noRedAfterRed(node.right) - } - - def blackHeight(node: Node[A, B]): Int = { - if (node eq null) 1 - else { - val lh = blackHeight(node.left) - val rh = blackHeight(node.right) - - if (lh == -1 || lh != rh) -1 - else if (isRed(node)) lh - else lh + 1 - } - } - - isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0 - } - - // building - - /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ - def fromOrderedKeys[sealed A](xs: Iterator[A], size: Int): Tree[A, Null] = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): Node[A, Null] = size match { - case 0 => null - case 1 => new Node(xs.next(), null, level == maxUsedDepth && level != 1, null, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val x = xs.next() - val right = f(level+1, size-1-leftSize) - val n = new Node(x, null, false, left, right, null) - if(left ne null) left.parent = n - right.parent = n - n - } - new Tree(f(1, size), size) - } - - /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ - def fromOrderedEntries[sealed A, sealed B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): Node[A, B] = size match { - case 0 => null - case 1 => - val (k, v) = xs.next() - new Node(k, v, level == maxUsedDepth && level != 1, null, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val (k, v) = xs.next() - val right = f(level+1, size-1-leftSize) - val n = new Node(k, v, false, left, right, null) - if(left ne null) left.parent = n - right.parent = n - n - } - new Tree(f(1, size), size) - } - - def copyTree[sealed A, sealed B](n: Node[A, B]): Node[A, B] = - if(n eq null) null else { - val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null) - if(c.left != null) c.left.parent = c - if(c.right != null) c.right.parent = c - c - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala deleted file mode 100644 index 246e525e37d9..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import language.experimental.captureChecking - -/** `ReusableBuilder` is a marker trait that indicates that a `Builder` - * can be reused to build more than one instance of a collection. In - * particular, calling `result()` followed by `clear()` will produce a - * collection and reset the builder to begin building a new collection - * of the same type. - * - * In general no method other than `clear()` may be called after `result()`. 
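- *
- * A typical reuse cycle, sketched here with a `ListBuffer` (which implements
- * this trait):
- * {{{
- * val b = new ListBuffer[Int]
- * b += 1
- * val xs = b.result() // List(1)
- * b.clear()           // resets the builder
- * b += 2
- * val ys = b.result() // List(2)
- * }}}
- *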
- * It is up to subclasses to implement and to document other allowed sequences
- * of operations (e.g. calling other methods after `result()` in order to obtain
- * different snapshots of a collection under construction).
- *
- * @tparam Elem the type of elements that get added to the builder.
- * @tparam To the type of collection that is produced.
- *
- * @define multipleResults
- *
- * This Builder can be reused after calling `result()` without an
- * intermediate call to `clear()` in order to build multiple related results.
- */
-trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] {
-  /** Clears the contents of this builder.
-    *  After execution of this method, the builder will contain no elements.
-    *
-    *  If executed immediately after a call to `result()`, this allows a new
-    *  instance of the same type of collection to be built.
-    */
-  override def clear(): Unit    // Note: overriding for Scaladoc only!
-
-  /** Produces a collection from the added elements.
-    *
-    *  After a call to `result`, the behavior of all other methods is undefined
-    *  save for `clear()`. If `clear()` is called, then the builder is reset and
-    *  may be used to build another instance.
-    *
-    *  @return a collection containing the elements added to this builder.
-    */
-  override def result(): To    // Note: overriding for Scaladoc only!
-}
diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala
deleted file mode 100644
index 01384e993e89..000000000000
--- a/tests/pos-special/stdlib/collection/mutable/Set.scala
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */
-
-package scala.collection.mutable
-
-import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps}
-import language.experimental.captureChecking
-
-/** Base trait for mutable sets */
-trait Set[A]
-  extends Iterable[A]
-    with collection.Set[A]
-    with SetOps[A, Set, Set[A]]
-    with IterableFactoryDefaults[A, Set] {
-
-  override def iterableFactory: IterableFactory[Set] = Set
-}
-
-/**
- * @define coll mutable set
- * @define Coll `mutable.Set`
- */
-trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]]
-  extends collection.SetOps[A, CC, C]
-    with IterableOps[A, CC, C] // only needed so we can use super[IterableOps] below
-    with Cloneable[C]
-    with Builder[A, C]
-    with Growable[A]
-    with Shrinkable[A] {
-
-  def result(): C = coll
-
-  /** Checks whether the set contains the given element, and adds it if not.
-    *
-    *  @param elem the element to be added
-    *  @return true if the element was added
-    */
-  def add(elem: A): Boolean =
-    !contains(elem) && {
-      coll += elem; true
-    }
-
-  /** Updates the presence of a single element in this set.
-    *
-    *  This method allows one to add or remove an element `elem`
-    *  from this set depending on the value of parameter `included`.
-    *  Typically, one would use the following syntax:
-    *  {{{
-    *     set(elem) = true  // adds element
-    *     set(elem) = false // removes element
-    *  }}}
-    *
-    *  @param elem the element to be added or removed
-    *  @param included a flag indicating whether element should be included or excluded.
-    */
-  def update(elem: A, included: Boolean): Unit = {
-    if (included) add(elem)
-    else remove(elem)
-  }
-
-  /** Removes an element from this set.
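-    *
-    *  For example:
-    *  {{{
-    *  val s = Set(1, 2)
-    *  s.remove(2) // true;  s now contains only 1
-    *  s.remove(3) // false; s is unchanged
-    *  }}}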
-    *
-    *  @param elem the element to be removed
-    *  @return true if this set contained the element before it was removed
-    */
-  def remove(elem: A): Boolean = {
-    val res = contains(elem)
-    coll -= elem
-    res
-  }
-
-  def diff(that: collection.Set[A]): C =
-    foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem)
-
-  @deprecated("Use filterInPlace instead", "2.13.0")
-  @inline final def retain(p: A => Boolean): Unit = filterInPlace(p)
-
-  /** Removes all elements from the set which do not satisfy the predicate.
-    *  @param p the predicate used to test elements. Only elements for
-    *           which `p` returns `true` are retained in the set; all others
-    *           are removed.
-    */
-  def filterInPlace(p: A => Boolean): this.type = {
-    if (nonEmpty) {
-      val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException
-      val arrayLength = array.length
-      var i = 0
-      while (i < arrayLength) {
-        val elem = array(i).asInstanceOf[A]
-        if (!p(elem)) {
-          this -= elem
-        }
-        i += 1
-      }
-    }
-    this
-  }
-
-  override def clone(): C = empty ++= this
-
-  override def knownSize: Int = super[IterableOps].knownSize
-}
-
-/**
- * $factoryInfo
- * @define coll mutable set
- * @define Coll `mutable.Set`
- */
-@SerialVersionUID(3L)
-object Set extends IterableFactory.Delegate[Set](HashSet)
-
-
-/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
-abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A]
diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala
deleted file mode 100644
index 8017177f5720..000000000000
--- a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */
-
-package scala
-package collection.mutable
-
-import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults}
-import language.experimental.captureChecking
-
-/**
- * Base type for mutable sorted map collections
- */
-trait SortedMap[K, V]
-  extends collection.SortedMap[K, V]
-    with Map[K, V]
-    with SortedMapOps[K, V, SortedMap, SortedMap[K, V]]
-    with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] {
-
-  override def unsorted: Map[K, V] = this
-
-  override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap
-
-  /** The same sorted map with a given default function.
-    *  Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
-    *  are not affected by `withDefault`.
-    *
-    *  Invoking transformer methods (e.g. `map`) will not preserve the default value.
-    *
-    *  @param d the function mapping keys to values, used for non-present keys
-    *  @return a wrapper of the map with a default value
-    */
-  override def withDefault(d: K -> V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d)
-
-  /** The same map with a given default value.
-    *  Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
-    *  are not affected by `withDefaultValue`.
-    *
-    *  Invoking transformer methods (e.g. `map`) will not preserve the default value.
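-    *
-    *  For example:
-    *  {{{
-    *  val m = TreeMap(1 -> "a").withDefaultValue("?")
-    *  m(2)     // "?"
-    *  m.get(2) // None; `get` is not affected by the default
-    *  }}}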
- * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - override def withDefaultValue(d: V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, _ => d) -} - -trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends collection.SortedMapOps[K, V, CC, C] - with MapOps[K, V, Map, C] { - - def unsorted: Map[K, V] - - @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") - override def updated[V1 >: V](key: K, value: V1): CC[K, V1] = - clone().asInstanceOf[CC[K, V1]].addOne((key, value)) -} - -@SerialVersionUID(3L) -object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { - - @SerialVersionUID(3L) - final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K -> V) - extends Map.WithDefault[K, V](underlying, defaultValue) - with SortedMap[K, V] - with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] - with Serializable { - - override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory - - def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) - - def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) - - implicit def ordering: Ordering[K] = underlying.ordering - - def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = - new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) - - // Need to override following methods to match type signatures of `SortedMap.WithDefault` - // for operations preserving default value - override def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } - - override def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } - - override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - - override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): SortedMap[K, V2] = - underlying.concat(suffix).withDefault(defaultValue) - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = - new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) - - override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = - SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala deleted file mode 100644 index e657fb749d7d..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */
-
-package scala
-package collection
-package mutable
-import language.experimental.captureChecking
-
-/**
- * Base type for mutable sorted set collections
- */
-trait SortedSet[A]
-  extends Set[A]
-    with collection.SortedSet[A]
-    with SortedSetOps[A, SortedSet, SortedSet[A]]
-    with SortedSetFactoryDefaults[A, SortedSet, Set] {
-
-  override def unsorted: Set[A] = this
-
-  override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet
-}
-
-/**
- * @define coll mutable sorted set
- * @define Coll `mutable.SortedSet`
- */
-trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]]
-  extends SetOps[A, Set, C]
-    with collection.SortedSetOps[A, CC, C] {
-
-  def unsorted: Set[A]
-}
-
-/**
- * $factoryInfo
- * @define coll mutable sorted set
- * @define Coll `mutable.SortedSet`
- */
-@SerialVersionUID(3L)
-object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet)
diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala
deleted file mode 100644
index 4efa9621f374..000000000000
--- a/tests/pos-special/stdlib/collection/mutable/Stack.scala
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */
-
-package scala.collection.mutable
-
-import scala.annotation.{migration, nowarn}
-import scala.collection.generic.DefaultSerializable
-import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps}
-
-import language.experimental.captureChecking
-
-/** A stack implements a data structure which allows one to store and retrieve
-  *  objects in a last-in-first-out (LIFO) fashion.
-  *
-  *  Note that operations which consume and produce iterables preserve order,
-  *  rather than reversing it (as would be expected from building a new stack
-  *  by pushing an element at a time).
-  *
-  *  @tparam A    type of the elements contained in this stack.
-  *
-  *  @define Coll `Stack`
-  *  @define coll stack
-  *  @define orderDependent
-  *  @define orderDependentFold
-  *  @define mayNotTerminateInf
-  *  @define willNotTerminateInf
-  */
-@migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0")
-class Stack[sealed A] protected (array: Array[AnyRef], start: Int, end: Int)
-  extends ArrayDeque[A](array, start, end)
-    with IndexedSeqOps[A, Stack, Stack[A]]
-    with StrictOptimizedSeqOps[A, Stack, Stack[A]]
-    with IterableFactoryDefaults[A, Stack]
-    with ArrayDequeOps[A, Stack, Stack[A]]
-    with Cloneable[Stack[A]]
-    with DefaultSerializable {
-
-  def this(initialSize: Int = ArrayDeque.DefaultInitialSize) =
-    this(ArrayDeque.alloc(initialSize), start = 0, end = 0)
-
-  override def iterableFactory: SeqFactory[Stack] = Stack
-
-  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
-  override protected[this] def stringPrefix = "Stack"
-
-  /**
-    * Adds an element to the top of this stack.
-    *
-    * @param elem the element to push
-    * @return this stack
-    */
-  def push(elem: A): this.type = prepend(elem)
-
-  /** Pushes two or more elements onto the stack. The last element
-    *  of the sequence will be on top of the new stack.
-    *
-    *  @param elems the element sequence.
-    *  @return the stack with the new elements on top.
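-    *
-    *  For example:
-    *  {{{
-    *  val s = Stack(3)
-    *  s.push(1, 2) // the stack now contains 2, 1, 3 from top to bottom
-    *  }}}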
-    */
-  def push(elem1: A, elem2: A, elems: A*): this.type = {
-    val k = elems.knownSize
-    ensureSize(length + (if(k >= 0) k + 2 else 3))
-    prepend(elem1).prepend(elem2).pushAll(elems)
-  }
-
-  /** Pushes all elements in the given iterable object onto the stack. The
-    *  last element in the iterable object will be on top of the new stack.
-    *
-    *  @param elems the iterable object.
-    *  @return the stack with the new elements on top.
-    */
-  def pushAll(elems: scala.collection.IterableOnce[A]): this.type =
-    prependAll(elems match {
-      case it: scala.collection.Seq[A] => it.view.reverse
-      case it => IndexedSeq.from(it).view.reverse
-    })
-
-  /**
-    * Removes the top element from this stack and returns it.
-    *
-    * @return the top element
-    * @throws NoSuchElementException when the stack is empty
-    */
-  def pop(): A = removeHead()
-
-  /**
-    * Pops all elements from this stack and returns them.
-    *
-    * @return The removed elements
-    */
-  def popAll(): scala.collection.Seq[A] = removeAll()
-
-  /**
-    * Removes the longest prefix of elements from the top of this stack which
-    * satisfy the given predicate, and returns them.
-    *
-    * @param f the predicate used for choosing elements
-    * @return The removed elements
-    */
-  def popWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f)
-
-  /** Returns the top element of the stack. This method will not remove
-    *  the element from the stack. An error is signaled if there is no
-    *  element on the stack.
-    *
-    *  @throws NoSuchElementException
-    *  @return the top element
-    */
-  @`inline` final def top: A = head
-
-  override protected def klone(): Stack[A] = {
-    val bf = newSpecificBuilder
-    bf ++= this
-    bf.result()
-  }
-
-  override protected def ofArray(array: Array[AnyRef], end: Int): Stack[A] =
-    new Stack(array, start = 0, end)
-
-}
-
-/**
- * $factoryInfo
- * @define coll stack
- * @define Coll `Stack`
- */
-@SerialVersionUID(3L)
-object Stack extends StrictOptimizedSeqFactory[Stack] {
-
-  def from[sealed A](source: IterableOnce[A]^): Stack[A] = empty ++= source
-
-  def empty[sealed A]: Stack[A] = new Stack
-
-  def newBuilder[sealed A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty)
-
-}
diff --git a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala
index 5320fa1dabb0..c7859214821d 100644
--- a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala
+++ b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala
@@ -110,7 +110,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr
 
   override def toString: String = result()
 
-  override def toArray[sealed B >: Char](implicit ct: scala.reflect.ClassTag[B]) =
+  override def toArray[B >: Char](implicit ct: scala.reflect.ClassTag[B]) =
     ct.runtimeClass match {
       case java.lang.Character.TYPE => toCharArray.asInstanceOf[Array[B]]
       case _ => super.toArray
diff --git a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala
deleted file mode 100644
index f714a9ed46c2..000000000000
--- a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala
+++ /dev/null
@@ -1,258 +0,0 @@
-/*
- * Scala (https://www.scala-lang.org)
- *
- * Copyright EPFL and Lightbend, Inc.
- *
- * Licensed under Apache License 2.0
- * (http://www.apache.org/licenses/LICENSE-2.0).
- *
- * See the NOTICE file distributed with this work for
- * additional information regarding copyright ownership.
- */ - -package scala -package collection -package mutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.{RedBlackTree => RB} -import language.experimental.captureChecking - -/** - * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. - * - * @param ordering the implicit ordering used to compare objects of type `A`. - * @tparam K the type of the keys contained in this tree map. - * @tparam V the type of the values associated with the keys. - * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -sealed class TreeMap[sealed K, sealed V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) - extends AbstractMap[K, V] - with SortedMap[K, V] - with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, TreeMap[K, V]] - with StrictOptimizedMapOps[K, V, Map, TreeMap[K, V]] - with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] - with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] - with DefaultSerializable { - - override def sortedMapFactory = TreeMap - - /** - * Creates an empty `TreeMap`. - * @param ord the implicit ordering used to compare objects of type `K`. - * @return an empty `TreeMap`. - */ - def this()(implicit ord: Ordering[K]) = this(RB.Tree.empty)(ord) - - def iterator: Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else RB.iterator(tree) - } - - override def keysIterator: Iterator[K] = { - if (isEmpty) Iterator.empty - else RB.keysIterator(tree, None) - } - - override def valuesIterator: Iterator[V] = { - if (isEmpty) Iterator.empty - else RB.valuesIterator(tree, None) - } - - def keysIteratorFrom(start: K): Iterator[K] = { - if (isEmpty) Iterator.empty - else RB.keysIterator(tree, Some(start)) - } - - def iteratorFrom(start: K): Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else RB.iterator(tree, Some(start)) - } - - override def valuesIteratorFrom(start: K): Iterator[V] = { - if (isEmpty) Iterator.empty - else RB.valuesIterator(tree, Some(start)) - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = - shape.parUnbox( - scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Node[K, V]]( - size, tree.root, _.left, _.right, x => (x.key, x.value) - ) - ) - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Node[K, V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree.root, _.left, _.right, _.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Node[K, V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, 
_.right, _.value.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree.root, _.left, _.right, _.value)) - } - s.asInstanceOf[S with EfficientSplit] - } - - def addOne(elem: (K, V)): this.type = { RB.insert(tree, elem._1, elem._2); this } - - def subtractOne(elem: K): this.type = { RB.delete(tree, elem); this } - - override def clear(): Unit = RB.clear(tree) - - def get(key: K): Option[V] = RB.get(tree, key) - - /** - * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and - * vice versa. - * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = new TreeMapProjection(from, until) - - override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) - override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) - - override def size: Int = RB.size(tree) - override def knownSize: Int = size - override def isEmpty: Boolean = RB.isEmpty(tree) - - override def contains(key: K): Boolean = RB.contains(tree, key) - - override def head: (K, V) = RB.min(tree).get - - override def last: (K, V) = RB.max(tree).get - - override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) - - override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) - - override protected[this] def className: String = "TreeMap" - - - /** - * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa. - * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - private[this] final class TreeMapProjection(from: Option[K], until: Option[K]) extends TreeMap[K, V](tree) { - - /** - * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). - */ - private[this] def pickLowerBound(newFrom: Option[K]): Option[K] = (from, newFrom) match { - case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) - case (None, _) => newFrom - case _ => from - } - - /** - * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). 
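-     *
-     * For example, with `until = Some(7)`, both `pickUpperBound(Some(10))` and
-     * `pickUpperBound(None)` yield `Some(7)`, while `pickUpperBound(Some(5))`
-     * yields `Some(5)`.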
- */ - private[this] def pickUpperBound(newUntil: Option[K]): Option[K] = (until, newUntil) match { - case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) - case (None, _) => newUntil - case _ => until - } - - /** - * Returns true if the argument is inside the view bounds (between `from` and `until`). - */ - private[this] def isInsideViewBounds(key: K): Boolean = { - val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 - val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 - afterFrom && beforeUntil - } - - override def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = - new TreeMapProjection(pickLowerBound(from), pickUpperBound(until)) - - override def get(key: K) = if (isInsideViewBounds(key)) RB.get(tree, key) else None - - override def iterator = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, from, until) - override def keysIterator: Iterator[K] = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, from, until) - override def valuesIterator: Iterator[V] = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, from, until) - override def keysIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, pickLowerBound(Some(start)), until) - override def iteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, pickLowerBound(Some(start)), until) - override def valuesIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, pickLowerBound(Some(start)), until) - override def size = if (RB.size(tree) == 0) 0 else iterator.length - override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 - override def isEmpty = RB.size(tree) == 0 || !iterator.hasNext - override def contains(key: K) = isInsideViewBounds(key) && RB.contains(tree, key) - - override def head = headOption.get - override def headOption = { - val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree) - (entry, until) match { - case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None - case _ => entry - } - } - - override def last = lastOption.get - override def lastOption = { - val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree) - (entry, from) match { - case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None - case _ => entry - } - } - - // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized - // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See - // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
- override def foreach[U](f: ((K, V)) => U): Unit = iterator.foreach(f) - - override def clone() = super.clone().rangeImpl(from, until) - } - -} - -/** - * $factoryInfo - * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -@SerialVersionUID(3L) -object TreeMap extends SortedMapFactory[TreeMap] { - - def from[sealed K : Ordering, sealed V](it: IterableOnce[(K, V)]^): TreeMap[K, V] = - Growable.from(empty[K, V], it) - - def empty[sealed K : Ordering, sealed V]: TreeMap[K, V] = new TreeMap[K, V]() - - def newBuilder[sealed K: Ordering, sealed V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) - -} diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala deleted file mode 100644 index 9ba439bea041..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.mutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.{RedBlackTree => RB} -import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable} -import language.experimental.captureChecking - -/** - * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. - * - * @param ordering the implicit ordering used to compare objects of type `A`. - * @tparam A the type of the keys contained in this tree set. - * - * @define Coll mutable.TreeSet - * @define coll mutable tree set - */ -// Original API designed in part by Lucien Pereira -sealed class TreeSet[sealed A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) - extends AbstractSet[A] - with SortedSet[A] - with SortedSetOps[A, TreeSet, TreeSet[A]] - with StrictOptimizedIterableOps[A, Set, TreeSet[A]] - with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] - with SortedSetFactoryDefaults[A, TreeSet, Set] - with DefaultSerializable { - - if (ordering eq null) - throw new NullPointerException("ordering must not be null") - - /** - * Creates an empty `TreeSet`. - * @param ord the implicit ordering used to compare objects of type `A`. - * @return an empty `TreeSet`. 
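-   *
-   * For example:
-   * {{{
-   * val ts = new TreeSet[Int]() // ordered by the implicit Ordering.Int
-   * ts += 3
-   * ts += 1 // iteration order is 1, 3
-   * }}}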
- */ - def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) - - override def sortedIterableFactory: SortedIterableFactory[TreeSet] = TreeSet - - def iterator: collection.Iterator[A] = RB.keysIterator(tree) - - def iteratorFrom(start: A): collection.Iterator[A] = RB.keysIterator(tree, Some(start)) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Node[A, Null] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree.root, _.left, _.right, _.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - def addOne(elem: A): this.type = { - RB.insert(tree, elem, null) - this - } - - def subtractOne(elem: A): this.type = { - RB.delete(tree, elem) - this - } - - def clear(): Unit = RB.clear(tree) - - def contains(elem: A): Boolean = RB.contains(tree, elem) - - def unconstrained: collection.Set[A] = this - - def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetProjection(from, until) - - override protected[this] def className: String = "TreeSet" - - override def size: Int = RB.size(tree) - override def knownSize: Int = size - override def isEmpty: Boolean = RB.isEmpty(tree) - - override def head: A = RB.minKey(tree).get - - override def last: A = RB.maxKey(tree).get - - override def minAfter(key: A): Option[A] = RB.minKeyAfter(tree, key) - - override def maxBefore(key: A): Option[A] = RB.maxKeyBefore(tree, key) - - override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) - - - /** - * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa. - * - * Only keys between this projection's key range will ever appear as elements of this set, independently of whether - * the elements are added through the original set or through this view. That means that if one inserts an element in - * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. - * Mutations are always reflected in the original set, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - private[this] final class TreeSetProjection(from: Option[A], until: Option[A]) extends TreeSet[A](tree) { - - /** - * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). - */ - private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { - case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) - case (None, _) => newFrom - case _ => from - } - - /** - * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). 
- */ - private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { - case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) - case (None, _) => newUntil - case _ => until - } - - /** - * Returns true if the argument is inside the view bounds (between `from` and `until`). - */ - private[this] def isInsideViewBounds(key: A): Boolean = { - val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 - val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 - afterFrom && beforeUntil - } - - override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = - new TreeSetProjection(pickLowerBound(from), pickUpperBound(until)) - - override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) - - override def iterator = RB.keysIterator(tree, from, until) - override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) - - override def size = if (RB.size(tree) == 0) 0 else iterator.length - override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 - override def isEmpty: Boolean = RB.size(tree) == 0 || !iterator.hasNext - - override def head: A = headOption.get - override def headOption: Option[A] = { - val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree) - (elem, until) match { - case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None - case _ => elem - } - } - - override def last: A = lastOption.get - override def lastOption = { - val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree) - (elem, from) match { - case (Some(e), Some(fr)) if ordering.compare(e, fr) < 0 => None - case _ => elem - } - } - - // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized - // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See - // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
- override def foreach[U](f: A => U): Unit = iterator.foreach(f) - - override def clone(): mutable.TreeSet[A] = super.clone().rangeImpl(from, until) - - } - -} - -/** - * $factoryInfo - * @define Coll `mutable.TreeSet` - * @define coll mutable tree set - */ -@SerialVersionUID(3L) -object TreeSet extends SortedIterableFactory[TreeSet] { - - def empty[sealed A : Ordering]: TreeSet[A] = new TreeSet[A]() - - def from[sealed E](it: IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = - it match { - case ts: TreeSet[E] if ordering == ts.ordering => - new TreeSet[E](ts.tree.treeCopy()) - case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => - new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) - case r: Range if (ordering eq Ordering.Int) || (ordering eq Ordering.Int.reverse) => - val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator - new TreeSet[E](RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size)) - case _ => - val t: RB.Tree[E, Null] = RB.Tree.empty - val i = it.iterator - while (i.hasNext) RB.insert(t, i.next(), null) - new TreeSet[E](t) - } - - def newBuilder[sealed A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { - private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty - def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } - def result(): TreeSet[A] = new TreeSet[A](tree) - def clear(): Unit = { tree = RB.Tree.empty } - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala deleted file mode 100644 index 2015b76a31b8..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala +++ /dev/null @@ -1,443 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.tailrec -import scala.collection.generic.DefaultSerializable -import scala.reflect.ClassTag -import scala.collection.immutable.Nil -import language.experimental.captureChecking - -/** A buffer that stores elements in an unrolled linked list. - * - * Unrolled linked lists store elements in linked fixed size - * arrays. - * - * Unrolled buffers retain locality and low memory overhead - * properties of array buffers, but offer much more efficient - * element addition, since they never reallocate and copy the - * internal array. - * - * However, they provide `O(n/m)` complexity random access, - * where `n` is the number of elements, and `m` the size of - * internal array chunks. - * - * Ideal to use when: - * - elements are added to the buffer and then all of the - * elements are traversed sequentially - * - two unrolled buffers need to be concatenated (see `concat`) - * - * Better than singly linked lists for random access, but - * should still be avoided for such a purpose. 
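- *
- * A small usage sketch:
- * {{{
- * val b = UnrolledBuffer(1, 2)
- * b += 3 // appends without copying the earlier chunks
- * b(2)   // 3, accessed in O(n/m)
- * }}}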
- * - * @define coll unrolled buffer - * @define Coll `UnrolledBuffer` - * - */ -@SerialVersionUID(3L) -sealed class UnrolledBuffer[sealed T](implicit val tag: ClassTag[T]) - extends AbstractBuffer[T] - with Buffer[T] - with Seq[T] - with SeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] - with StrictOptimizedSeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] - with EvidenceIterableFactoryDefaults[T, UnrolledBuffer, ClassTag] - with Builder[T, UnrolledBuffer[T]] - with DefaultSerializable { - - import UnrolledBuffer.Unrolled - - @transient private var headptr = newUnrolled - @transient private var lastptr = headptr - @transient private var sz = 0 - - private[collection] def headPtr = headptr - private[collection] def headPtr_=(head: Unrolled[T]) = headptr = head - private[collection] def lastPtr = lastptr - private[collection] def lastPtr_=(last: Unrolled[T]) = lastptr = last - private[collection] def size_=(s: Int) = sz = s - - protected def evidenceIterableFactory: UnrolledBuffer.type = UnrolledBuffer - protected def iterableEvidence: ClassTag[T] = tag - - override def iterableFactory: SeqFactory[UnrolledBuffer] = UnrolledBuffer.untagged - - protected def newUnrolled = new Unrolled[T](this) - - // The below would allow more flexible behavior without requiring inheritance - // that is risky because all the important internals are private. - // private var myLengthPolicy: Int => Int = x => x - // - // /** Specifies how the array lengths should vary. - // * - // * By default, `UnrolledBuffer` uses arrays of a fixed size. A length - // * policy can be given that changes this scheme to, for instance, an - // * exponential growth. - // * - // * @param nextLength computes the length of the next array from the length of the latest one - // */ - // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength } - private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz) - - def classTagCompanion = UnrolledBuffer - - /** Concatenates the target unrolled buffer to this unrolled buffer. - * - * The specified buffer `that` is cleared after this operation. This is - * an O(1) operation. 
- * - * @param that the unrolled buffer whose elements are added to this buffer - */ - def concat(that: UnrolledBuffer[T]) = { - // bind the two together - if (!lastptr.bind(that.headptr)) lastptr = that.lastPtr - - // update size - sz += that.sz - - // `that` is no longer usable, so clear it - // here we rely on the fact that `clear` allocates - // new nodes instead of modifying the previous ones - that.clear() - - // return a reference to this - this - } - - def addOne(elem: T) = { - lastptr = lastptr.append(elem) - sz += 1 - this - } - - def clear(): Unit = { - headptr = newUnrolled - lastptr = headptr - sz = 0 - } - - def iterator: Iterator[T] = new AbstractIterator[T] { - var pos: Int = -1 - var node: Unrolled[T] = headptr - scan() - - private def scan(): Unit = { - pos += 1 - while (pos >= node.size) { - pos = 0 - node = node.next - if (node eq null) return - } - } - def hasNext = node ne null - def next() = if (hasNext) { - val r = node.array(pos) - scan() - r - } else Iterator.empty.next() - } - - // this should be faster than the iterator - override def foreach[U](f: T => U) = headptr.foreach(f) - - def result() = this - - def length = sz - - override def knownSize: Int = sz - - def apply(idx: Int) = - if (idx >= 0 && idx < sz) headptr(idx) - else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") - - def update(idx: Int, newelem: T) = - if (idx >= 0 && idx < sz) headptr(idx) = newelem - else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") - - def mapInPlace(f: T => T): this.type = { - headptr.mapInPlace(f) - this - } - - def remove(idx: Int) = - if (idx >= 0 && idx < sz) { - sz -= 1 - headptr.remove(idx, this) - } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") - - @tailrec final def remove(idx: Int, count: Int): Unit = - if (count > 0) { - remove(idx) - remove(idx, count-1) - } - - def prepend(elem: T) = { - headptr = headptr prepend elem - sz += 1 - this - } - - def insert(idx: Int, elem: T): Unit = - insertAll(idx, elem :: Nil) - - def insertAll(idx: Int, elems: IterableOnce[T]^): Unit = - if (idx >= 0 && idx <= sz) { - sz += headptr.insertAll(idx, elems, this) - } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") - - override def subtractOne(elem: T): this.type = { - if (headptr.subtractOne(elem, this)) { - sz -= 1 - } - this - } - - def patchInPlace(from: Int, patch: collection.IterableOnce[T]^, replaced: Int): this.type = { - remove(from, replaced) - insertAll(from, patch) - this - } - - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - out.defaultWriteObject - out writeInt sz - for (elem <- this) out writeObject elem - } - - private def readObject(in: java.io.ObjectInputStream): Unit = { - in.defaultReadObject - - val num = in.readInt - - headPtr = newUnrolled - lastPtr = headPtr - sz = 0 - var i = 0 - while (i < num) { - this += in.readObject.asInstanceOf[T] - i += 1 - } - } - - override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this - - override protected[this] def className = "UnrolledBuffer" -} - - -@SerialVersionUID(3L) -object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] { self => - - val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self) - - def empty[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] - - def from[sealed A : ClassTag](source: scala.collection.IterableOnce[A]^): UnrolledBuffer[A] = 
newBuilder[A].addAll(source) - - def newBuilder[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] - - final val waterline: Int = 50 - - final def waterlineDenom: Int = 100 - - @deprecated("Use waterlineDenom instead.", "2.13.0") - final val waterlineDelim: Int = waterlineDenom - - private[collection] val unrolledlength = 32 - - /** Unrolled buffer node. - */ - class Unrolled[sealed T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { - private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) - private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) - - private def nextlength = if (buff eq null) unrolledlength else buff.calcNextLength(array.length) - - // adds and returns itself or the new unrolled if full - @tailrec final def append(elem: T): Unrolled[T] = if (size < array.length) { - array(size) = elem - size += 1 - this - } else { - next = new Unrolled[T](0, new Array[T](nextlength), null, buff) - next append elem - } - def foreach[U](f: T => U): Unit = { - var unrolled = this - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val elem = chunkarr(i) - f(elem) - i += 1 - } - i = 0 - unrolled = unrolled.next - } - } - def mapInPlace(f: T => T): Unit = { - var unrolled = this - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val elem = chunkarr(i) - chunkarr(i) = f(elem) - i += 1 - } - i = 0 - unrolled = unrolled.next - } - } - @tailrec final def apply(idx: Int): T = - if (idx < size) array(idx) else next.apply(idx - size) - @tailrec final def update(idx: Int, newelem: T): Unit = - if (idx < size) array(idx) = newelem else next.update(idx - size, newelem) - @tailrec final def locate(idx: Int): Unrolled[T] = - if (idx < size) this else next.locate(idx - size) - def prepend(elem: T) = if (size < array.length) { - // shift the elements of the array right - // then insert the element - shiftright() - array(0) = elem - size += 1 - this - } else { - // allocate a new node and store element - // then make it point to this - val newhead = new Unrolled[T](buff) - newhead append elem - newhead.next = this - newhead - } - // shifts right assuming enough space - private def shiftright(): Unit = { - var i = size - 1 - while (i >= 0) { - array(i + 1) = array(i) - i -= 1 - } - } - // returns pointer to new last if changed - @tailrec final def remove(idx: Int, buffer: UnrolledBuffer[T]): T = - if (idx < size) { - // remove the element - // then try to merge with the next bucket - val r = array(idx) - shiftleft(idx) - size -= 1 - if (tryMergeWithNext()) buffer.lastPtr = this - r - } else next.remove(idx - size, buffer) - - @tailrec final def subtractOne(elem: T, buffer: UnrolledBuffer[T]): Boolean = { - var i = 0 - while (i < size) { - if(array(i) == elem) { - remove(i, buffer) - return true - } - i += 1 - } - if(next ne null) next.subtractOne(elem, buffer) else false - } - - // shifts left elements after `leftb` (overwrites `leftb`) - private def shiftleft(leftb: Int): Unit = { - var i = leftb - while (i < (size - 1)) { - array(i) = array(i + 1) - i += 1 - } - nullout(i, i + 1) - } - protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDenom)) { - // copy the next array, then discard the next node - Array.copy(next.array, 0, array, 
size, next.size) - size = size + next.size - next = next.next - if (next eq null) true else false // checks if last node was thrown out - } else false - - @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T]^, buffer: UnrolledBuffer[T]): Int = { - if (idx < size) { - // divide this node at the appropriate position and insert all into head - // update new next - val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) - Array.copy(array, idx, newnextnode.array, 0, size - idx) - newnextnode.size = size - idx - newnextnode.next = next - - // update this - nullout(idx, size) - size = idx - next = null - - // insert everything from iterable to this - var curr = this - var appended = 0 - for (elem <- t.iterator) { - curr = curr append elem - appended += 1 - } - curr.next = newnextnode - - // try to merge the last node of this with the newnextnode and fix tail pointer if needed - if (curr.tryMergeWithNext()) buffer.lastPtr = curr - else if (newnextnode.next eq null) buffer.lastPtr = newnextnode - appended - } - else if (idx == size || (next eq null)) { - var curr = this - var appended = 0 - for (elem <- t.iterator) { - curr = curr append elem - appended += 1 - } - appended - } - else next.insertAll(idx - size, t, buffer) - } - - private def nullout(from: Int, until: Int): Unit = { - var idx = from - while (idx < until) { - array(idx) = null.asInstanceOf[T] // TODO find a way to assign a default here!! - idx += 1 - } - } - - // assumes this is the last node - // `thathead` and `thatlast` are head and last node - // of the other unrolled list, respectively - def bind(thathead: Unrolled[T]) = { - assert(next eq null) - next = thathead - tryMergeWithNext() - } - - override def toString: String = - array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "") - } -} - -// This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: -// Todo -- revisit whether inheritance is the best way to achieve this functionality -private[collection] class DoublingUnrolledBuffer[sealed T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { - override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz - override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) -} diff --git a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala deleted file mode 100644 index a9498b7fc69b..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.nowarn -import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike} -import language.experimental.captureChecking - -/** A hash map with references to entries which are weakly reachable. Entries are - * removed from this map when the key is no longer (strongly) referenced. This class wraps - * `java.util.WeakHashMap`. 
- * - * @tparam K type of keys contained in this map - * @tparam V type of values associated with the keys - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]] - * section on `Weak Hash Maps` for more information. - * - * @define Coll `WeakHashMap` - * @define coll weak hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(3L) -class WeakHashMap[sealed K, sealed V] extends JMapWrapper[K, V](new java.util.WeakHashMap) - with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]] - with MapFactoryDefaults[K, V, WeakHashMap, Iterable] { - override def empty = new WeakHashMap[K, V] - override def mapFactory: MapFactory[WeakHashMap] = WeakHashMap - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "WeakHashMap" -} - -/** $factoryInfo - * @define Coll `WeakHashMap` - * @define coll weak hash map - */ -@SerialVersionUID(3L) -object WeakHashMap extends MapFactory[WeakHashMap] { - def empty[sealed K, sealed V]: WeakHashMap[K,V] = new WeakHashMap[K, V] - def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): WeakHashMap[K,V] = Growable.from(empty[K, V], it) - def newBuilder[sealed K, sealed V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) -} - diff --git a/tests/pos-special/stdlib/collection/mutable/package.scala b/tests/pos-special/stdlib/collection/mutable/package.scala deleted file mode 100644 index d658ca5bc65a..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/package.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection -import language.experimental.captureChecking - - -package object mutable { - @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") - type WrappedArray[X] = ArraySeq[X] - @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") - val WrappedArray = ArraySeq - @deprecated("Use Iterable instead of Traversable", "2.13.0") - type Traversable[X] = Iterable[X] - @deprecated("Use Iterable instead of Traversable", "2.13.0") - val Traversable = Iterable - @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") - type ArrayStack[X] = Stack[X] - @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") - val ArrayStack = Stack - - @deprecated("mutable.LinearSeq has been removed; use LinearSeq with mutable.Seq instead", "2.13.0") - type LinearSeq[X] = Seq[X] with scala.collection.LinearSeq[X] - - @deprecated("GrowingBuilder has been renamed to GrowableBuilder", "2.13.0") - type GrowingBuilder[Elem, To <: Growable[Elem]] = GrowableBuilder[Elem, To] - - @deprecated("IndexedOptimizedSeq has been renamed to IndexedSeq", "2.13.0") - type IndexedOptimizedSeq[A] = IndexedSeq[A] - - @deprecated("IndexedOptimizedBuffer has been renamed to IndexedBuffer", "2.13.0") - type IndexedOptimizedBuffer[A] = IndexedBuffer[A] -} diff --git a/tests/pos-special/stdlib/collection/package.scala b/tests/pos-special/stdlib/collection/package.scala deleted file mode 100644 index ad4686be1fb2..000000000000 --- a/tests/pos-special/stdlib/collection/package.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -import language.experimental.captureChecking - -package object collection { - @deprecated("Use Iterable instead of Traversable", "2.13.0") - type Traversable[+X] = Iterable[X] - @deprecated("Use Iterable instead of Traversable", "2.13.0") - val Traversable = Iterable - @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") - type TraversableOnce[+X] = IterableOnce[X] - @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") - val TraversableOnce = IterableOnce - @deprecated("Use SeqOps instead of SeqLike", "2.13.0") - type SeqLike[A, T] = SeqOps[A, Seq, T] - @deprecated("Use SeqOps (for the methods) or IndexedSeqOps (for fast indexed access) instead of ArrayLike", "2.13.0") - type ArrayLike[A] = SeqOps[A, Seq, Seq[A]] - - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenTraversableOnce[+X] = IterableOnce[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenTraversableOnce = IterableOnce - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenTraversable[+X] = Iterable[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenTraversable = Iterable - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenIterable[+X] = Iterable[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenIterable = Iterable - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenSeq[+X] = Seq[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenSeq = Seq - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenSet[X] = Set[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenSet = Set - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenMap[K, +V] = Map[K, V] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenMap = Map - - /** Needed to circumvent a difficulty between dotty and scalac concerning - * the right top type for a type parameter of kind * -> *. - * In Scalac, we can provide `Any`, as `Any` is kind-polymorphic. In dotty this is not allowed. - * In dotty, we can provide `[X] => Any`. But Scalac does not know lambda syntax. - */ - private[scala] type AnyConstr[X] = Any - - /** An extractor used to head/tail deconstruct sequences. */ - object +: { - /** Splits a sequence into head +: tail. - * @return Some((head, tail)) if sequence is non-empty. None otherwise. - */ - def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(A, C)] = - if(t.isEmpty) None - else Some(t.head -> t.tail) - } - - /** An extractor used to init/last deconstruct sequences. */ - object :+ { - /** Splits a sequence into init :+ last. - * @return Some((init, last)) if sequence is non-empty. None otherwise. 
- */
-  def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(C, A)] =
-    if(t.isEmpty) None
-    else Some(t.init -> t.last)
-  }
-}

From fbb7d7114898179541b3ef6277d6677dd26523ee Mon Sep 17 00:00:00 2001
From: odersky
Date: Mon, 30 Oct 2023 15:01:58 +0100
Subject: [PATCH 143/216] Rename annotation source file

---
 .../unchecked/uncheckedCapabilityLeaks.scala      | 12 ------------
 .../annotation/unchecked/uncheckedCaptures.scala  | 12 ++++++++++++
 2 files changed, 12 insertions(+), 12 deletions(-)
 create mode 100644 library/src/scala/annotation/unchecked/uncheckedCaptures.scala

diff --git a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
index 477ac6d742f7..e69de29bb2d1 100644
--- a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
+++ b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
@@ -1,12 +0,0 @@
-package scala.annotation
-package unchecked
-
-/** An annotation for mutable variables that are allowed to capture
- *  the root capability `cap`. Allowing this is not capture safe since
- *  it can cause leakage of capabilities from local scopes by assigning
- *  values retaining such capabilities to the annotated variable in
- *  an outer scope.
- */
-class uncheckedCaptures extends StaticAnnotation
-
-
diff --git a/library/src/scala/annotation/unchecked/uncheckedCaptures.scala b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala
new file mode 100644
index 000000000000..477ac6d742f7
--- /dev/null
+++ b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala
@@ -0,0 +1,12 @@
+package scala.annotation
+package unchecked
+
+/** An annotation for mutable variables that are allowed to capture
+ *  the root capability `cap`. Allowing this is not capture safe since
+ *  it can cause leakage of capabilities from local scopes by assigning
+ *  values retaining such capabilities to the annotated variable in
+ *  an outer scope.
+ */
+class uncheckedCaptures extends StaticAnnotation
+
+

From d85db26ab0c49ad8f67f5628bcba1c970242b569 Mon Sep 17 00:00:00 2001
From: odersky
Date: Mon, 30 Oct 2023 18:30:51 +0100
Subject: [PATCH 144/216] Also count @Sealed annotated abstract types as sealed

Also count abstract types that have a @Sealed annotation on their bound
as sealed. That way, we get free propagation into synthesized type
parameters. We should probably unify this scheme and `sealed` modifiers.
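For illustration, here are the two spellings that should now behave
alike (a sketch in test style, not code from this patch; `@Sealed`
stands for the annotation that `defn.Caps_SealedAnnot` denotes):

    def f[sealed A](x: A): A = x           // sealed via the modifier
    def g[B <: Any @Sealed](x: B): B = x   // sealed via the annotated bound

In both cases the type variable passes the sealedness test in
`disallowRootCapabilitiesIn`, so an annotated bound that is copied into
a synthesized type parameter carries its sealedness along for free.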
--- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index fab0689b4df2..c607e85e661d 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -163,7 +163,8 @@ object CheckCaptures: capt.println(i"disallow $t, $tp, $what, ${t.symbol.is(Sealed)}") t.info match case TypeBounds(_, hi) - if !t.symbol.is(Sealed) && !t.symbol.isParametricIn(carrier) => + if !t.symbol.is(Sealed) && !hi.hasAnnotation(defn.Caps_SealedAnnot) + && !t.symbol.isParametricIn(carrier) => if hi.isAny then report.error( em"""$what cannot $have $tp since From b5fe6d2400e1240127f0692ca21152165d8d29ba Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Oct 2023 21:26:57 +0100 Subject: [PATCH 145/216] Coarse restriction to disallow local roots in external types This needs to be refined further for class members, similar to how we check that private types cannot escape from a class API. --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 16 ++++++++++++++ tests/neg-custom-args/captures/filevar.scala | 2 +- .../neg-custom-args/captures/localcaps.check | 12 ++++++++++ .../neg-custom-args/captures/localcaps.scala | 2 +- tests/neg-custom-args/captures/pairs.check | 8 +++++++ tests/neg-custom-args/captures/pairs.scala | 4 ++-- .../recursive-leaking-local-cap.scala | 22 +++++++++++++++++++ .../captures/sealed-classes.scala | 21 ++++++++++++++++++ 8 files changed, 83 insertions(+), 4 deletions(-) create mode 100644 tests/neg-custom-args/captures/localcaps.check create mode 100644 tests/neg-custom-args/captures/recursive-leaking-local-cap.scala create mode 100644 tests/neg-custom-args/captures/sealed-classes.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index c607e85e661d..b29809a69427 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1299,6 +1299,20 @@ class CheckCaptures extends Recheck, SymTransformer: checker.traverse(tree.knownType) end healTypeParam + def checkNoLocalRootIn(sym: Symbol, info: Type, pos: SrcPos)(using Context): Unit = + val check = new TypeTraverser: + def traverse(tp: Type) = tp match + case tp: TermRef if tp.isLocalRootCapability => + if tp.localRootOwner == sym then + report.error(i"local root $tp cannot appear in type of $sym", pos) + case tp: ClassInfo => + traverseChildren(tp) + for mbr <- tp.decls do + if !mbr.is(Private) then checkNoLocalRootIn(sym, mbr.info, mbr.srcPos) + case _ => + traverseChildren(tp) + check.traverse(info) + /** Perform the following kinds of checks * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. 
@@ -1322,6 +1336,8 @@ class CheckCaptures extends Recheck, SymTransformer: checkBounds(normArgs, tl) args.lazyZip(tl.paramNames).foreach(healTypeParam(_, _, fun.symbol)) case _ => + case _: ValOrDefDef | _: TypeDef => + checkNoLocalRootIn(tree.symbol, tree.symbol.info, tree.symbol.srcPos) case _ => end check end checker diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index c8280e2ff3b7..34588617c0b8 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -5,7 +5,7 @@ class File: def write(x: String): Unit = ??? class Service: - var file: File^{cap[Service]} = uninitialized + var file: File^{cap[Service]} = uninitialized // error def log = file.write("log") def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T = diff --git a/tests/neg-custom-args/captures/localcaps.check b/tests/neg-custom-args/captures/localcaps.check new file mode 100644 index 000000000000..b09702749d10 --- /dev/null +++ b/tests/neg-custom-args/captures/localcaps.check @@ -0,0 +1,12 @@ +-- Error: tests/neg-custom-args/captures/localcaps.scala:4:12 ---------------------------------------------------------- +4 | def x: C^{cap[d]} = ??? // error + | ^^^^^^ + | `d` does not name an outer definition that represents a capture level +-- Error: tests/neg-custom-args/captures/localcaps.scala:9:47 ---------------------------------------------------------- +9 | private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error + | ^^^^^^^ + | `z2` does not name an outer definition that represents a capture level +-- Error: tests/neg-custom-args/captures/localcaps.scala:6:6 ----------------------------------------------------------- +6 | def y: C^{cap[C]} = ??? // error + | ^ + | local root (cap[C] : caps.Cap) cannot appear in type of class C diff --git a/tests/neg-custom-args/captures/localcaps.scala b/tests/neg-custom-args/captures/localcaps.scala index f5227bfef96b..049a1ee0d775 100644 --- a/tests/neg-custom-args/captures/localcaps.scala +++ b/tests/neg-custom-args/captures/localcaps.scala @@ -3,7 +3,7 @@ class C: def x: C^{cap[d]} = ??? // error - def y: C^{cap[C]} = ??? // ok + def y: C^{cap[C]} = ??? 
// error private val z = (c0: caps.Cap) => (x: Int) => (c: C^{cap[C]}) => x // ok private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error diff --git a/tests/neg-custom-args/captures/pairs.check b/tests/neg-custom-args/captures/pairs.check index 38712469879f..9d1b3a76e164 100644 --- a/tests/neg-custom-args/captures/pairs.check +++ b/tests/neg-custom-args/captures/pairs.check @@ -12,3 +12,11 @@ | Required: Cap^ ->{d} Unit | | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/pairs.scala:6:8 --------------------------------------------------------------- +6 | def fst: Cap^{cap[Pair]} ->{x} Unit = x // error + | ^ + | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair +-- Error: tests/neg-custom-args/captures/pairs.scala:7:8 --------------------------------------------------------------- +7 | def snd: Cap^{cap[Pair]} ->{y} Unit = y // error + | ^ + | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair diff --git a/tests/neg-custom-args/captures/pairs.scala b/tests/neg-custom-args/captures/pairs.scala index 4fc495d60f95..99b27639f729 100644 --- a/tests/neg-custom-args/captures/pairs.scala +++ b/tests/neg-custom-args/captures/pairs.scala @@ -3,8 +3,8 @@ object Monomorphic2: class Pair(x: Cap => Unit, y: Cap => Unit): - def fst: Cap^{cap[Pair]} ->{x} Unit = x - def snd: Cap^{cap[Pair]} ->{y} Unit = y + def fst: Cap^{cap[Pair]} ->{x} Unit = x // error + def snd: Cap^{cap[Pair]} ->{y} Unit = y // error def test(c: Cap, d: Cap) = def f(x: Cap): Unit = if c == x then () diff --git a/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala new file mode 100644 index 000000000000..0daecafbf9d0 --- /dev/null +++ b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala @@ -0,0 +1,22 @@ +import language.experimental.captureChecking +trait Cap: + def use: Int = 42 + +def usingCap[sealed T](op: Cap^ => T): T = ??? + +def badTest(): Unit = + def bad(b: Boolean)(c: Cap^): Cap^{cap[bad]} = // error + if b then c + else + val leaked = usingCap[Cap^{cap[bad]}](bad(true)) + leaked.use // boom + c + + usingCap[Unit]: c0 => + bad(false)(c0) + +class Bad: + def foo: Cap^{cap[Bad]} = ??? // error + private def bar: Cap^{cap[Bad]} = ??? // ok + + diff --git a/tests/neg-custom-args/captures/sealed-classes.scala b/tests/neg-custom-args/captures/sealed-classes.scala new file mode 100644 index 000000000000..b8cb0acbf5c5 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-classes.scala @@ -0,0 +1,21 @@ +abstract class C1[A1]: + def set(x: A1): Unit + def get: A1 + +trait Co[+A]: + def get: A + +class C2[sealed A2] extends C1[A2], Co[A2]: // ok + private var x: A2 = ??? 
+ def set(x: A2): Unit = + this.x = x + def get: A2 = x + +class C3[A3] extends C2[A3] // error + +abstract class C4[sealed A4] extends Co[A4] // ok + +abstract class C5[sealed +A5] extends Co[A5] // ok + +abstract class C6[A6] extends C5[A6] // error + From 1fb06194404ae67decfd94383778f80d644ef57c Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 15:20:42 +0100 Subject: [PATCH 146/216] Require array element types to be sealed --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 6 ++++ .../src/dotty/tools/dotc/cc/CaptureSet.scala | 1 + .../dotty/tools/dotc/cc/CheckCaptures.scala | 31 ++++++++++++++----- .../dotty/tools/dotc/transform/Recheck.scala | 6 ++-- tests/neg-custom-args/captures/buffers.check | 26 ++++++++++++++++ tests/neg-custom-args/captures/buffers.scala | 30 ++++++++++++++++++ 6 files changed, 90 insertions(+), 10 deletions(-) create mode 100644 tests/neg-custom-args/captures/buffers.check create mode 100644 tests/neg-custom-args/captures/buffers.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index dccf07ba199e..0fe79da30ca5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -206,6 +206,12 @@ extension (tp: Type) case _: TypeRef | _: AppliedType => tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot) case _ => false + def isSealed(using Context): Boolean = tp match + case tp: TypeParamRef => tp.underlying.isSealed + case tp: TypeBounds => tp.hi.hasAnnotation(defn.Caps_SealedAnnot) + case tp: TypeRef => tp.symbol.is(Sealed) || tp.info.isSealed // TODO: drop symbol flag? + case _ => false + /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 2586d449dfd4..7261c760aa01 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -872,6 +872,7 @@ object CaptureSet: upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) if variance > 0 || isExact then upper else if variance < 0 then CaptureSet.empty + else if ctx.mode.is(Mode.Printing) then upper else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") /** Apply `f` to each element in `xs`, and join result sets with `++` */ diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index b29809a69427..bd27fd30580b 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -148,7 +148,7 @@ object CheckCaptures: val check = new TypeTraverser: extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = - val encl = carrier.owner.enclosingMethodOrClass + val encl = carrier.maybeOwner.enclosingMethodOrClass if encl.isClass then tparam.isParametricIn(encl) else def recur(encl: Symbol): Boolean = @@ -160,11 +160,9 @@ object CheckCaptures: def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => - capt.println(i"disallow $t, $tp, $what, ${t.symbol.is(Sealed)}") + capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") t.info match - case TypeBounds(_, hi) - if !t.symbol.is(Sealed) && !hi.hasAnnotation(defn.Caps_SealedAnnot) - && !t.symbol.isParametricIn(carrier) 
=>
+              case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) =>
                 if hi.isAny then
                   report.error(
                     em"""$what cannot $have $tp since
@@ -543,8 +541,8 @@ class CheckCaptures extends Recheck, SymTransformer:
     val TypeApply(fn, args) = tree
     val polyType = atPhase(thisPhase.prev):
       fn.tpe.widen.asInstanceOf[TypeLambda]
-    for case (arg: TypeTree, pinfo, pname) <- args.lazyZip(polyType.paramInfos).lazyZip((polyType.paramNames)) do
-      if pinfo.bounds.hi.hasAnnotation(defn.Caps_SealedAnnot) then
+    for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do
+      if formal.isSealed then
         def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else ""
         disallowRootCapabilitiesIn(arg.knownType, fn.symbol,
           i"Sealed type variable $pname", "be instantiated to",
@@ -1313,6 +1311,23 @@ class CheckCaptures extends Recheck, SymTransformer:
       traverseChildren(tp)
     check.traverse(info)
 
+  def checkArraysAreSealedIn(tp: Type, pos: SrcPos)(using Context): Unit =
+    val check = new TypeTraverser:
+      def traverse(t: Type): Unit =
+        t match
+          case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass =>
+            if !(pos.span.isSynthetic && ctx.reporter.errorsReported) then
+              CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol,
+                "Array", "have element type",
+                "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.",
+                pos)
+            traverseChildren(t)
+          case defn.RefinedFunctionOf(rinfo: MethodType) =>
+            traverse(rinfo)
+          case _ =>
+            traverseChildren(t)
+    check.traverse(tp)
+
   /** Perform the following kinds of checks
    *  - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`.
    *  - Check that arguments of TypeApplys and AppliedTypes conform to their bounds.
@@ -1338,6 +1353,8 @@ class CheckCaptures extends Recheck, SymTransformer:
         checkBounds(normArgs, tl)
         args.lazyZip(tl.paramNames).foreach(healTypeParam(_, _, fun.symbol))
       case _ =>
       case _: ValOrDefDef | _: TypeDef =>
        checkNoLocalRootIn(tree.symbol, tree.symbol.info, tree.symbol.srcPos)
+      case tree: TypeTree =>
+        checkArraysAreSealedIn(tree.tpe, tree.srcPos)
       case _ =>
   end check
 end checker
diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala
index 9833b3cf177f..b15a58b98b6f 100644
--- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala
@@ -596,9 +596,9 @@ abstract class Recheck extends Phase, SymTransformer:
 
   /** Show tree with rechecked types instead of the types stored in the `.tpe` field */
   override def show(tree: untpd.Tree)(using Context): String =
-    atPhase(thisPhase) {
-      super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree]))
-    }
+    atPhase(thisPhase):
+      withMode(Mode.Printing):
+        super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree]))
 end Recheck
 
 /** A class that can be used to test basic rechecking without any customization */
diff --git a/tests/neg-custom-args/captures/buffers.check b/tests/neg-custom-args/captures/buffers.check
new file mode 100644
index 000000000000..cdb7baa852fb
--- /dev/null
+++ b/tests/neg-custom-args/captures/buffers.check
@@ -0,0 +1,26 @@
+-- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------
+11 |  var elems: Array[A] = new Array[A](10) // error // error
+   |      ^
+   |      mutable variable elems cannot have type Array[A] since
+   |      that type refers to the type variable A, which is not sealed.
+-- Error: tests/neg-custom-args/captures/buffers.scala:16:38 ----------------------------------------------------------- +16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error + | ^^^^^^^^^^^ + | Sealed type variable A cannot be instantiated to box A^? since + | that type refers to the type variable A, which is not sealed. + | This is often caused by a local capability in an argument of constructor ArrayBuffer + | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/buffers.scala:11:13 ----------------------------------------------------------- +11 | var elems: Array[A] = new Array[A](10) // error // error + | ^^^^^^^^ + | Array cannot have element type A since + | that type refers to the type variable A, which is not sealed. + | Since arrays are mutable, they have to be treated like variables, + | so their element type must be sealed. +-- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------ +22 | val x: Array[A] = new Array[A](10) // error + | ^^^^^^^^ + | Array cannot have element type A since + | that type refers to the type variable A, which is not sealed. + | Since arrays are mutable, they have to be treated like variables, + | so their element type must be sealed. diff --git a/tests/neg-custom-args/captures/buffers.scala b/tests/neg-custom-args/captures/buffers.scala new file mode 100644 index 000000000000..760ddab96ae5 --- /dev/null +++ b/tests/neg-custom-args/captures/buffers.scala @@ -0,0 +1,30 @@ +import reflect.ClassTag + +class Buffer[A] + +class ArrayBuffer[sealed A: ClassTag] extends Buffer[A]: + var elems: Array[A] = new Array[A](10) + def add(x: A): this.type = ??? + def at(i: Int): A = ??? + +class ArrayBufferBAD[A: ClassTag] extends Buffer[A]: + var elems: Array[A] = new Array[A](10) // error // error + def add(x: A): this.type = ??? + def at(i: Int): A = ??? + +object ArrayBuffer: + def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error + elems = xs.toArray + def apply[sealed A: ClassTag](xs: A*) = new ArrayBuffer: + elems = xs.toArray // ok + +class EncapsArray[A: ClassTag]: + val x: Array[A] = new Array[A](10) // error + + + + + + + + From 8540fb84da0d4f9beca0075194fd28ab21866ed4 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 17:05:03 +0100 Subject: [PATCH 147/216] Don't generate capture set variables for self types of pure classes The tricky thing here is how to recognize that a class is pure since that is known only during capture checking and we are at Setup, the phase before. But we can approximate by treating the `Pure` trait as definitely pure. 
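As a sketch of the intended effect (illustrative code, not part of this
patch):

    trait Marker extends Pure    // inherits the `Pure` marker trait, so it
                                 // is known to be pure already at Setup
    class Box extends Marker:
      def get: Int = 42          // no `CaptureSet.Var` is added to the self
                                 // type; it stays `Box`, not `Box^{...}`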
---
 compiler/src/dotty/tools/dotc/cc/Setup.scala  |  4 ++-
 .../dotty/tools/dotc/core/Definitions.scala   |  2 +-
 tests/pos-custom-args/captures/steppers.scala | 27 +++++++++++++++++++
 3 files changed, 31 insertions(+), 2 deletions(-)
 create mode 100644 tests/pos-custom-args/captures/steppers.scala

diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala
index 68fd79048f41..e90a8394f87d 100644
--- a/compiler/src/dotty/tools/dotc/cc/Setup.scala
+++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala
@@ -522,7 +522,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI:
     tree.symbol match
       case cls: ClassSymbol =>
         val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo
-        if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then
+        if ((selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic)
+            && !cls.isPureClass
+        then
           // add capture set to self type of nested classes if no self type is given explicitly.
           val newSelfType = CapturingType(cinfo.selfType, CaptureSet.Var(cls))
           val ps1 = inContext(ctx.withOwner(cls)):
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index 205d43cd07ca..40370973ebf0 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -1443,7 +1443,7 @@ class Definitions {
   /** Base classes that are assumed to be pure for the purposes of capture checking.
    *  Every class inheriting from a pure baseclass is pure.
    */
-  @tu lazy val pureBaseClasses = Set(defn.ThrowableClass)
+  @tu lazy val pureBaseClasses = Set(ThrowableClass, PureClass)
 
   /** Non-inheritable classes that are assumed to be pure for the purposes of capture checking.
    */
diff --git a/tests/pos-custom-args/captures/steppers.scala b/tests/pos-custom-args/captures/steppers.scala
new file mode 100644
index 000000000000..815ac938b492
--- /dev/null
+++ b/tests/pos-custom-args/captures/steppers.scala
@@ -0,0 +1,27 @@
+
+trait Stepper[+A]:
+  this: Stepper[A]^ =>
+
+object Stepper:
+  trait EfficientSplit
+
+sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure
+
+trait IterableOnce[+A] extends Any:
+  this: IterableOnce[A]^ =>
+  def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = ???
+
+sealed abstract class ArraySeq[sealed T] extends IterableOnce[T], Pure:
+  def array: Array[_]
+
+  def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] =
+    val arr = array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]]).asInstanceOf[Array[T]]
+    ArraySeq.make(arr).asInstanceOf[ArraySeq[T]]
+
+object ArraySeq:
+
+  def make[sealed T](x: Array[T]): ArraySeq[T] = ???
+
+  final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T], Pure:
+    override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S & Stepper.EfficientSplit = ???
+ From b50fe548b63d71da5486bd0533d15f730a058faf Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 22:54:28 +0100 Subject: [PATCH 148/216] Avoid reporting post check messages several times --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 1 + .../tools/dotc/reporting/UniqueMessagePositions.scala | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index e90a8394f87d..8ba53693870c 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -707,4 +707,5 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def postCheck()(using Context): Unit = for chk <- todoAtPostCheck do chk(ctx) + todoAtPostCheck.clear() end Setup \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala index 98fd7da3032a..71b2636ab8ed 100644 --- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala +++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala @@ -25,14 +25,14 @@ trait UniqueMessagePositions extends Reporter { || dia.pos.exists && !ctx.settings.YshowSuppressedErrors.value - && (dia.pos.start to dia.pos.end).exists(pos => - positions.get((ctx.source, pos)).exists(_.hides(dia))) + && (dia.pos.start to dia.pos.end).exists: offset => + positions.get((ctx.source, offset)).exists(_.hides(dia)) override def markReported(dia: Diagnostic)(using Context): Unit = if dia.pos.exists then - for (pos <- dia.pos.start to dia.pos.end) - positions.get(ctx.source, pos) match + for offset <- dia.pos.start to dia.pos.end do + positions.get((ctx.source, offset)) match case Some(dia1) if dia1.hides(dia) => - case _ => positions((ctx.source, pos)) = dia + case _ => positions((ctx.source, offset)) = dia super.markReported(dia) } From 876e5ee8a1d872d86f751751b58872d764295974 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 10:37:22 +0100 Subject: [PATCH 149/216] Don't flag wildcard array arguments for not being sealed --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index bd27fd30580b..94aff4d314a4 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -21,7 +21,7 @@ import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} import StdNames.nme -import NameKinds.DefaultGetterName +import NameKinds.{DefaultGetterName, WildcardParamName} import reporting.trace /** The capture checker */ @@ -1316,7 +1316,9 @@ class CheckCaptures extends Recheck, SymTransformer: def traverse(t: Type): Unit = t match case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass => - if !(pos.span.isSynthetic && ctx.reporter.errorsReported) then + if !(pos.span.isSynthetic && ctx.reporter.errorsReported) + && !arg.typeSymbol.name.is(WildcardParamName) + then CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol, "Array", "have element type", "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.", @@ -1339,10 +1341,11 @@ class CheckCaptures extends Recheck, SymTransformer: val lctx = tree match case _: DefTree 
| _: TypeDef if tree.symbol.exists => ctx.withOwner(tree.symbol) case _ => ctx - traverseChildren(tree)(using lctx) - check(tree) + trace(i"post check $tree"): + traverseChildren(tree)(using lctx) + check(tree) def check(tree: Tree)(using Context) = tree match - case t @ TypeApply(fun, args) => + case TypeApply(fun, args) => fun.knownType.widen match case tl: PolyType => val normArgs = args.lazyZip(tl.paramInfos).map: (arg, bounds) => From 84f313caf35baa9e35693395a85657272f78a9af Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 11:36:27 +0100 Subject: [PATCH 150/216] Fix isPureClass test --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 6 +++++- .../captures/exception-definitions.check | 8 ++++---- tests/neg-custom-args/captures/leaked-curried.check | 11 ++++------- tests/neg-custom-args/captures/leaked-curried.scala | 4 ++-- 4 files changed, 15 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 0fe79da30ca5..40e94ebde5dd 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -231,7 +231,11 @@ extension (cls: ClassSymbol) && bc.givenSelfType.dealiasKeepAnnots.match case CapturingType(_, refs) => refs.isAlwaysEmpty case RetainingType(_, refs) => refs.isEmpty - case selfType => selfType.exists && selfType.captureSet.isAlwaysEmpty + case selfType => + isCaptureChecking // At Setup we have not processed self types yet, so + // unless a self type is explicitly given, we can't tell + // and err on the side of impure. + && selfType.exists && selfType.captureSet.isAlwaysEmpty extension (sym: Symbol) diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 16d623e64f7c..4b1fe0273f52 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -6,8 +6,8 @@ -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ - |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable --- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 ----------------------------------------------- + |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Err2 +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:13 ---------------------------------------------- 8 | class Err3(c: Any^) extends Exception // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of pure base class class Throwable + | ^ + | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of the self type of class Err3 diff --git a/tests/neg-custom-args/captures/leaked-curried.check b/tests/neg-custom-args/captures/leaked-curried.check index c23d1516acf5..3f0a9800a4ec 100644 --- a/tests/neg-custom-args/captures/leaked-curried.check +++ b/tests/neg-custom-args/captures/leaked-curried.check @@ -2,10 +2,7 @@ 14 | () => () => io // error | ^^ |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Fuzz --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/leaked-curried.scala:15:10 ------------------------------- 
-15 | class Foo extends Box, Pure: // error - | ^ - | illegal inheritance: self type Foo^{io} of class Foo does not conform to self type Pure - | of parent trait Pure - | - | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/leaked-curried.scala:17:20 ---------------------------------------------------- +17 | () => () => io // error + | ^^ + |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Foo diff --git a/tests/neg-custom-args/captures/leaked-curried.scala b/tests/neg-custom-args/captures/leaked-curried.scala index a7c48219b450..f9238259e065 100644 --- a/tests/neg-custom-args/captures/leaked-curried.scala +++ b/tests/neg-custom-args/captures/leaked-curried.scala @@ -12,8 +12,8 @@ def main(): Unit = self => val get: () ->{} () ->{io} Cap^ = () => () => io // error - class Foo extends Box, Pure: // error + class Foo extends Box, Pure: val get: () ->{} () ->{io} Cap^ = - () => () => io + () => () => io // error new Foo val bad = leaked.get()().use() // using a leaked capability From f663665f607023ede1e5d8550f751f34c57dd874 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 15:19:47 +0100 Subject: [PATCH 151/216] Make sealed an annotation # Conflicts: # tests/pos-special/stdlib/collection/ArrayOps.scala --- compiler/src/dotty/tools/dotc/typer/Namer.scala | 9 ++++++++- tests/neg/class-mods.scala | 2 +- tests/pos-custom-args/captures/sealed-value-class.scala | 3 +++ 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 tests/pos-custom-args/captures/sealed-value-class.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 7ef552e3661c..5361f37c2a76 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1042,7 +1042,14 @@ class Namer { typer: Typer => tp val rhs1 = typedAheadType(rhs) - val rhsBodyType: TypeBounds = addVariances(rhs1.tpe).toBounds + val rhsBodyType: TypeBounds = + val bounds = addVariances(rhs1.tpe).toBounds + if sym.is(Sealed) then + sym.resetFlag(Sealed) + bounds.derivedTypeBounds(bounds.lo, + AnnotatedType(bounds.hi, Annotation(defn.Caps_SealedAnnot, rhs1.span))) + else bounds + val unsafeInfo = if (isDerived) rhsBodyType else abstracted(rhsBodyType) def opaqueToBounds(info: Type): Type = diff --git a/tests/neg/class-mods.scala b/tests/neg/class-mods.scala index 60e9fb279364..cf4348ad42d7 100644 --- a/tests/neg/class-mods.scala +++ b/tests/neg/class-mods.scala @@ -2,7 +2,7 @@ open final class Foo1 // error sealed open class Foo2 // error open type T1 // error -sealed type T2 // error +type T2 // ok abstract type T3 // error abstract open type T4 // error diff --git a/tests/pos-custom-args/captures/sealed-value-class.scala b/tests/pos-custom-args/captures/sealed-value-class.scala new file mode 100644 index 000000000000..b5f25bf2d203 --- /dev/null +++ b/tests/pos-custom-args/captures/sealed-value-class.scala @@ -0,0 +1,3 @@ +class Ops[sealed A](xs: Array[A]) extends AnyVal: + + def f(p: A => Boolean): Array[A] = xs From d778a3dea59bf0796931de1446fddc972c4c65df Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 18:31:56 +0100 Subject: [PATCH 152/216] Avoid infinite recursions when checking F-bounded types --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala 
b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 94aff4d314a4..a8ff9d3d5955 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -14,7 +14,7 @@ import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPa import typer.Checking.{checkBounds, checkAppliedTypesIn} import typer.ErrorReporting.{Addenda, err} import typer.ProtoTypes.{AnySelectionProto, LhsProto} -import util.{SimpleIdentitySet, EqHashMap, SrcPos, Property} +import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.SymUtils.* import transform.{Recheck, PreRecheck} import Recheck.* @@ -147,6 +147,8 @@ object CheckCaptures: private def disallowRootCapabilitiesIn(tp: Type, carrier: Symbol, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = val check = new TypeTraverser: + private val seen = new EqHashSet[TypeRef] + extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = val encl = carrier.maybeOwner.enclosingMethodOrClass if encl.isClass then tparam.isParametricIn(encl) @@ -160,19 +162,21 @@ object CheckCaptures: def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => - capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") - t.info match - case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => - if hi.isAny then - report.error( - em"""$what cannot $have $tp since - |that type refers to the type variable $t, which is not sealed. - |$addendum""", - pos) - else - traverse(hi) - case _ => - traverseChildren(t) + if !seen.contains(t) then + capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") + seen += t + t.info match + case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => + if hi.isAny then + report.error( + em"""$what cannot $have $tp since + |that type refers to the type variable $t, which is not sealed. 
+ |$addendum""", + pos) + else + traverse(hi) + case _ => + traverseChildren(t) case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot => () case t => From e033cd1631803d1d874cd179076c537e3e9b2e8f Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 21:43:46 +0100 Subject: [PATCH 153/216] Survive "cannot establish a reference" errors in TreeTypeMap --- compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 955892b2ae22..d2e18729836b 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -105,7 +105,8 @@ class TreeTypeMap( tree1.withType(mapType(tree1.tpe)) match { case id: Ident => if needsSelect(id.tpe) then - ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + try ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + catch case ex: TypeError => super.transform(id) else super.transform(id) case sel: Select => From 9d31fb2311730560098f7be1d5b8d0115315cfca Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 3 Nov 2023 11:08:46 +0100 Subject: [PATCH 154/216] Make SubstRecThis typemap idempotent --- compiler/src/dotty/tools/dotc/core/Substituters.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index 5a641416b3e1..bd30177adcb4 100644 --- a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -189,7 +189,7 @@ object Substituters: def apply(tp: Type): Type = substThis(tp, from, to, this)(using mapCtx) } - final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap { + final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap { def apply(tp: Type): Type = substRecThis(tp, from, to, this)(using mapCtx) } From 7d950b799cce572a13a520133d4a19caa128276f Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 3 Nov 2023 17:52:22 +0100 Subject: [PATCH 155/216] Better error message for capture errors involving self types --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index a8ff9d3d5955..066bba19252c 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -263,11 +263,12 @@ class CheckCaptures extends Recheck, SymTransformer: pos, provenance) /** Check subcapturing `cs1 <: cs2`, report error on failure */ - def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context) = + def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, + provenance: => String = "", cs1description: String = "")(using Context) = checkOK( cs1.subCaptures(cs2, frozen = false), - if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head} is not" - else i"references $cs1 are not all", + if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head}$cs1description is not" + else i"references $cs1$cs1description are not all", pos, provenance) /** The current environment */ @@ -683,9 +684,15 @@ class CheckCaptures extends Recheck, SymTransformer: if 
!param.hasAnnotation(defn.ConstructorOnlyAnnot) then checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) for pureBase <- cls.pureBaseClass do // (4) + def selfType = impl.body + .collect: + case TypeDef(tpnme.SELF, rhs) => rhs + .headOption + .getOrElse(tree) + .orElse(tree) checkSubset(thisSet, CaptureSet.empty.withDescription(i"of pure base class $pureBase"), - tree.srcPos) + selfType.srcPos, cs1description = " captured by this self type") super.recheckClassDef(tree, impl, cls) finally curEnv = saved From 5610730b9c64f24459dccc6323cc27314c7898e2 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 4 Nov 2023 10:54:57 +0100 Subject: [PATCH 156/216] Add sealed refs test and fix check files of other tests --- tests/neg-custom-args/captures/cc-this.check | 2 +- tests/neg-custom-args/captures/cc-this2.check | 14 +++++-- .../captures/cc-this2/D_2.scala | 2 +- .../captures/exception-definitions.check | 9 ++-- .../captures/exception-definitions.scala | 4 +- .../captures/sealed-refs.scala | 42 +++++++++++++++++++ 6 files changed, 60 insertions(+), 13 deletions(-) create mode 100644 tests/neg-custom-args/captures/sealed-refs.scala diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check index 335302c5c259..070e815d6d45 100644 --- a/tests/neg-custom-args/captures/cc-this.check +++ b/tests/neg-custom-args/captures/cc-this.check @@ -12,4 +12,4 @@ -- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------ 17 | class C4(val f: () => Int) extends C3 // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | reference (C4.this.f : () => Int) is not included in the allowed capture set {} of pure base class class C3 + |reference (C4.this.f : () => Int) captured by this self type is not included in the allowed capture set {} of pure base class class C3 diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index 5e43a45b67f5..bd9a1085d262 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -1,6 +1,12 @@ --- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -------------------------------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:3:8 -------------------------------------------------------- +3 | this: D^ => // error + | ^^ + |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class C +-- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- 2 |class D extends C: // error - |^ - |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class C -3 | this: D^ => + | ^ + | illegal inheritance: self type D^ of class D does not conform to self type C + | of parent class C + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/cc-this2/D_2.scala b/tests/neg-custom-args/captures/cc-this2/D_2.scala index b22e5e456092..de1a722f73a9 100644 --- a/tests/neg-custom-args/captures/cc-this2/D_2.scala +++ b/tests/neg-custom-args/captures/cc-this2/D_2.scala @@ -1,3 +1,3 @@ class D extends C: // error - this: D^ => + this: D^ => // error diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 4b1fe0273f52..72b88f252e59 100644 --- 
a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -1,8 +1,7 @@ --- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 ----------------------------------------------- -2 |class Err extends Exception: // error - |^ - |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class Throwable -3 | self: Err^ => +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:3:8 ----------------------------------------------- +3 | self: Err^ => // error + | ^^^^ + |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala index a19b751825b8..fbc9f3fd1d33 100644 --- a/tests/neg-custom-args/captures/exception-definitions.scala +++ b/tests/neg-custom-args/captures/exception-definitions.scala @@ -1,6 +1,6 @@ -class Err extends Exception: // error - self: Err^ => +class Err extends Exception: + self: Err^ => // error def test(c: Any^) = class Err2 extends Exception: diff --git a/tests/neg-custom-args/captures/sealed-refs.scala b/tests/neg-custom-args/captures/sealed-refs.scala new file mode 100644 index 000000000000..05fa483acf28 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-refs.scala @@ -0,0 +1,42 @@ +class Ref[sealed A](init: A): + this: Ref[A]^ => + private var x: A = init + def get: A = x + def set(x: A): Unit = this.x = x + +class It[X]: + this: It[X]^ => + +def f1[B1](x: B1, next: B1 -> B1) = + var r = x // ok + r = next(x) + r + +def f2[B2](x: B2, next: B2 -> B2) = + val r = Ref[B2](x) // error + r.set(next(x)) + r.get + +def g[sealed B](x: B, next: B -> B) = + val r = Ref[B](x) // ok + r.set(next(x)) + r.get + +import annotation.unchecked.uncheckedCaptures + +def h[B](x: B, next: B -> B) = + val r = Ref[B @uncheckedCaptures](x) // ok + r.set(next(x)) + r.get + +def f3[B](x: B, next: B -> B) = + val r: Ref[B^{cap[f3]}] = Ref[B^{cap[f3]}](x) // error + r.set(next(x)) + val y = r.get + () + +def f4[B](x: B, next: B -> B) = + val r: Ref[B]^{cap[f4]} = Ref[B](x) // error + r.set(next(x)) + val y = r.get + () \ No newline at end of file From d4c084c4269a0bf43e2e402a204afb7d89307fd8 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 4 Nov 2023 12:20:15 +0100 Subject: [PATCH 157/216] Refine isParametric tests Mutable variables can appeal to parametricity only if they are not captured. We use "not captured by any closure" as a sound approximation for that, since variables themselves are currently not tracked, so we cannot use something more fine-grained.
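
To illustrate the refined rule, here is a minimal hypothetical sketch in the
style of the sealed-leaks tests touched by this patch (not itself part of the
patch): a mutable variable whose type mentions an unsealed type parameter stays
legal while it remains local to its defining method, and is rejected as soon as
a closure or a nested object captures it.

    // accepted: x never leaves `ok`, so parametricity applies
    def ok[T](y: T): Unit =
      var x: T = y
      x = y

    // rejected: x is captured by the returned closure, so it no
    // longer counts as local
    def leak[T](y: T): T => Unit =
      var x: T = y
      z => x = z

The second definition matches `g1` in the sealed-leaks.scala changes below,
which the refined check now reports with the note that `x` "does not count as
local since it is captured by an anonymous function".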
--- .../dotty/tools/dotc/cc/CheckCaptures.scala | 89 +++++++++++++++---- tests/neg-custom-args/captures/buffers.check | 6 +- tests/neg-custom-args/captures/levels.check | 4 +- .../captures/sealed-leaks.check | 50 +++++++++++ .../captures/sealed-leaks.scala | 32 ++++++- 5 files changed, 160 insertions(+), 21 deletions(-) create mode 100644 tests/neg-custom-args/captures/sealed-leaks.check diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 066bba19252c..48e2d7635a80 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -16,7 +16,7 @@ import typer.ErrorReporting.{Addenda, err} import typer.ProtoTypes.{AnySelectionProto, LhsProto} import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.SymUtils.* -import transform.{Recheck, PreRecheck} +import transform.{Recheck, PreRecheck, CapturedVars} import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} @@ -149,15 +149,25 @@ object CheckCaptures: private val seen = new EqHashSet[TypeRef] + /** Check that there is at least one method containing carrier and defined + * in the scope of tparam. E.g. this is OK: + * def f[T] = { ... var x: T ... } + * So is this: + * class C[T] { def f() = { class D { var x: T }}} + * But this is not OK: + * class C[T] { object o { var x: T }} + */ extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = - val encl = carrier.maybeOwner.enclosingMethodOrClass - if encl.isClass then tparam.isParametricIn(encl) - else - def recur(encl: Symbol): Boolean = - if tparam.owner == encl then true - else if encl.isStatic || !encl.exists then false - else recur(encl.owner.enclosingMethodOrClass) - recur(encl) + carrier.exists && { + val encl = carrier.owner.enclosingMethodOrClass + if encl.isClass then tparam.isParametricIn(encl) + else + def recur(encl: Symbol): Boolean = + if tparam.owner == encl then true + else if encl.isStatic || !encl.exists then false + else recur(encl.owner.enclosingMethodOrClass) + recur(encl) + } def traverse(t: Type) = t.dealiasKeepAnnots match @@ -168,9 +178,12 @@ object CheckCaptures: t.info match case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => if hi.isAny then + val detailStr = + if t eq tp then "variable" + else i"refers to the type variable $t, which" report.error( em"""$what cannot $have $tp since - |that type refers to the type variable $t, which is not sealed. + |that type $detailStr is not sealed. |$addendum""", pos) else @@ -549,7 +562,7 @@ class CheckCaptures extends Recheck, SymTransformer: for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do if formal.isSealed then def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" - disallowRootCapabilitiesIn(arg.knownType, fn.symbol, + disallowRootCapabilitiesIn(arg.knownType, NoSymbol, i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) @@ -590,13 +603,58 @@ class CheckCaptures extends Recheck, SymTransformer: openClosures = openClosures.tail end recheckClosureBlock + /** Maps mutable variables to the symbols that capture them (in the + * CheckCaptures sense, i.e. symbol is referred to from a different method + * than the one it is defined in). 
+ */ + private val capturedBy = util.HashMap[Symbol, Symbol]() + + /** Maps anonymous functions appearing as function arguments to + * the function that is called. + */ + private val anonFunCallee = util.HashMap[Symbol, Symbol]() + + /** Populates `capturedBy` and `anonFunCallee`. Called by `checkUnit`. + */ + private def collectCapturedMutVars(using Context) = new TreeTraverser: + def traverse(tree: Tree)(using Context) = tree match + case id: Ident => + val sym = id.symbol + if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then + val enclMeth = ctx.owner.enclosingMethod + if sym.enclosingMethod != enclMeth then + capturedBy(sym) = enclMeth + case Apply(fn, args) => + for case closureDef(mdef) <- args do + anonFunCallee(mdef.symbol) = fn.symbol + traverseChildren(tree) + case Inlined(_, bindings, expansion) => + traverse(bindings) + traverse(expansion) + case mdef: DefDef => + if !mdef.symbol.isInlineMethod then traverseChildren(tree) + case _ => + traverseChildren(tree) + override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Type = try if sym.is(Module) then sym.info // Modules are checked by checking the module class else if sym.is(Mutable) && !sym.hasAnnotation(defn.UncheckedCapturesAnnot) then - disallowRootCapabilitiesIn(tree.tpt.knownType, sym, - i"mutable $sym", "have type", "", sym.srcPos) + val (carrier, addendum) = capturedBy.get(sym) match + case Some(encl) => + val enclStr = + if encl.isAnonymousFunction then + val location = anonFunCallee.get(encl) match + case Some(meth) if meth.exists => i" argument in a call to $meth" + case _ => "" + s"an anonymous function$location" + else encl.show + (NoSymbol, i"\nNote that $sym does not count as local since it is captured by $enclStr") + case _ => + (sym, "") + disallowRootCapabilitiesIn( + tree.tpt.knownType, carrier, i"Mutable $sym", "have type", addendum, sym.srcPos) checkInferredResult(super.recheckValDef(tree, sym), tree) finally if !sym.is(Param) then @@ -1168,11 +1226,12 @@ class CheckCaptures extends Recheck, SymTransformer: private val setup: SetupAPI = thisPhase.prev.asInstanceOf[Setup] override def checkUnit(unit: CompilationUnit)(using Context): Unit = - setup.setupUnit(ctx.compilationUnit.tpdTree, completeDef) + setup.setupUnit(unit.tpdTree, completeDef) + collectCapturedMutVars.traverse(unit.tpdTree) if ctx.settings.YccPrintSetup.value then val echoHeader = "[[syntax tree at end of cc setup]]" - val treeString = show(ctx.compilationUnit.tpdTree) + val treeString = show(unit.tpdTree) report.echo(s"$echoHeader\n$treeString\n") withCaptureSetsExplained: diff --git a/tests/neg-custom-args/captures/buffers.check b/tests/neg-custom-args/captures/buffers.check index cdb7baa852fb..07acea3c48e3 100644 --- a/tests/neg-custom-args/captures/buffers.check +++ b/tests/neg-custom-args/captures/buffers.check @@ -1,7 +1,7 @@ -- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------ 11 | var elems: Array[A] = new Array[A](10) // error // error | ^ - | mutable variable elems cannot have type Array[A] since + | Mutable variable elems cannot have type Array[A] since | that type refers to the type variable A, which is not sealed. 
-- Error: tests/neg-custom-args/captures/buffers.scala:16:38 ----------------------------------------------------------- 16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error @@ -14,13 +14,13 @@ 11 | var elems: Array[A] = new Array[A](10) // error // error | ^^^^^^^^ | Array cannot have element type A since - | that type refers to the type variable A, which is not sealed. + | that type variable is not sealed. | Since arrays are mutable, they have to be treated like variables, | so their element type must be sealed. -- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------ 22 | val x: Array[A] = new Array[A](10) // error | ^^^^^^^^ | Array cannot have element type A since - | that type refers to the type variable A, which is not sealed. + | that type variable is not sealed. | Since arrays are mutable, they have to be treated like variables, | so their element type must be sealed. diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check index f91f90fb652f..c0cc7f0a759c 100644 --- a/tests/neg-custom-args/captures/levels.check +++ b/tests/neg-custom-args/captures/levels.check @@ -1,8 +1,8 @@ -- Error: tests/neg-custom-args/captures/levels.scala:6:16 ------------------------------------------------------------- 6 | private var v: T = init // error | ^ - | mutable variable v cannot have type T since - | that type refers to the type variable T, which is not sealed. + | Mutable variable v cannot have type T since + | that type variable is not sealed. -- Error: tests/neg-custom-args/captures/levels.scala:17:13 ------------------------------------------------------------ 17 | val _ = Ref[String => String]((x: String) => x) // error | ^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-custom-args/captures/sealed-leaks.check b/tests/neg-custom-args/captures/sealed-leaks.check new file mode 100644 index 000000000000..f7098eba32b6 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-leaks.check @@ -0,0 +1,50 @@ +-- [E129] Potential Issue Warning: tests/neg-custom-args/captures/sealed-leaks.scala:31:6 ------------------------------ +31 | () + | ^^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:12:27 ------------------------------------------------------ +12 | val later2 = usingLogFile[(() => Unit) | Null] { f => () => f.write(0) } // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Sealed type variable T cannot be instantiated to (() => Unit) | Null since + | that type captures the root capability `cap`. + | This is often caused by a local capability in an argument of method usingLogFile + | leaking as part of its result. +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/sealed-leaks.scala:19:26 --------------------------------- +19 | usingLogFile { f => x = f } // error + | ^ + | Found: (f : java.io.FileOutputStream^) + | Required: (java.io.FileOutputStream | Null)^{cap[Test2]} + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:30:10 ------------------------------------------------------ +30 | var x: T = y // error + | ^ + | Mutable variable x cannot have type T since + | that type variable is not sealed. 
+-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:39:8 ------------------------------------------------------- +39 | var x: T = y // error + | ^ + | Mutable variable x cannot have type T since + | that type variable is not sealed. + | + | Note that variable x does not count as local since it is captured by an anonymous function +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:43:8 ------------------------------------------------------- +43 | var x: T = y // error + | ^ + |Mutable variable x cannot have type T since + |that type variable is not sealed. + | + |Note that variable x does not count as local since it is captured by an anonymous function argument in a call to method identity +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:47:8 ------------------------------------------------------- +47 | var x: T = y // error + | ^ + | Mutable variable x cannot have type T since + | that type variable is not sealed. + | + | Note that variable x does not count as local since it is captured by method foo +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:11:14 ------------------------------------------------------ +11 | val later = usingLogFile { f => () => f.write(0) } // error + | ^^^^^^^^^^^^ + | local reference f leaks into outer capture set of type parameter T of method usingLogFile diff --git a/tests/neg-custom-args/captures/sealed-leaks.scala b/tests/neg-custom-args/captures/sealed-leaks.scala index a7acf77b5678..2555ba8a3e07 100644 --- a/tests/neg-custom-args/captures/sealed-leaks.scala +++ b/tests/neg-custom-args/captures/sealed-leaks.scala @@ -18,4 +18,34 @@ def Test2 = usingLogFile { f => x = f } // error - later() \ No newline at end of file + later() + +def Test3 = + def f[T](y: T) = + var x: T = y + () + + class C[T](y: T): + object o: + var x: T = y // error + () + + class C2[T](y: T): + def f = + var x: T = y // ok + () + + def g1[T](y: T): T => Unit = + var x: T = y // error + y => x = y + + def g2[T](y: T): T => Unit = + var x: T = y // error + identity(y => x = y) + + def g3[T](y: T): Unit = + var x: T = y // error + def foo = + x = y + () + From dccadb9e8c394043cafb3cc42c259480288144b5 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 Nov 2023 18:09:18 +0100 Subject: [PATCH 158/216] Don't recheck inherited trait parameters during capture checking The logic gets confused by the added capture refinements. 
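
For intuition, a rough sketch of the situation (hypothetical code; the capture
refinement shown is illustrative, not verbatim compiler output): RefChecks
normally verifies that a parameterized trait is typed the same way in a class
as in its superclass, but capture checking may refine a trait parameter's type
with capture sets that legitimately differ between those two views.

    trait Op(val op: () => Unit)
    class Base(f: () => Unit) extends Op(f)      // under cc, `op` may be refined,
                                                 // e.g. to () ->{f} Unit
    class Sub(g: () => Unit) extends Base(g), Op // Sub's view of `op` need not
                                                 // carry the {f} refinement

The check is therefore gated behind the new `checkInheritedTraitParameters`
flag, which `OverridingPairsCheckerCC` overrides to return `false`.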
--- .../dotty/tools/dotc/cc/CheckCaptures.scala | 2 + .../tools/dotc/transform/CapturedVars.scala | 55 +++++++------------ .../dotty/tools/dotc/typer/RefChecks.scala | 5 +- .../captures/sealed-lowerbound.scala | 12 ++++ 4 files changed, 38 insertions(+), 36 deletions(-) create mode 100644 tests/pos-custom-args/captures/sealed-lowerbound.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 48e2d7635a80..a49bd9f79351 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1190,6 +1190,8 @@ class CheckCaptures extends Recheck, SymTransformer: override def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = !setup.isPreCC(overriding) && !setup.isPreCC(overridden) + + override def checkInheritedTraitParameters: Boolean = false end OverridingPairsCheckerCC def traverse(t: Tree)(using Context) = diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index a018bbd1a3ac..202e3d72fa25 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -13,25 +13,20 @@ import core.NameKinds.TempResultName import core.Constants._ import util.Store import dotty.tools.uncheckedNN - -import scala.compiletime.uninitialized +import ast.tpd.* +import compiletime.uninitialized /** This phase translates variables that are captured in closures to * heap-allocated refs. */ class CapturedVars extends MiniPhase with IdentityDenotTransformer: thisPhase => - import ast.tpd._ override def phaseName: String = CapturedVars.name override def description: String = CapturedVars.description - private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = uninitialized - private def captured(using Context) = ctx.store(Captured) - - override def initContext(ctx: FreshContext): Unit = - Captured = ctx.addLocation(util.ReadOnlySet.empty) + private val captured = util.HashSet[Symbol]() private class RefInfo(using Context) { /** The classes for which a Ref type exists. 
*/ @@ -57,33 +52,10 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: myRefInfo.uncheckedNN } - private class CollectCaptured extends TreeTraverser { - private val captured = util.HashSet[Symbol]() - def traverse(tree: Tree)(using Context) = tree match { - case id: Ident => - val sym = id.symbol - if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm) { - val enclMeth = ctx.owner.enclosingMethod - if (sym.enclosingMethod != enclMeth) { - report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth") - captured += sym - } - } - case _ => - traverseChildren(tree) - } - def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = { - traverse(tree) - captured - } - } - - override def prepareForUnit(tree: Tree)(using Context): Context = { - val captured = atPhase(thisPhase) { - CollectCaptured().runOver(ctx.compilationUnit.tpdTree) - } - ctx.fresh.updateStore(Captured, captured) - } + override def prepareForUnit(tree: Tree)(using Context): Context = + captured.clear() + atPhase(thisPhase)(CapturedVars.collect(captured)).traverse(tree) + ctx /** The {Volatile|}{Int|Double|...|Object}Ref class corresponding to the class `cls`, * depending on whether the reference should be @volatile @@ -143,3 +115,16 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: object CapturedVars: val name: String = "capturedVars" val description: String = "represent vars captured by closures as heap objects" + + def collect(captured: util.HashSet[Symbol]): TreeTraverser = new: + def traverse(tree: Tree)(using Context) = tree match + case id: Ident => + val sym = id.symbol + if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then + val enclMeth = ctx.owner.enclosingMethod + if sym.enclosingMethod != enclMeth then + report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth") + captured += sym + case _ => + traverseChildren(tree) +end CapturedVars diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index eef88e76971e..af279844f370 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -267,6 +267,9 @@ object RefChecks { if !other.is(Deferred) then checkOverride(subtypeChecker, dcl, other) end checkAll + + // Disabled for capture checking since traits can get different parameter refinements + def checkInheritedTraitParameters: Boolean = true end OverridingPairsChecker /** 1. Check all members of class `clazz` for overriding conditions. @@ -851,7 +854,7 @@ object RefChecks { checkCaseClassInheritanceInvariant() } - if (!clazz.is(Trait)) { + if (!clazz.is(Trait) && checker.checkInheritedTraitParameters) { // check that parameterized base classes and traits are typed in the same way as from the superclass // I.e. 
say we have // diff --git a/tests/pos-custom-args/captures/sealed-lowerbound.scala b/tests/pos-custom-args/captures/sealed-lowerbound.scala new file mode 100644 index 000000000000..e848f784cddc --- /dev/null +++ b/tests/pos-custom-args/captures/sealed-lowerbound.scala @@ -0,0 +1,12 @@ +def foo[sealed B](x: B): B = x + +def bar[B, sealed A >: B](x: A): A = foo[A](x) + +class C[sealed A] + +class CV[sealed A](x: Int): + def this() = this: + val x = new C[A]: + println("foo") + 0 + From 3caf116571771df1adc251523dfe5d0b511fb7e2 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 Nov 2023 17:44:16 +0100 Subject: [PATCH 159/216] Original version of stdlib collections without capture checking --- .../dotty/tools/dotc/CompilationTests.scala | 2 +- tests/pos-special/stdlib/Test1.scala | 34 - tests/pos-special/stdlib/Test2.scala | 232 -- .../stdlib/collection/ArrayOps.scala | 1663 +++++++++++ .../stdlib/collection/BitSet.scala | 348 +++ .../stdlib/collection/BufferedIterator.scala | 32 + .../stdlib/collection/BuildFrom.scala | 122 + .../stdlib/collection/DefaultMap.scala | 21 + .../stdlib/collection/Factory.scala | 784 ++++++ .../stdlib/collection/Hashing.scala | 62 + .../stdlib/collection/IndexedSeq.scala | 3 +- .../stdlib/collection/IndexedSeqView.scala | 180 ++ .../stdlib/collection/Iterable.scala | 153 +- .../stdlib/collection/IterableOnce.scala | 122 +- .../stdlib/collection/Iterator.scala | 138 +- .../stdlib/collection/JavaConverters.scala | 335 +++ .../stdlib/collection/LazyZipOps.scala | 422 +++ .../stdlib/collection/LinearSeq.scala | 9 +- tests/pos-special/stdlib/collection/Map.scala | 19 +- .../stdlib/collection/MapView.scala | 187 ++ .../stdlib/collection/Searching.scala | 57 + tests/pos-special/stdlib/collection/Seq.scala | 38 +- .../stdlib/collection/SeqMap.scala | 40 + .../stdlib/collection/SeqView.scala | 209 ++ tests/pos-special/stdlib/collection/Set.scala | 269 ++ .../stdlib/collection/SortedMap.scala | 220 ++ .../stdlib/collection/SortedOps.scala | 90 + .../stdlib/collection/SortedSet.scala | 189 ++ .../stdlib/collection/Stepper.scala | 368 +++ .../stdlib/collection/StepperShape.scala | 114 + .../StrictOptimizedIterableOps.scala | 24 +- .../collection/StrictOptimizedMapOps.scala | 48 + .../collection/StrictOptimizedSeqOps.scala | 9 +- .../collection/StrictOptimizedSetOps.scala | 29 + .../StrictOptimizedSortedMapOps.scala | 46 + .../StrictOptimizedSortedSetOps.scala | 42 + .../stdlib/collection/StringOps.scala | 35 +- .../stdlib/collection/StringParsers.scala | 319 +++ .../pos-special/stdlib/collection/View.scala | 138 +- .../stdlib/collection/WithFilter.scala | 70 + .../collection/concurrent/BasicNode.java | 19 + .../collection/concurrent/CNodeBase.java | 37 + .../stdlib/collection/concurrent/Gen.java | 15 + .../collection/concurrent/INodeBase.java | 39 + .../collection/concurrent/MainNode.java | 46 + .../stdlib/collection/concurrent/Map.scala | 192 ++ .../collection/concurrent/TrieMap.scala | 1202 ++++++++ .../collection/convert/AsJavaConverters.scala | 260 ++ .../collection/convert/AsJavaExtensions.scala | 108 + .../convert/AsScalaConverters.scala | 207 ++ .../convert/AsScalaExtensions.scala | 93 + .../convert/ImplicitConversions.scala | 181 ++ .../convert/JavaCollectionWrappers.scala | 614 ++++ .../collection/convert/StreamExtensions.scala | 480 ++++ .../convert/impl/ArrayStepper.scala | 79 + .../convert/impl/BinaryTreeStepper.scala | 248 ++ .../convert/impl/BitSetStepper.scala | 118 + .../convert/impl/ChampStepper.scala | 245 ++ .../convert/impl/InOrderStepperBase.scala | 53 + 
.../convert/impl/IndexedSeqStepper.scala | 44 + .../convert/impl/IndexedStepperBase.scala | 40 + .../convert/impl/IteratorStepper.scala | 129 + .../convert/impl/NumericRangeStepper.scala | 38 + .../convert/impl/RangeStepper.scala | 40 + .../convert/impl/StringStepper.scala | 58 + .../convert/impl/TableStepper.scala | 138 + .../convert/impl/VectorStepper.scala | 131 + .../collection/generic/BitOperations.scala | 50 + .../generic/DefaultSerializationProxy.scala | 87 + .../collection/generic/IsIterable.scala | 164 ++ .../collection/generic/IsIterableOnce.scala | 71 + .../stdlib/collection/generic/IsMap.scala | 114 + .../stdlib/collection/generic/IsSeq.scala | 114 + .../collection/generic/Subtractable.scala | 62 + .../stdlib/collection/generic/package.scala | 34 + .../collection/immutable/ArraySeq.scala | 685 +++++ .../stdlib/collection/immutable/BitSet.scala | 375 +++ .../collection/immutable/ChampCommon.scala | 252 ++ .../stdlib/collection/immutable/HashMap.scala | 2423 ++++++++++++++++ .../stdlib/collection/immutable/HashSet.scala | 2123 ++++++++++++++ .../stdlib/collection/immutable/IntMap.scala | 502 ++++ .../collection/immutable/Iterable.scala | 2 - .../collection/immutable/LazyList.scala | 1381 +++++++++ .../stdlib/collection/immutable/List.scala | 25 +- .../stdlib/collection/immutable/ListMap.scala | 371 +++ .../stdlib/collection/immutable/ListSet.scala | 138 + .../stdlib/collection/immutable/LongMap.scala | 490 ++++ .../stdlib/collection/immutable/Map.scala | 692 +++++ .../collection/immutable/NumericRange.scala | 507 ++++ .../stdlib/collection/immutable/Queue.scala | 217 ++ .../stdlib/collection/immutable/Range.scala | 672 +++++ .../collection/immutable/RedBlackTree.scala | 1231 ++++++++ .../stdlib/collection/immutable/Seq.scala | 14 +- .../stdlib/collection/immutable/SeqMap.scala | 276 ++ .../stdlib/collection/immutable/Set.scala | 398 +++ .../collection/immutable/SortedMap.scala | 177 ++ .../collection/immutable/SortedSet.scala | 57 + .../stdlib/collection/immutable/Stream.scala | 568 ++++ .../immutable/StrictOptimizedSeqOps.scala | 80 + .../stdlib/collection/immutable/TreeMap.scala | 370 +++ .../collection/immutable/TreeSeqMap.scala | 649 +++++ .../stdlib/collection/immutable/TreeSet.scala | 296 ++ .../stdlib/collection/immutable/Vector.scala | 2474 +++++++++++++++++ .../collection/immutable/VectorMap.scala | 275 ++ .../collection/immutable/WrappedString.scala | 140 + .../stdlib/collection/immutable/package.scala | 29 + .../stdlib/collection/mutable/AnyRefMap.scala | 601 ++++ .../collection/mutable/ArrayBuffer.scala | 403 +++ .../collection/mutable/ArrayBuilder.scala | 522 ++++ .../collection/mutable/ArrayDeque.scala | 645 +++++ .../stdlib/collection/mutable/ArraySeq.scala | 347 +++ .../stdlib/collection/mutable/BitSet.scala | 392 +++ .../stdlib/collection/mutable/Buffer.scala | 19 +- .../stdlib/collection/mutable/Builder.scala | 14 +- .../mutable/CheckedIndexedSeqView.scala | 117 + .../stdlib/collection/mutable/Cloneable.scala | 22 + .../mutable/CollisionProofHashMap.scala | 888 ++++++ .../stdlib/collection/mutable/Growable.scala | 8 +- .../collection/mutable/GrowableBuilder.scala | 37 + .../stdlib/collection/mutable/HashMap.scala | 654 +++++ .../stdlib/collection/mutable/HashSet.scala | 456 +++ .../stdlib/collection/mutable/HashTable.scala | 417 +++ .../collection/mutable/ImmutableBuilder.scala | 31 + .../collection/mutable/IndexedSeq.scala | 83 + .../stdlib/collection/mutable/Iterable.scala | 5 +- .../collection/mutable/LinkedHashMap.scala | 509 ++++ 
.../collection/mutable/LinkedHashSet.scala | 348 +++ .../collection/mutable/ListBuffer.scala | 20 +- .../stdlib/collection/mutable/ListMap.scala | 82 + .../stdlib/collection/mutable/LongMap.scala | 673 +++++ .../stdlib/collection/mutable/Map.scala | 268 ++ .../stdlib/collection/mutable/MultiMap.scala | 115 + .../collection/mutable/MutationTracker.scala | 3 +- .../collection/mutable/OpenHashMap.scala | 306 ++ .../collection/mutable/PriorityQueue.scala | 402 +++ .../stdlib/collection/mutable/Queue.scala | 138 + .../collection/mutable/RedBlackTree.scala | 652 +++++ .../collection/mutable/ReusableBuilder.scala | 55 + .../stdlib/collection/mutable/Seq.scala | 1 - .../stdlib/collection/mutable/SeqMap.scala | 38 + .../stdlib/collection/mutable/Set.scala | 122 + .../collection/mutable/Shrinkable.scala | 5 +- .../stdlib/collection/mutable/SortedMap.scala | 103 + .../stdlib/collection/mutable/SortedSet.scala | 48 + .../stdlib/collection/mutable/Stack.scala | 142 + .../collection/mutable/StringBuilder.scala | 7 +- .../stdlib/collection/mutable/TreeMap.scala | 257 ++ .../stdlib/collection/mutable/TreeSet.scala | 218 ++ .../collection/mutable/UnrolledBuffer.scala | 442 +++ .../collection/mutable/WeakHashMap.scala | 55 + .../stdlib/collection/mutable/package.scala | 41 + .../stdlib/collection/package.scala | 80 + 152 files changed, 40745 insertions(+), 707 deletions(-) delete mode 100644 tests/pos-special/stdlib/Test1.scala delete mode 100644 tests/pos-special/stdlib/Test2.scala create mode 100644 tests/pos-special/stdlib/collection/ArrayOps.scala create mode 100644 tests/pos-special/stdlib/collection/BitSet.scala create mode 100644 tests/pos-special/stdlib/collection/BufferedIterator.scala create mode 100644 tests/pos-special/stdlib/collection/BuildFrom.scala create mode 100644 tests/pos-special/stdlib/collection/DefaultMap.scala create mode 100644 tests/pos-special/stdlib/collection/Factory.scala create mode 100644 tests/pos-special/stdlib/collection/Hashing.scala create mode 100644 tests/pos-special/stdlib/collection/IndexedSeqView.scala create mode 100644 tests/pos-special/stdlib/collection/JavaConverters.scala create mode 100644 tests/pos-special/stdlib/collection/LazyZipOps.scala create mode 100644 tests/pos-special/stdlib/collection/MapView.scala create mode 100644 tests/pos-special/stdlib/collection/Searching.scala create mode 100644 tests/pos-special/stdlib/collection/SeqMap.scala create mode 100644 tests/pos-special/stdlib/collection/SeqView.scala create mode 100644 tests/pos-special/stdlib/collection/Set.scala create mode 100644 tests/pos-special/stdlib/collection/SortedMap.scala create mode 100644 tests/pos-special/stdlib/collection/SortedOps.scala create mode 100644 tests/pos-special/stdlib/collection/SortedSet.scala create mode 100644 tests/pos-special/stdlib/collection/Stepper.scala create mode 100644 tests/pos-special/stdlib/collection/StepperShape.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala create mode 100644 tests/pos-special/stdlib/collection/StringParsers.scala create mode 100644 tests/pos-special/stdlib/collection/WithFilter.scala create mode 100644 tests/pos-special/stdlib/collection/concurrent/BasicNode.java create mode 100644 
tests/pos-special/stdlib/collection/concurrent/CNodeBase.java create mode 100644 tests/pos-special/stdlib/collection/concurrent/Gen.java create mode 100644 tests/pos-special/stdlib/collection/concurrent/INodeBase.java create mode 100644 tests/pos-special/stdlib/collection/concurrent/MainNode.java create mode 100644 tests/pos-special/stdlib/collection/concurrent/Map.scala create mode 100644 tests/pos-special/stdlib/collection/concurrent/TrieMap.scala create mode 100644 tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala create mode 100644 tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala create mode 100644 tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala create mode 100644 tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala create mode 100644 tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala create mode 100644 tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala create mode 100644 tests/pos-special/stdlib/collection/convert/StreamExtensions.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala create mode 100644 tests/pos-special/stdlib/collection/generic/BitOperations.scala create mode 100644 tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsIterable.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsMap.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsSeq.scala create mode 100644 tests/pos-special/stdlib/collection/generic/Subtractable.scala create mode 100644 tests/pos-special/stdlib/collection/generic/package.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/ArraySeq.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/BitSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/ChampCommon.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/HashMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/HashSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/IntMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/LazyList.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/ListMap.scala create mode 100644 
tests/pos-special/stdlib/collection/immutable/ListSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/LongMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Map.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/NumericRange.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Queue.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Range.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/SeqMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Set.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/SortedMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/SortedSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Stream.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/TreeMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Vector.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/VectorMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/WrappedString.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/package.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ArraySeq.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/BitSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Cloneable.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/HashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/HashSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/HashTable.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ListMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/LongMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Map.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/MultiMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Queue.scala create mode 100644 
tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/SeqMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Set.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/SortedMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/SortedSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Stack.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/TreeMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/TreeSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/package.scala create mode 100644 tests/pos-special/stdlib/collection/package.scala diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 798e998ef241..fa89c82fc7e7 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -44,7 +44,7 @@ class CompilationTests { // Run tests for legacy lazy vals compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), - compileDir("tests/pos-special/stdlib", defaultOptions), + compileDir("tests/pos-special/stdlib", allowDeepSubtypes), ) if scala.util.Properties.isJavaAtLeast("16") then diff --git a/tests/pos-special/stdlib/Test1.scala b/tests/pos-special/stdlib/Test1.scala deleted file mode 100644 index 9ee4e7cfa6a1..000000000000 --- a/tests/pos-special/stdlib/Test1.scala +++ /dev/null @@ -1,34 +0,0 @@ -import language.experimental.captureChecking -import collection.{View, Seq} -import collection.mutable.{ArrayBuffer, ListBuffer} - -import java.io.* - -object Test0: - - def usingLogFile[sealed T](op: FileOutputStream^ => T): T = - val logFile = FileOutputStream("log") - val result = op(logFile) - logFile.close() - result - - def test(xs: List[Int]) = - usingLogFile: f => - xs.map: x => - f.write(x) - x * x - -object Test1: - def test(it: Iterator[Int]^, v: View[Int]^) = - val isEven: Int ->{cap[test]} Boolean = _ % 2 == 0 - val it2 = it.filter(isEven) - val _: Iterator[Int]^{it, isEven} = it2 - val it2c: Iterator[Int]^{it2} = it2 - val v2 = v.filter(isEven) - val _: View[Int]^{v, isEven} = v2 - val v2c: View[Int]^{v2} = v2 - val v3 = v.drop(2) - val _: View[Int]^{v} = v3 - val v3c: View[Int]^{v3} = v3 - val (xs6, xs7) = v.partition(isEven) - val (xs6a, xs7a) = v.partition(_ % 2 == 0) diff --git a/tests/pos-special/stdlib/Test2.scala b/tests/pos-special/stdlib/Test2.scala deleted file mode 100644 index a59da522b183..000000000000 --- a/tests/pos-special/stdlib/Test2.scala +++ /dev/null @@ -1,232 +0,0 @@ -import scala.reflect.ClassTag -import language.experimental.captureChecking -import collection.{View, Seq} -import collection.mutable.{ArrayBuffer, ListBuffer} - -object Test { - - def seqOps(xs: Seq[Int]) = { // try with Seq[Int]^{cap} - val strPlusInt: (String, Int) => String = _ + _ - val intPlusStr: (Int, String) => String = _ + _ - val isEven: Int => Boolean = _ % 2 == 0 - val isNonNeg: Int => Boolean = _ 
> 0 - val flips: Int => List[Int] = x => x :: -x :: Nil - val x1 = xs.foldLeft("")(strPlusInt) - val y1: String = x1 - val x2 = xs.foldRight("")(intPlusStr) - val y2: String = x2 - val x3 = xs.indexWhere(isEven) - val y3: Int = x3 - val x4 = xs.head - val y4: Int = x4 - val x5 = xs.to(List) - val y5: List[Int] = x5 - val (xs6, xs7) = xs.partition(isEven) - val ys6: Seq[Int] = xs6 - val ys7: Seq[Int] = xs7 - val xs8 = xs.drop(2) - val ys8: Seq[Int] = xs8 - val xs9 = xs.map(isNonNeg) - val ys9: Seq[Boolean] = xs9 - val xs10 = xs.flatMap(flips) - val ys10: Seq[Int] = xs10 - val xs11 = xs ++ xs - val ys11: Seq[Int] = xs11 - val xs12 = xs ++ Nil - val ys12: Seq[Int] = xs12 - val xs13 = Nil ++ xs - val ys13: Seq[Int] = xs13 - val xs14 = xs ++ ("a" :: Nil) - val ys14: Seq[Any] = xs14 - val xs15 = xs.zip(xs9) - val ys15: Seq[(Int, Boolean)] = xs15 - val xs16 = xs.reverse - val ys16: Seq[Int] = xs16 - println("-------") - println(x1) - println(x2) - println(x3) - println(x4) - println(x5) - println(xs6) - println(xs7) - println(xs8) - println(xs9) - println(xs10) - println(xs11) - println(xs12) - println(xs13) - println(xs14) - println(xs15) - println(xs16) - } - - def iterOps(xs: => Iterator[Int]^) = - val strPlusInt: (String, Int) => String = _ + _ - val intPlusStr: (Int, String) => String = _ + _ - val isEven: Int ->{cap[iterOps]} Boolean = _ % 2 == 0 - val isNonNeg: Int => Boolean = _ > 0 - val flips: Int => List[Int] = x => x :: -x :: Nil - val x1 = xs.foldLeft("")(strPlusInt) - val y1: String = x1 - val x2 = xs.foldRight("")(intPlusStr) - val y2: String = x2 - val x4 = xs.next() - val y4: Int = x4 - val x5 = xs.to(List) - val y5: List[Int] = x5 - val (xs6, xs7) = xs.partition(isEven) - val ys6: Iterator[Int]^{xs6, isEven} = xs6 - val ys7: Iterator[Int]^{xs7, isEven} = xs7 - val (xs6a, xs7a) = xs.partition(_ % 2 == 0) - val ys6a: Iterator[Int]^{xs6} = xs6 - val ys7a: Iterator[Int]^{xs7} = xs7 - val xs8 = xs.drop(2) - val ys8: Iterator[Int]^{xs8} = xs8 - val xs9 = xs.map(isNonNeg) - val ys9: Iterator[Boolean]^{xs9} = xs9 - val xs10 = xs.flatMap(flips) - val ys10: Iterator[Int]^{xs10} = xs10 - val xs11 = xs ++ xs - val ys11: Iterator[Int]^{xs11} = xs11 - val xs12 = xs ++ Nil - val ys12: Iterator[Int]^{xs12} = xs12 - val xs13 = Nil ++ xs - val ys13: List[Int] = xs13 - val xs14 = xs ++ ("a" :: Nil) - val ys14: Iterator[Any]^{xs14} = xs14 - val xs15 = xs.zip(xs9) - val ys15: Iterator[(Int, Boolean)]^{xs15} = xs15 - println("-------") - println(x1) - println(x2) - println(x4) - println(x5) - println(xs6.to(List)) - println(xs7.to(List)) - println(xs8.to(List)) - println(xs9.to(List)) - println(xs10.to(List)) - println(xs11.to(List)) - println(xs12.to(List)) - println(xs13.to(List)) - println(xs14.to(List)) - println(xs15.to(List)) - - def viewOps(xs: View[Int]^) = { - val strPlusInt: (String, Int) => String = _ + _ - val intPlusStr: (Int, String) => String = _ + _ - val isEven: Int ->{cap[viewOps]} Boolean = _ % 2 == 0 - val isNonNeg: Int => Boolean = _ > 0 - val flips: Int => List[Int] = x => x :: -x :: Nil - val x1 = xs.foldLeft("")(strPlusInt) - val y1: String = x1 - val x2 = xs.foldRight("")(intPlusStr) - val y2: String = x2 - //val x3 = xs.indexWhere(_ % 2 == 0) // indexWhere does not exist on View - //val y3: Int = x3 - val x4 = xs.head - val y4: Int = x4 - val x5 = xs.to(List) - val y5: List[Int] = x5 - val (xs6, xs7) = xs.partition(isEven) - val ys6: View[Int]^{xs6, isEven} = xs6 - val ys7: View[Int]^{xs7, isEven} = xs7 - val (xs6a, xs7a) = xs.partition(_ % 2 == 0) - val ys6a: 
View[Int]^{xs6} = xs6 - val ys7a: View[Int]^{xs7} = xs7 - val xs8 = xs.drop(2) - val ys8: View[Int]^{xs8} = xs8 - val xs9 = xs.map(isNonNeg) - val ys9: View[Boolean]^{xs9} = xs9 - val xs10 = xs.flatMap(flips) - val ys10: View[Int]^{xs10} = xs10 - val xs11 = xs ++ xs - val ys11: View[Int]^{xs11} = xs11 - val xs12 = xs ++ Nil - val ys12: View[Int]^{xs12} = xs12 - val xs13 = Nil ++ xs - val ys13: List[Int] = xs13 - val xs14 = xs ++ ("a" :: Nil) - val ys14: View[Any]^{xs14} = xs14 - val xs15 = xs.zip(xs9) - val ys15: View[(Int, Boolean)]^{xs15} = xs15 - println("-------") - println(x1) - println(x2) - println(x4) - println(x5) - println(xs6.to(List)) - println(xs7.to(List)) - println(xs8.to(List)) - println(xs9.to(List)) - println(xs10.to(List)) - println(xs11.to(List)) - println(xs12.to(List)) - println(xs13.to(List)) - println(xs14.to(List)) - println(xs15.to(List)) - } - - def stringOps(xs: String) = { - val x1 = xs.foldLeft("")(_ + _) - val y1: String = x1 - val x2 = xs.foldRight("")(_ + _) - val y2: String = x2 - val x3 = xs.indexWhere(_ % 2 == 0) - val y3: Int = x3 - val x4 = xs.head - val y4: Int = x4 - val x5 = xs.to(List) - val y5: List[Char] = x5 - val (xs6, xs7) = xs.partition(_ % 2 == 0) - val ys6: String = xs6 - val ys7: String = xs7 - val xs8 = xs.drop(2) - val ys8: String = xs8 - val xs9 = xs.map(_ + 1) - val ys9: Seq[Int] = xs9 - val xs9a = xs.map(_.toUpper) - val ys9a: String = xs9a - val xs10 = xs.flatMap((x: Char) => s"$x,$x") - val ys10: String = xs10 - val xs11 = xs ++ xs - val ys11: String = xs11 - val ops = collection.StringOps(xs) // !!! otherwise we can a "cannot establish reference" - val xs13 = Nil ++ ops.iterator - val ys13: List[Char] = xs13 - val xs14 = xs ++ ("xyz" :: Nil) - val ys14: Seq[Any] = xs14 - val xs15 = xs.zip(xs9) - val ys15: Seq[(Char, Int)] = xs15 - println("-------") - println(x1) - println(x2) - println(x3) - println(x4) - println(x5) - println(xs6) - println(xs7) - println(xs8) - println(xs9) - println(xs9a) - println(xs10) - println(xs11) - println(xs13) - println(xs14) - println(xs15) - } - - def main(args: Array[String]) = { - val ints = List(1, 2, 3) - val intsBuf = ints.to(ArrayBuffer) - val intsListBuf = ints.to(ListBuffer) - val intsView = ints.view - seqOps(ints) - seqOps(intsBuf) - seqOps(intsListBuf) - viewOps(intsView) - iterOps(ints.iterator) - stringOps("abc") - } -} diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala new file mode 100644 index 000000000000..485427886625 --- /dev/null +++ b/tests/pos-special/stdlib/collection/ArrayOps.scala @@ -0,0 +1,1663 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import java.lang.Math.{max, min} +import java.util.Arrays + +import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally + genericArrayOps => _, + booleanArrayOps => _, + byteArrayOps => _, + charArrayOps => _, + doubleArrayOps => _, + floatArrayOps => _, + intArrayOps => _, + longArrayOps => _, + refArrayOps => _, + shortArrayOps => _, + unitArrayOps => _, + genericWrapArray => _, + wrapRefArray => _, + wrapIntArray => _, + wrapDoubleArray => _, + wrapLongArray => _, + wrapFloatArray => _, + wrapCharArray => _, + wrapByteArray => _, + wrapShortArray => _, + wrapBooleanArray => _, + wrapUnitArray => _, + wrapString => _, + copyArrayToImmutableIndexedSeq => _, + _ +} +import scala.collection.Stepper.EfficientSplit +import scala.collection.immutable.Range +import scala.collection.mutable.ArrayBuilder +import scala.math.Ordering +import scala.reflect.ClassTag +import scala.util.Sorting + +object ArrayOps { + + @SerialVersionUID(3L) + private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + def length = xs.length + def apply(n: Int) = xs(n) + override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") + } + + /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter[A](p: A => Boolean, xs: Array[A]) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + while(i < len) { + val x = xs(i) + if(p(x)) f(x) + i += 1 + } + } + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B: ClassTag](f: A => B): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val x = xs(i) + if(p(x)) b += f(x) + i = i + 1 + } + b.result() + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Creates a new non-strict filter which combines this filter with the given predicate. 
*/ + def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + } + + @SerialVersionUID(3L) + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = 0 + private[this] val len = xs.length + override def knownSize: Int = len - pos + def hasNext: Boolean = pos < len + def next(): A = { + if (pos >= xs.length) Iterator.empty.next() + val r = xs(pos) + pos += 1 + r + } + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } + this + } + } + + @SerialVersionUID(3L) + private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = xs.length-1 + def hasNext: Boolean = pos >= 0 + def next(): A = { + if (pos < 0) Iterator.empty.next() + val r = xs(pos) + pos -= 1 + r + } + + override def drop(n: Int): Iterator[A] = { + if (n > 0) pos = Math.max( -1, pos - n) + this + } + } + + @SerialVersionUID(3L) + private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private[this] var pos = 0 + def hasNext: Boolean = pos < xs.length + def next(): Array[A] = { + if(pos >= xs.length) throw new NoSuchElementException + val r = new ArrayOps(xs).slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to + * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. + */ + private final val MaxStableSortLength = 300 + + /** Avoid an allocation in [[collect]]. */ + private val fallback: Any => Any = _ => fallback +} + +/** This class serves as a wrapper for `Array`s with many of the operations found in + * indexed sequences. Where needed, instances of arrays are implicitly converted + * into this class. There is generally no reason to create an instance explicitly or use + * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on + * the implicit conversion to `ArrayOps` when calling a method (which does not actually + * allocate an instance of `ArrayOps` because it is a value class). + * + * Neither `Array` nor `ArrayOps` are proper collection types + * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and + * `immutable.ArraySeq` serve this purpose. + * + * The difference between this class and `ArraySeq`s is that calling transformer methods such as + * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. + * + * @tparam A type of the elements contained in this array. + */ +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + + @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def size: Int = xs.length + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def knownSize: Int = xs.length + + /** Tests whether the array is empty. + * + * @return `true` if the array contains no elements, `false` otherwise. + */ + @`inline` def isEmpty: Boolean = xs.length == 0 + + /** Tests whether the array is not empty. 
+ *
+ * @return `true` if the array contains at least one element, `false` otherwise.
+ */
+ @`inline` def nonEmpty: Boolean = xs.length != 0
+
+ /** Selects the first element of this array.
+ *
+ * @return the first element of this array.
+ * @throws NoSuchElementException if the array is empty.
+ */
+ def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array")
+
+ /** Selects the last element.
+ *
+ * @return The last element of this array.
+ * @throws NoSuchElementException If the array is empty.
+ */
+ def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array")
+
+ /** Optionally selects the first element.
+ *
+ * @return the first element of this array if it is nonempty,
+ * `None` if it is empty.
+ */
+ def headOption: Option[A] = if(isEmpty) None else Some(head)
+
+ /** Optionally selects the last element.
+ *
+ * @return the last element of this array if it is nonempty,
+ * `None` if it is empty.
+ */
+ def lastOption: Option[A] = if(isEmpty) None else Some(last)
+
+ /** Compares the size of this array to a test value.
+ *
+ * @param otherSize the test value that gets compared with the size.
+ * @return A value `x` where
+ * {{{
+ * x < 0 if this.size < otherSize
+ * x == 0 if this.size == otherSize
+ * x > 0 if this.size > otherSize
+ * }}}
+ */
+ def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize)
+
+ /** Compares the length of this array to a test value.
+ *
+ * @param len the test value that gets compared with the length.
+ * @return A value `x` where
+ * {{{
+ * x < 0 if this.length < len
+ * x == 0 if this.length == len
+ * x > 0 if this.length > len
+ * }}}
+ */
+ def lengthCompare(len: Int): Int = Integer.compare(xs.length, len)
+
+ /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int`
+ * because `size` is known and comparison is constant-time.
+ *
+ * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and
+ * allow the following more readable usages:
+ *
+ * {{{
+ * this.sizeIs < size // this.sizeCompare(size) < 0
+ * this.sizeIs <= size // this.sizeCompare(size) <= 0
+ * this.sizeIs == size // this.sizeCompare(size) == 0
+ * this.sizeIs != size // this.sizeCompare(size) != 0
+ * this.sizeIs >= size // this.sizeCompare(size) >= 0
+ * this.sizeIs > size // this.sizeCompare(size) > 0
+ * }}}
+ */
+ def sizeIs: Int = xs.length
+
+ /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int`
+ * because `length` is known and comparison is constant-time.
+ *
+ * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and
+ * allow the following more readable usages:
+ *
+ * {{{
+ * this.lengthIs < len // this.lengthCompare(len) < 0
+ * this.lengthIs <= len // this.lengthCompare(len) <= 0
+ * this.lengthIs == len // this.lengthCompare(len) == 0
+ * this.lengthIs != len // this.lengthCompare(len) != 0
+ * this.lengthIs >= len // this.lengthCompare(len) >= 0
+ * this.lengthIs > len // this.lengthCompare(len) > 0
+ * }}}
+ */
+ def lengthIs: Int = xs.length
+
+ /** Selects an interval of elements. The returned array is made up
+ * of all elements `x` which satisfy the invariant:
+ * {{{
+ * from <= indexOf(x) < until
+ * }}}
+ *
+ * @param from the lowest index to include from this array.
+ * @param until the lowest index to EXCLUDE from this array.
+ * @return an array containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this array. + */ + def slice(from: Int, until: Int): Array[A] = { + import java.util.Arrays.copyOfRange + val lo = max(from, 0) + val hi = min(until, xs.length) + if (hi > lo) { + (((xs: Array[_]): @unchecked) match { + case x: Array[AnyRef] => copyOfRange(x, lo, hi) + case x: Array[Int] => copyOfRange(x, lo, hi) + case x: Array[Double] => copyOfRange(x, lo, hi) + case x: Array[Long] => copyOfRange(x, lo, hi) + case x: Array[Float] => copyOfRange(x, lo, hi) + case x: Array[Char] => copyOfRange(x, lo, hi) + case x: Array[Byte] => copyOfRange(x, lo, hi) + case x: Array[Short] => copyOfRange(x, lo, hi) + case x: Array[Boolean] => copyOfRange(x, lo, hi) + }).asInstanceOf[Array[A]] + } else new Array[A](0) + } + + /** The rest of the array without its first element. */ + def tail: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) + + /** The initial part of the array without its last element. */ + def init: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) + + /** Iterates over the tails of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this array + */ + def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) + + /** Iterates over the inits of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this array + */ + def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) + + // A helper for tails and inits. + private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) + + /** An array containing the first `n` elements of this array. */ + def take(n: Int): Array[A] = slice(0, n) + + /** The rest of the array without its `n` first elements. */ + def drop(n: Int): Array[A] = slice(n, xs.length) + + /** An array containing the last `n` elements of this array. */ + def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) + + /** The rest of the array without its `n` last elements. */ + def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) + + /** Takes longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest prefix of this array whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val hi = if(i < 0) xs.length else i + slice(0, hi) + } + + /** Drops longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this array whose first element + * does not satisfy the predicate `p`. 
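+ *
+ * For example (illustrative values), together with `takeWhile` this splits
+ * the array exactly like `span`:
+ * {{{
+ * Array(1, 2, 3, 1).dropWhile(_ < 3) // Array(3, 1)
+ * Array(1, 2, 3, 1).takeWhile(_ < 3) // Array(1, 2)
+ * }}}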
+ */ + def dropWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val lo = if(i < 0) xs.length else i + slice(lo, xs.length) + } + + def iterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Int] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Double] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Long] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Float] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Char] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Short] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = (shape.shape: @unchecked) match { + case StepperShape.ReferenceShape => (xs: Any) match { + case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length) + case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length) + } + case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length) + case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length) + case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length) + case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length) + case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length) + case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length) + case StepperShape.FloatShape => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float ]], 0, xs.length) + } + s.asInstanceOf[S with EfficientSplit] + } + + /** Partitions elements in fixed size arrays. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing arrays of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size) + + /** Splits this array into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this array whose + * elements all satisfy `p`, and the rest of this array. + */ + def span(p: A => Boolean): (Array[A], Array[A]) = { + val i = indexWhere(x => !p(x)) + val idx = if(i < 0) xs.length else i + (slice(0, idx), slice(idx, xs.length)) + } + + /** Splits this array into two at a given position. + * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. + * + * @param n the position at which to split. + * @return a pair of arrays consisting of the first `n` + * elements of this array, and the other elements. + */ + def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n)) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. 
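+ * For example (illustrative values):
+ * {{{
+ * Array(1, 2, 3, 4, 5).partition(_ % 2 == 0) // (Array(2, 4), Array(1, 3, 5))
+ * }}}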
*/ + def partition(p: A => Boolean): (Array[A], Array[A]) = { + val res1, res2 = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + (if(p(x)) res1 else res2) += x + i += 1 + } + (res1.result(), res2.result()) + } + + /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. + * + * Example: + * {{{ + * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == (Array(1, 2, 3), + * // Array(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]] + * + * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. */ + def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + val res1 = ArrayBuilder.make[A1] + val res2 = ArrayBuilder.make[A2] + var i = 0 + while(i < xs.length) { + f(xs(i)) match { + case Left(x) => res1 += x + case Right(x) => res2 += x + } + i += 1 + } + (res1.result(), res2.result()) + } + + /** Returns a new array with the elements in reversed order. */ + @inline def reverse: Array[A] = { + val len = xs.length + val res = new Array[A](len) + var i = 0 + while(i < len) { + res(len-i-1) = xs(i) + i += 1 + } + res + } + + /** An iterator yielding elements in reversed order. + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. + * + * @return an iterator yielding the elements of this array in reversed order + */ + def reverseIterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Int] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Double] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Long] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Float] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Char] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Byte] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Short] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Unit] => new ArrayOps.ReverseIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + /** Selects all elements of this array which satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that satisfy the given predicate `p`. + */ + def filter(p: A => Boolean): Array[A] = { + val res = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) res += x + i += 1 + } + res.result() + } + + /** Selects all elements of this array which do not satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`. + */ + def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x)) + + /** Sorts this array according to an Ordering. + * + * The sort is stable. 
That is, elements that are equal (as determined by
+ * `ord.compare`) appear in the same order in the sorted sequence as in the original.
+ *
+ * @see [[scala.math.Ordering]]
+ *
+ * @param ord the ordering to be used to compare elements.
+ * @return an array consisting of the elements of this array
+ * sorted according to the ordering `ord`.
+ */
+ def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = {
+ val len = xs.length
+ def boxed = if(len < ArrayOps.MaxStableSortLength) {
+ val a = xs.clone()
+ Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]])
+ a
+ } else {
+ val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef)
+ Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]])
+ Array.copyAs[A](a, len)
+ }
+ if(len <= 1) xs.clone()
+ else ((xs: Array[_]) match {
+ case xs: Array[AnyRef] =>
+ val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a
+ case xs: Array[Int] =>
+ if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Long] =>
+ if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Char] =>
+ if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Byte] =>
+ if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Short] =>
+ if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Boolean] =>
+ if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a }
+ else boxed
+ case xs => boxed
+ }).asInstanceOf[Array[A]]
+ }
+
+ /** Sorts this array according to a comparison function.
+ *
+ * The sort is stable. That is, elements that are equal (as determined by
+ * `lt`) appear in the same order in the sorted sequence as in the original.
+ *
+ * @param lt the comparison function which tests whether
+ * its first argument precedes its second argument in
+ * the desired ordering.
+ * @return an array consisting of the elements of this array
+ * sorted according to the comparison function `lt`.
+ */
+ def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt))
+
+ /** Sorts this array according to the Ordering which results from transforming
+ * an implicitly given Ordering with a transformation function.
+ *
+ * @see [[scala.math.Ordering]]
+ * @param f the transformation function mapping elements
+ * to some other domain `B`.
+ * @param ord the ordering assumed on domain `B`.
+ * @tparam B the target type of the transformation `f`, and the type where
+ * the ordering `ord` is defined.
+ * @return an array consisting of the elements of this array
+ * sorted according to the ordering where `x < y` if
+ * `ord.lt(f(x), f(y))`.
+ */
+ def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f)
+
+ /** Creates a non-strict filter of this array.
+ *
+ * Note: the difference between `c filter p` and `c withFilter p` is that
+ * the former creates a new array, whereas the latter only
+ * restricts the domain of subsequent `map`, `flatMap`, `foreach`,
+ * and `withFilter` operations.
+ *
+ * @param p the predicate used to test elements.
+ * @return an object of class `ArrayOps.WithFilter`, which supports
+ * `map`, `flatMap`, `foreach`, and `withFilter` operations.
+ * All these operations apply to those elements of this array
+ * which satisfy the predicate `p`.
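+ *
+ * For example (illustrative values), no intermediate array is allocated here:
+ * {{{
+ * Array(1, 2, 3, 4).withFilter(_ % 2 == 0).map(_ + 1) // Array(3, 5)
+ * }}}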
+ */ + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs) + + /** Finds index of first occurrence of some value in this array after or at some start index. + * + * @param elem the element value to search for. + * @param from the start index + * @return the index `>= from` of the first element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf(elem: A, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(elem == xs(i)) return i + i += 1 + } + -1 + } + + /** Finds index of the first element satisfying some predicate after or at some start index. + * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(p(xs(i))) return i + i += 1 + } + -1 + } + + /** Finds index of last occurrence of some value in this array before or at a given end index. + * + * @param elem the element value to search for. + * @param end the end index. + * @return the index `<= end` of the last element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(elem == xs(i)) return i + i -= 1 + } + -1 + } + + /** Finds index of last element satisfying some predicate before or at given end index. + * + * @param p the predicate used to test elements. + * @return the index `<= end` of the last element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(p(xs(i))) return i + i -= 1 + } + -1 + } + + /** Finds the first element of the array satisfying a predicate, if any. + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the array + * that satisfies `p`, or `None` if none exists. + */ + def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = { + val idx = indexWhere(p) + if(idx == -1) None else Some(xs(idx)) + } + + /** Tests whether a predicate holds for at least one element of this array. + * + * @param p the predicate used to test elements. + * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false` + */ + def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0 + + /** Tests whether a predicate holds for all elements of this array. + * + * @param p the predicate used to test elements. + * @return `true` if this array is empty or the given predicate `p` + * holds for all elements of this array, otherwise `false`. + */ + def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = { + var i = 0 + while(i < xs.length) { + if(!p(xs(i))) return false + i += 1 + } + true + } + + /** Applies a binary operator to a start value and all elements of this array, + * going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. 
+ * @return the result of inserting `op` between consecutive elements of this array, + * going left to right with the start value `z` on the left: + * {{{ + * op(...op(z, x_1), x_2, ..., x_n) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. + */ + def foldLeft[B](z: B)(op: (B, A) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + val length = xs.length + var v: Any = z + var i = 0 + while(i < length) { + v = op(v, xs(i)) + i += 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException // null-check first helps static analysis of instanceOf + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + } + + /** Produces an array containing cumulative results of applying the binary + * operator going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10) + * }}} + * + */ + def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = { + var v = z + var i = 0 + val res = new Array[B](xs.length + 1) + while(i < xs.length) { + res(i) = v + v = op(v, xs(i)) + i += 1 + } + res(i) = v + res + } + + /** Computes a prefix scan of the elements of the array. + * + * Note: The neutral element `z` may be applied more than once. + * + * @tparam B element type of the resulting array + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new array containing the prefix scan of the elements in this array + */ + def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + + /** Produces an array containing cumulative results of applying the binary + * operator going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0) + * }}} + * + */ + def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = { + var v = z + var i = xs.length - 1 + val res = new Array[B](xs.length + 1) + res(xs.length) = z + while(i >= 0) { + v = op(xs(i), v) + res(i) = v + i -= 1 + } + res + } + + /** Applies a binary operator to all elements of this array and a start value, + * going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. 
+ * @return the result of inserting `op` between consecutive elements of this array, + * going right to left with the start value `z` on the right: + * {{{ + * op(x_1, op(x_2, ... op(x_n, z)...)) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. + */ + def foldRight[B](z: B)(op: (A, B) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + var v = z + var i = xs.length - 1 + while(i >= 0) { + v = op(xs(i), v) + i -= 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + + } + + /** Folds the elements of this array using the specified associative binary operator. + * + * @tparam A1 a type parameter for the binary operator, a supertype of `A`. + * @param z a neutral element for the fold operation; may be added to the result + * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, + * 0 for addition, or 1 for multiplication). + * @param op a binary operator that must be associative. + * @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. 
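+ *
+ * For example (illustrative values):
+ * {{{
+ * Array(1, 2, 3).map(_.toString) // Array("1", "2", "3")
+ * }}}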
+ */ + def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + val len = xs.length + val ys = new Array[B](len) + if(len > 0) { + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + ys + } + + def mapInPlace(f: A => A): Array[A] = { + var i = 0 + while (i < xs.length) { + xs.update(i, f(xs(i))) + i = i + 1 + } + xs + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Flattens a two-dimensional array by concatenating all its rows + * into a single array. + * + * @tparam B Type of row elements. + * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. + * @return An array obtained by concatenating rows of this array. + */ + def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val len = xs.length + var size = 0 + var i = 0 + while(i < len) { + xs(i) match { + case it: IterableOnce[_] => + val k = it.knownSize + if(k > 0) size += k + case a: Array[_] => size += a.length + case _ => + } + i += 1 + } + if(size > 0) b.sizeHint(size) + i = 0 + while(i < len) { + b ++= asIterable(xs(i)) + i += 1 + } + b.result() + } + + /** Builds a new array by applying a partial function to all elements of this array + * on which the function is defined. + * + * @param pf the partial function which filters and maps the array. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + val fallback: Any => Any = ArrayOps.fallback + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) + i += 1 + } + b.result() + } + + /** Finds the first element of the array for which the given partial function is defined, and applies the + * partial function to it. 
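+ *
+ * For example (illustrative values):
+ * {{{
+ * Array[Any]("a", 1, 5L).collectFirst { case x: Int => x * 10 } // Some(10)
+ * }}}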
*/ + def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = { + val fallback: Any => Any = ArrayOps.fallback + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B]) + i += 1 + } + None + } + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the minimum of the lengths of this array and `that`. + */ + def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + val b = new ArrayBuilder.ofRef[(A, B)]() + val k = that.knownSize + b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + b.result() + } + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this array is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this array. + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the maximum of the lengths of this array and `that`. + * If this array is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this array, `thatElem` values are used to pad the result. 
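+ *
+ * For example (illustrative values), the shorter side is padded:
+ * {{{
+ * Array(1, 2, 3).zipAll(List("a"), 0, "?") // Array((1, "a"), (2, "?"), (3, "?"))
+ * }}}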
+ */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + val b = new ArrayBuilder.ofRef[(A1, B)]() + val k = that.knownSize + b.sizeHint(max(k, xs.length)) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + while(it.hasNext) { + b += ((thisElem, it.next())) + i += 1 + } + while(i < xs.length) { + b += ((xs(i), thatElem)) + i += 1 + } + b.result() + } + + /** Zips this array with its indices. + * + * @return A new array containing pairs consisting of all elements of this array paired with their index. + * Indices start at `0`. + */ + def zipWithIndex: Array[(A, Int)] = { + val b = new Array[(A, Int)](xs.length) + var i = 0 + while(i < xs.length) { + b(i) = ((xs(i), i)) + i += 1 + } + b + } + + /** A copy of this array with an element appended. */ + def appended[B >: A : ClassTag](x: B): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+1) + dest(xs.length) = x + dest + } + + @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + + /** A copy of this array with an element prepended. */ + def prepended[B >: A : ClassTag](x: B): Array[B] = { + val dest = new Array[B](xs.length + 1) + dest(0) = x + Array.copy(xs, 0, dest, 1, xs.length) + dest + } + + @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + + /** A copy of this array with all elements of a collection prepended. */ + def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = prefix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + xs.length) + b.addAll(xs) + b.result() + } + + /** A copy of this array with all elements of an array prepended. */ + def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](prefix, prefix.length+xs.length) + Array.copy(xs, 0, dest, prefix.length, xs.length) + dest + } + + @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + + @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + + /** A copy of this array with all elements of a collection appended. */ + def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = suffix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(xs) + b.addAll(suffix) + b.result() + } + + /** A copy of this array with all elements of an array appended. */ + def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+suffix.length) + Array.copy(suffix, 0, dest, xs.length, suffix.length) + dest + } + + @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + + @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + + /** Tests whether this array contains a given value as an element. + * + * @param elem the element to test. 
+ * @return `true` if this array has an element that is equal (as
+ * determined by `==`) to `elem`, `false` otherwise.
+ */
+ def contains(elem: A): Boolean = exists (_ == elem)
+
+ /** Returns a copy of this array with patched values.
+ * Patching at negative indices is the same as patching starting at 0.
+ * Patching at indices at or larger than the length of the original array appends the patch to the end.
+ * If more values are replaced than actually exist, the excess is ignored.
+ *
+ * @param from The start index from which to patch
+ * @param other The patch values
+ * @param replaced The number of values in the original array that are replaced by the patch.
+ */
+ def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = {
+ val b = ArrayBuilder.make[B]
+ val k = other.knownSize
+ val r = if(replaced < 0) 0 else replaced
+ if(k >= 0) b.sizeHint(xs.length + k - r)
+ val chunk1 = if(from > 0) min(from, xs.length) else 0
+ if(chunk1 > 0) b.addAll(xs, 0, chunk1)
+ b ++= other
+ val remaining = xs.length - chunk1 - r
+ if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining)
+ b.result()
+ }
+
+ /** Converts an array of pairs into an array of first elements and an array of second elements.
+ *
+ * @tparam A1 the type of the first half of the element pairs
+ * @tparam A2 the type of the second half of the element pairs
+ * @param asPair an implicit conversion which asserts that the element type
+ * of this Array is a pair.
+ * @param ct1 a class tag for `A1` type parameter that is required to create an instance
+ * of `Array[A1]`
+ * @param ct2 a class tag for `A2` type parameter that is required to create an instance
+ * of `Array[A2]`
+ * @return a pair of Arrays, containing, respectively, the first and second half
+ * of each element pair of this Array.
+ */
+ def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = {
+ val a1 = new Array[A1](xs.length)
+ val a2 = new Array[A2](xs.length)
+ var i = 0
+ while (i < xs.length) {
+ val e = asPair(xs(i))
+ a1(i) = e._1
+ a2(i) = e._2
+ i += 1
+ }
+ (a1, a2)
+ }
+
+ /** Converts an array of triples into three arrays, one containing the elements from each position of the triple.
+ *
+ * @tparam A1 the type of the first of three elements in the triple
+ * @tparam A2 the type of the second of three elements in the triple
+ * @tparam A3 the type of the third of three elements in the triple
+ * @param asTriple an implicit conversion which asserts that the element type
+ * of this Array is a triple.
+ * @param ct1 a class tag for `A1` type parameter that is required to create an instance
+ * of `Array[A1]`
+ * @param ct2 a class tag for `A2` type parameter that is required to create an instance
+ * of `Array[A2]`
+ * @param ct3 a class tag for `A3` type parameter that is required to create an instance
+ * of `Array[A3]`
+ * @return a triple of Arrays, containing, respectively, the first, second, and third
+ * elements from each element triple of this Array.
+ */
+ def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2],
+ ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = {
+ val a1 = new Array[A1](xs.length)
+ val a2 = new Array[A2](xs.length)
+ val a3 = new Array[A3](xs.length)
+ var i = 0
+ while (i < xs.length) {
+ val e = asTriple(xs(i))
+ a1(i) = e._1
+ a2(i) = e._2
+ a3(i) = e._3
+ i += 1
+ }
+ (a1, a2, a3)
+ }
+
+ /** Transposes a two-dimensional array.
+ *
+ * @tparam B Type of row elements.
+ * @param asArray A function that converts elements of this array to rows - arrays of type `B`.
+ * @return An array obtained by replacing the elements of this array with the rows they represent.
+ */
+ def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = {
+ val aClass = xs.getClass.getComponentType
+ val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass))
+ if (xs.length == 0) bb.result()
+ else {
+ def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType))
+ val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder())
+ for (xs <- this) {
+ var i = 0
+ for (x <- new ArrayOps(asArray(xs))) {
+ bs(i) += x
+ i += 1
+ }
+ }
+ for (b <- new ArrayOps(bs)) bb += b.result()
+ bb.result()
+ }
+ }
+
+ /** Apply `f` to each element for its side effects.
+ * Note: [U] parameter needed to help scalac's type inference.
+ */
+ def foreach[U](f: A => U): Unit = {
+ val len = xs.length
+ var i = 0
+ (xs: Any @unchecked) match {
+ case xs: Array[AnyRef] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Int] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Double] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Long] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Float] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Char] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Byte] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Short] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ }
+ }
+
+ /** Selects all the elements of this array ignoring the duplicates.
+ *
+ * @return a new array consisting of all the elements of this array without duplicates.
+ */
+ def distinct: Array[A] = distinctBy(identity)
+
+ /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying
+ * the transforming function `f`.
+ *
+ * @param f The transforming function whose result is used to determine the uniqueness of each element
+ * @tparam B the type of the elements after being transformed by `f`
+ * @return a new array consisting of all the elements of this array without duplicates.
+ */
+ def distinctBy[B](f: A => B): Array[A] =
+ ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result()
+
+ /** A copy of this array with an element value appended until a given target length is reached.
+ *
+ * @param len the target length
+ * @param elem the padding value
+ * @tparam B the element type of the returned array.
+ * @return a new array consisting of
+ * all elements of this array followed by the minimal number of occurrences of `elem` so
+ * that the resulting collection has a length of at least `len`.
+ */
+ def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = {
+ var i = xs.length
+ val newlen = max(i, len)
+ val dest = Array.copyAs[B](xs, newlen)
+ while(i < newlen) {
+ dest(i) = elem
+ i += 1
+ }
+ dest
+ }
+
+ /** Produces the range of all indices of this sequence.
+ *
+ * @return a `Range` value from `0` to one less than the length of this array.
+ */
+ def indices: Range = Range(0, xs.length)
+
+ /** Partitions this array into a map of arrays according to some discriminator function.
+ *
+ * @param f the discriminator function.
+ * @tparam K the type of keys returned by the discriminator function.
+ * @return A map from keys to arrays such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to an array of those elements `x` + * for which `f(x)` equals `k`. + */ + def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = { + val m = mutable.Map.empty[K, ArrayBuilder[A]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val key = f(elem) + val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A]) + bldr += elem + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + /** + * Partitions this array into a map of arrays according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Array[User]): Map[Int, Array[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + val m = mutable.Map.empty[K, ArrayBuilder[B]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val k = key(elem) + val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B]) + bldr += f(elem) + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq + + def toIndexedSeq: immutable.IndexedSeq[A] = + immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index 0. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start`. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start` with at most `len` values. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) + if (copied > 0) { + Array.copy(this.xs, 0, xs, start, copied) + } + copied + } + + /** Create a copy of this array with the specified element type. 
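+ *
+ * For example (illustrative), this widens an `Array[Int]` into an `Array[Any]`,
+ * boxing its elements in the copy:
+ * {{{
+ * val anys: Array[Any] = Array(1, 2, 3).toArray[Any]
+ * }}}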
*/ + def toArray[B >: A: ClassTag]: Array[B] = { + val destination = new Array[B](xs.length) + copyToArray(destination, 0) + destination + } + + /** Counts the number of elements in this array which satisfy a predicate */ + def count(p: A => Boolean): Int = { + var i, res = 0 + val len = xs.length + while(i < len) { + if(p(xs(i))) res += 1 + i += 1 + } + res + } + + // can't use a default arg because we already have another overload with a default arg + /** Tests whether this array starts with the given array. */ + @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + + /** Tests whether this array contains the given array at a given index. + * + * @param that the array to test + * @param offset the index where the array is searched. + * @return `true` if the array `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + val safeOffset = offset.max(0) + val thatl = that.length + if(thatl > xs.length-safeOffset) thatl == 0 + else { + var i = 0 + while(i < thatl) { + if(xs(i+safeOffset) != that(i)) return false + i += 1 + } + true + } + } + + /** Tests whether this array ends with the given array. + * + * @param that the array to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Array[B]): Boolean = { + val thatl = that.length + val off = xs.length - thatl + if(off < 0) false + else { + var i = 0 + while(i < thatl) { + if(xs(i+off) != that(i)) return false + i += 1 + } + true + } + } + + /** A copy of this array with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + */ + def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") + val dest = toArray[B] + dest(index) = elem + dest + } + + @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which + may not provide the best possible performance. We need them in `ArrayOps` because their return type + mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this array and another sequence. + * + * @param that the sequence of elements to remove + * @return a new array which contains all elements of this array + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + */ + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] + + /** Computes the multiset intersection between this array and another sequence. + * + * @param that the sequence of elements to intersect with. 
+ * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive groups + * @return An iterator producing arrays of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` elements remaining to be grouped. + */ + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. + * + * @return An Iterator which traverses the n-element combinations of this array + * @example {{{ + * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b) + * // Array(a, c) + * // Array(b, b) + * // Array(b, c) + * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(b, b) + * // Array(b, a) + * }}} + */ + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this array. + * @example {{{ + * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b, b) + * // Array(b, a, b) + * // Array(b, b, a) + * }}} + */ + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array contains the given sequence at a given index. 
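+ *
+ * For example (illustrative values):
+ * {{{
+ * Array(1, 2, 3).startsWith(List(2, 3), 1) // true
+ * }}}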
+ * + * @param that the sequence to test + * @param offset the index where the sequence is searched. + * @return `true` if the sequence `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array ends with the given sequence. + * + * @param that the sequence to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) +} diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala new file mode 100644 index 000000000000..e8ca89806455 --- /dev/null +++ b/tests/pos-special/stdlib/collection/BitSet.scala @@ -0,0 +1,348 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.Builder + + +/** Base type of bitsets. + * + * This trait provides most of the operations of a `BitSet` independently of its representation. + * It is inherited by all concrete implementations of bitsets. + * + * @define bitsetinfo + * Bitsets are sets of non-negative integers which are represented as + * variable-size arrays of bits packed into 64-bit words. The lower bound of memory footprint of a bitset is + * determined by the largest number stored in it. + * @define coll bitset + * @define Coll `BitSet` + */ +trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] { + override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "BitSet" + override def unsorted: Set[Int] = this +} + +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Int] first by calling `unsorted`." + private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Int, ${B})]. You may want to upcast to a Set[Int] first by calling `unsorted`." 
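+
+ // Illustrative note: these messages are surfaced (for example through
+ // implicit-not-found annotations) when a transformation such as `map` or `zip`
+ // cannot find an `Ordering` for its result type; `unsorted` is the escape
+ // hatch they recommend.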
+ + def empty: BitSet = immutable.BitSet.empty + def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder + def fromSpecific(it: IterableOnce[Int]): BitSet = immutable.BitSet.fromSpecific(it) + + @SerialVersionUID(3L) + private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable { + + @transient protected var elems: Array[Long] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val nwords = coll.nwords + out.writeInt(nwords) + var i = 0 + while(i < nwords) { + out.writeLong(coll.word(i)) + i += 1 + } + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val nwords = in.readInt() + elems = new Array[Long](nwords) + var i = 0 + while(i < nwords) { + elems(i) = in.readLong() + i += 1 + } + } + + protected[this] def readResolve(): Any + } +} + +/** Base implementation type of bitsets */ +trait BitSetOps[+C <: BitSet with BitSetOps[C]] + extends SortedSetOps[Int, SortedSet, C] { self => + import BitSetOps._ + + def bitSetFactory: SpecificIterableFactory[Int, C] + + def unsorted: Set[Int] + + final def ordering: Ordering[Int] = Ordering.Int + + /** The number of words (each with 64 bits) making up the set */ + protected[collection] def nwords: Int + + /** The words at index `idx`, or 0L if outside the range of the set + * '''Note:''' requires `idx >= 0` + */ + protected[collection] def word(idx: Int): Long + + /** Creates a new set of this kind from an array of longs + */ + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): C + + def contains(elem: Int): Boolean = + 0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L + + def iterator: Iterator[Int] = iteratorFrom(0) + + def iteratorFrom(start: Int): Iterator[Int] = new AbstractIterator[Int] { + private[this] var currentPos = if (start > 0) start >> LogWL else 0 + private[this] var currentWord = if (start > 0) word(currentPos) & (-1L << (start & (WordLength - 1))) else word(0) + final override def hasNext: Boolean = { + while (currentWord == 0) { + if (currentPos + 1 >= nwords) return false + currentPos += 1 + currentWord = word(currentPos) + } + true + } + final override def next(): Int = { + if (hasNext) { + val bitPos = java.lang.Long.numberOfTrailingZeros(currentWord) + currentWord &= currentWord - 1 + (currentPos << LogWL) + bitPos + } else Iterator.empty.next() + } + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { + val st = scala.collection.convert.impl.BitSetStepper.from(this) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + override def size: Int = { + var s = 0 + var i = nwords + while (i > 0) { + i -= 1 + s += java.lang.Long.bitCount(word(i)) + } + s + } + + override def isEmpty: Boolean = 0 until nwords forall (i => word(i) == 0) + + @inline private[this] def smallestInt: Int = { + val thisnwords = nwords + var i = 0 + while(i < thisnwords) { + val currentWord = word(i) + if (currentWord != 0L) { + return java.lang.Long.numberOfTrailingZeros(currentWord) + (i * WordLength) + } + i += 1 + } + throw new UnsupportedOperationException("empty.smallestInt") + } + + @inline private[this] def largestInt: Int = { + var i = nwords - 1 + while(i >= 0) { + val currentWord = word(i) + if (currentWord != 0L) { + return ((i + 1) 
* WordLength) - java.lang.Long.numberOfLeadingZeros(currentWord) - 1 + } + i -= 1 + } + throw new UnsupportedOperationException("empty.largestInt") + } + + override def max[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) largestInt + else if (Ordering.Int isReverseOf ord) smallestInt + else super.max(ord) + + + override def min[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) smallestInt + else if (Ordering.Int isReverseOf ord) largestInt + else super.min(ord) + + override def foreach[U](f: Int => U): Unit = { + /* NOTE: while loops are significantly faster as of 2.11 and + one major use case of bitsets is performance. Also, there + is nothing to do when all bits are clear, so use that as + the inner loop condition. */ + var i = 0 + while (i < nwords) { + var w = word(i) + var j = i * WordLength + while (w != 0L) { + if ((w&1L) == 1L) f(j) + w = w >>> 1 + j += 1 + } + i += 1 + } + } + + /** Creates a bit mask for this set as a new array of longs + */ + def toBitMask: Array[Long] = { + val a = new Array[Long](nwords) + var i = a.length + while(i > 0) { + i -= 1 + a(i) = word(i) + } + a + } + + def rangeImpl(from: Option[Int], until: Option[Int]): C = { + val a = coll.toBitMask + val len = a.length + if (from.isDefined) { + val f = from.get + val w = f >> LogWL + val b = f & (WordLength - 1) + if (w >= 0) { + java.util.Arrays.fill(a, 0, math.min(w, len), 0) + if (b > 0 && w < len) a(w) &= ~((1L << b) - 1) + } + } + if (until.isDefined) { + val u = until.get + val w = u >> LogWL + val b = u & (WordLength - 1) + if (w < len) { + java.util.Arrays.fill(a, math.max(w + 1, 0), len, 0) + if (w >= 0) a(w) &= (1L << b) - 1 + } + } + coll.fromBitMaskNoCopy(a) + } + + override def concat(other: collection.IterableOnce[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords max otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) | otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.concat(other) + } + + override def intersect(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords min otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) & otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.intersect(other) + } + + abstract override def diff(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) & ~otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.diff(other) + } + + /** Computes the symmetric difference of this bitset and another bitset by performing + * a bitwise "exclusive-or". + * + * @param other the other bitset to take part in the symmetric difference. + * @return a bitset containing those bits of this + * bitset or the other bitset that are not contained in both bitsets. + */ + def xor(other: BitSet): C = { + val len = coll.nwords max other.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = coll.word(idx) ^ other.word(idx) + coll.fromBitMaskNoCopy(words) + } + + @`inline` final def ^ (other: BitSet): C = xor(other) + + /** + * Builds a new bitset by applying a function to all elements of this bitset + * @param f the function to apply to each element. 
+ * @return a new bitset resulting from applying the given function ''f'' to + * each element of this bitset and collecting the results + */ + def map(f: Int => Int): C = fromSpecific(new View.Map(this, f)) + + def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f)) + + def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf)) + + override def partition(p: Int => Boolean): (C, C) = { + val left = filter(p) + (left, this &~ left) + } +} + +object BitSetOps { + + /* Final vals can sometimes be inlined as constants (faster) */ + private[collection] final val LogWL = 6 + private[collection] final val WordLength = 64 + private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1 + + private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = { + var len = elems.length + while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1 + var newlen = len + if (idx >= newlen && w != 0L) newlen = idx + 1 + val newelems = new Array[Long](newlen) + Array.copy(elems, 0, newelems, 0, len) + if (idx < newlen) newelems(idx) = w + else assert(w == 0L) + newelems + } + + private[collection] def computeWordForFilter(pred: Int => Boolean, isFlipped: Boolean, oldWord: Long, wordIndex: Int): Long = + if (oldWord == 0L) 0L else { + var w = oldWord + val trailingZeroes = java.lang.Long.numberOfTrailingZeros(w) + var jmask = 1L << trailingZeroes + var j = wordIndex * BitSetOps.WordLength + trailingZeroes + val maxJ = (wordIndex + 1) * BitSetOps.WordLength - java.lang.Long.numberOfLeadingZeros(w) + while (j != maxJ) { + if ((w & jmask) != 0L) { + if (pred(j) == isFlipped) { + // j did not pass the filter here + w = w & ~jmask + } + } + jmask = jmask << 1 + j += 1 + } + w + } +} diff --git a/tests/pos-special/stdlib/collection/BufferedIterator.scala b/tests/pos-special/stdlib/collection/BufferedIterator.scala new file mode 100644 index 000000000000..bc35ee0a25da --- /dev/null +++ b/tests/pos-special/stdlib/collection/BufferedIterator.scala @@ -0,0 +1,32 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + + +/** Buffered iterators are iterators which provide a method `head` + * that inspects the next element without discarding it. + */ +trait BufferedIterator[+A] extends Iterator[A] { + + /** Returns next element of iterator without advancing beyond it. + */ + def head: A + + /** Returns an option of the next element of an iterator without advancing beyond it. + * @return the next element of this iterator if it has a next element + * `None` if it does not + */ + def headOption : Option[A] = if (hasNext) Some(head) else None + + override def buffered: this.type = this +} diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala new file mode 100644 index 000000000000..bc9c49d9493c --- /dev/null +++ b/tests/pos-special/stdlib/collection/BuildFrom.scala @@ -0,0 +1,122 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.annotation.implicitNotFound +import scala.collection.mutable.Builder +import scala.collection.immutable.WrappedString +import scala.reflect.ClassTag + +/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. + * Implicit instances of `BuildFrom` are available for all collection types. + * + * @tparam From Type of source collection + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +@implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.") +trait BuildFrom[-From, -A, +C] extends Any { self => + def fromSpecific(from: From)(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ + def newBuilder(from: From): Builder[A, C] + + @deprecated("Use newBuilder() instead of apply()", "2.13.0") + @`inline` def apply(from: From): Builder[A, C] = newBuilder(from) + + /** Partially apply a BuildFrom to a Factory */ + def toFactory(from: From): Factory[A, C] = new Factory[A, C] { + def fromSpecific(it: IterableOnce[A]): C = self.fromSpecific(from)(it) + def newBuilder: Builder[A, C] = self.newBuilder(from) + } +} + +object BuildFrom extends BuildFromLowPriority1 { + + /** Build the source collection type from a MapOps */ + implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + //TODO: Reuse a prototype instance + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) + } + + /** Build the source collection type from a SortedMapOps */ + implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) + } + + implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] = + new BuildFrom[C, Int, C] { + def fromSpecific(from: C)(it: IterableOnce[Int]): C = from.bitSetFactory.fromSpecific(it) + def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder + } + + implicit val buildFromString: BuildFrom[String, Char, String] = + new BuildFrom[String, Char, String] { + def fromSpecific(from: String)(it: IterableOnce[Char]): String = Factory.stringFactory.fromSpecific(it) + def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder + } + + implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] = + new BuildFrom[WrappedString, Char, WrappedString] { + def fromSpecific(from: WrappedString)(it: IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(it) + def 
newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder
+    }
+
+  implicit def buildFromArray[A : ClassTag]: BuildFrom[Array[_], A, Array[A]] =
+    new BuildFrom[Array[_], A, Array[A]] {
+      def fromSpecific(from: Array[_])(it: IterableOnce[A]): Array[A] = Factory.arrayFactory[A].fromSpecific(it)
+      def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder
+    }
+
+  implicit def buildFromView[A, B]: BuildFrom[View[A], B, View[B]] =
+    new BuildFrom[View[A], B, View[B]] {
+      def fromSpecific(from: View[A])(it: IterableOnce[B]): View[B] = View.from(it)
+      def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder
+    }
+
+}
+
+trait BuildFromLowPriority1 extends BuildFromLowPriority2 {
+
+  /** Build the source collection type from an Iterable with SortedOps */
+  // Restating the upper bound of CC in the result type seems redundant, but it serves to prune the
+  // implicit search space for faster compilation and reduced chance of divergence. See the compilation
+  // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209
+  implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] {
+    def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A]
+    def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it)
+  }
+
+  implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] =
+    new BuildFrom[String, A, immutable.IndexedSeq[A]] {
+      def fromSpecific(from: String)(it: IterableOnce[A]): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it)
+      def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
+    }
+}
+
+trait BuildFromLowPriority2 {
+  /** Build the source collection type from an IterableOps */
+  implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] {
+    //TODO: Reuse a prototype instance
+    def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A]
+    def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it)
+  }
+
+  implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] {
+    def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder
+    def fromSpecific(from: Iterator[_])(it: IterableOnce[A]): Iterator[A] = Iterator.from(it)
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/DefaultMap.scala b/tests/pos-special/stdlib/collection/DefaultMap.scala
new file mode 100644
index 000000000000..cbc61d8c0268
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/DefaultMap.scala
@@ -0,0 +1,21 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+
+/** A default map which builds a default `immutable.Map` implementation for all
+ *  transformations.
+ */ +@deprecated("DefaultMap is no longer necessary; extend Map directly", "2.13.0") +trait DefaultMap[K, +V] extends Map[K, V] diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala new file mode 100644 index 000000000000..2b15f1cc15d1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Factory.scala @@ -0,0 +1,784 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.collection.immutable.NumericRange +import scala.language.implicitConversions +import scala.collection.mutable.Builder +import scala.annotation.unchecked.uncheckedVariance +import scala.reflect.ClassTag + +/** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * This is a general form of any factory ([[IterableFactory]], + * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose + * element type is fixed. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +trait Factory[-A, +C] extends Any { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ + def newBuilder: Builder[A, C] +} + +object Factory { + + implicit val stringFactory: Factory[Char, String] = new StringFactory + @SerialVersionUID(3L) + private class StringFactory extends Factory[Char, String] with Serializable { + def fromSpecific(it: IterableOnce[Char]): String = { + val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[Char, String] = new mutable.StringBuilder() + } + + implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] + @SerialVersionUID(3L) + private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = { + val b = newBuilder + b.sizeHint(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A] + } + +} + +/** Base trait for companion objects of unconstrained collection types that may require + * multiple traversals of a source collection to build a target collection `CC`. + * + * @tparam CC Collection type constructor (e.g. `List`) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait IterableFactory[+CC[_]] extends Serializable { + + /** Creates a target $coll from an existing source collection + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new $coll with the elements of `source` + */ + def from[A](source: IterableOnce[A]): CC[A] + + /** An empty collection + * @tparam A the type of the ${coll}'s elements + */ + def empty[A]: CC[A] + + /** Creates a $coll with the specified elements. 
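+   *
+   *  For example, `List(1, 2, 3)` expands to `List.apply(1, 2, 3)`, which is
+   *  implemented here as `from(elems)`.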
+   * @tparam A   the type of the ${coll}'s elements
+   * @param elems  the elements of the created $coll
+   * @return  a new $coll with elements `elems`
+   */
+  def apply[A](elems: A*): CC[A] = from(elems)
+
+  /** Produces a $coll containing repeated applications of a function to a start value.
+   *
+   *  @param start the start value of the $coll
+   *  @param len   the number of elements contained in the $coll
+   *  @param f     the function that's repeatedly applied
+   *  @return      a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...`
+   */
+  def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f))
+
+  /** Produces a $coll that uses a function `f` to produce elements of type `A`
+   *  and update an internal state of type `S`.
+   *
+   *  @param init State initial value
+   *  @param f    Computes the next element (or returns `None` to signal
+   *              the end of the collection)
+   *  @tparam A   Type of the elements
+   *  @tparam S   Type of the internal state
+   *  @return a $coll that produces elements using `f` until `f` returns `None`
+   */
+  def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f))
+
+  /** Produces a $coll containing a sequence of increasing integers.
+   *
+   *  @param start the first element of the $coll
+   *  @param end   the end value of the $coll (the first value NOT contained)
+   *  @return  a $coll with values `start, start + 1, ..., end - 1`
+   */
+  def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one))
+
+  /** Produces a $coll containing equally spaced values in some integer interval.
+   *  @param start the start value of the $coll
+   *  @param end   the end value of the $coll (the first value NOT contained)
+   *  @param step  the difference between successive elements of the $coll (must be positive or negative)
+   *  @return      a $coll with values `start, start + step, ...` up to, but excluding `end`
+   */
+  def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step))
+
+  /**
+   * @return A builder for $Coll objects.
+   * @tparam A the type of the ${coll}’s elements
+   */
+  def newBuilder[A]: Builder[A, CC[A]]
+
+  /** Produces a $coll containing the results of some element computation a number of times.
+   *  @param n     the number of elements contained in the $coll.
+   *  @param elem  the element computation
+   *  @return  A $coll that contains the results of `n` evaluations of `elem`.
+   */
+  def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem))
+
+  /** Produces a two-dimensional $coll containing the results of some element computation a number of times.
+   *  @param n1    the number of elements in the 1st dimension
+   *  @param n2    the number of elements in the 2nd dimension
+   *  @param elem  the element computation
+   *  @return  A $coll that contains the results of `n1 x n2` evaluations of `elem`.
+   */
+  def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem))
+
+  /** Produces a three-dimensional $coll containing the results of some element computation a number of times.
+   *  @param n1    the number of elements in the 1st dimension
+   *  @param n2    the number of elements in the 2nd dimension
+   *  @param n3    the number of elements in the 3rd dimension
+   *  @param elem  the element computation
+   *  @return  A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`.
+ */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. 
+ */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. + */ + def concat[A](xss: Iterable[A]*): CC[A] = { + from(xss.foldLeft(View.empty[A])(_ ++ _)) + } + + implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) +} + +object IterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. `Seq`, `List`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = + new BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder + } + + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `List`) + */ +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
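+
+  // Sketch: this `unapplySeq` is what makes sequence patterns work against such
+  // factories, e.g.
+  //   List(1, 2, 3) match { case List(head, _*) => head }   // head == 1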
+} + +object SeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } + + final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[A] = this + def lengthCompare(len: Int): Int = c.lengthCompare(len) + def apply(i: Int): A = c(i) + def drop(n: Int): scala.Seq[A] = c match { + case seq: scala.Seq[A] => seq.drop(n) + case _ => c.view.drop(n).toSeq + } + def toSeq: scala.Seq[A] = c.toSeq + } +} + +trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { + + override def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + override def concat[A](xss: Iterable[A]*): CC[A] = { + val b = newBuilder[A] + val knownSizes = xss.view.map(_.knownSize) + if (knownSizes forall (_ >= 0)) { + b.sizeHint(knownSizes.sum) + } + for (xs <- xss) b ++= xs + b.result() + } + +} + +/** + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + def empty: C + def apply(xs: A*): C = fromSpecific(xs) + def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) + def newBuilder: Builder[A, C] + + implicit def specificIterableFactory: Factory[A, C] = this +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait MapFactory[+CC[_, _]] extends Serializable { + + /** + * An empty Map + */ + def empty[K, V]: CC[K, V] + + /** + * A collection of type Map generated from given iterable object. + */ + def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + + /** + * A collection of type Map that contains given key/value bindings. + */ + def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) + + /** + * The default builder for Map objects. + */ + def newBuilder[K, V]: Builder[(K, V), CC[K, V]] + + /** + * The default Factory instance for maps. + */ + implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) +} + +object MapFactory { + + /** + * Fixes the key and value types of `factory` to `K` and `V`, respectively + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) 
+ * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` + * and values of type `V` + */ + implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = + new BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { + override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) + def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def empty[K, V]: C[K, V] = delegate.empty + def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder + } +} + +/** Base trait for companion objects of collections that require an implicit evidence. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + * @tparam Ev Unary type constructor for the implicit evidence required for an element type + * (typically `Ordering` or `ClassTag`) + * + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { + + def from[E : Ev](it: IterableOnce[E]): CC[E] + + def empty[A : Ev]: CC[A] + + def apply[A : Ev](xs: A*): CC[A] = from(xs) + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. 
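+   *
+   *  For example, a sketch that counts down from a seed (the evidence here is
+   *  `Ordering[Int]`):
+   *  {{{
+   *  immutable.TreeSet.unfold(3)(s => if (s > 0) Some((s, s - 1)) else None)
+   *  // TreeSet(1, 2, 3)
+   *  }}}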
+   *
+   *  @param init State initial value
+   *  @param f    Computes the next element (or returns `None` to signal
+   *              the end of the collection)
+   *  @tparam A   Type of the elements
+   *  @tparam S   Type of the internal state
+   *  @return a $coll that produces elements using `f` until `f` returns `None`
+   */
+  def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f))
+
+  def newBuilder[A : Ev]: Builder[A, CC[A]]
+
+  implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this)
+}
+
+object EvidenceIterableFactory {
+
+  /**
+   * Fixes the element type of `factory` to `A`
+   * @param factory The factory to fix the element type
+   * @tparam A Type of elements
+   * @tparam CC Collection type constructor of the factory (e.g. `TreeSet`)
+   * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`)
+   * @return A [[Factory]] that uses the given `factory` to build a collection of elements
+   *         of type `A`
+   */
+  implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory)
+
+  @SerialVersionUID(3L)
+  private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable {
+    def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it)
+    def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A]
+  }
+
+  implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory)
+  private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] {
+    def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it)
+    def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A]
+  }
+
+  @SerialVersionUID(3L)
+  class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] {
+    override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*)
+    def empty[A : Ev]: CC[A] = delegate.empty
+    def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it)
+    def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A]
+  }
+}
+
+/** Base trait for companion objects of collections that require an implicit `Ordering`.
+  * @tparam CC Collection type constructor (e.g. `SortedSet`)
+  */
+trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering]
+
+object SortedIterableFactory {
+  @SerialVersionUID(3L)
+  class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering])
+    extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC]
+}
+
+/** Base trait for companion objects of collections that require an implicit `ClassTag`.
+  * @tparam CC Collection type constructor (e.g. `ArraySeq`)
+  */
+trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] {
+
+  @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] =
+    ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays
+
+  /** Produces a $coll containing a sequence of increasing integers.
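+   *
+   *  For example, a sketch with an array-backed collection (the evidence here is
+   *  `ClassTag[Int]`):
+   *  {{{
+   *  mutable.ArraySeq.range(1, 4)  // ArraySeq(1, 2, 3)
+   *  }}}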
+ * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
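+   *
+   *  For example, a sketch (the outer `CC[CC[A]]` gets its `ClassTag` from `ccClassTag` above):
+   *  {{{
+   *  mutable.ArraySeq.tabulate(2, 3)((i, j) => i * 3 + j)
+   *  // ArraySeq(ArraySeq(0, 1, 2), ArraySeq(3, 4, 5))
+   *  }}}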
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) +} + +object ClassTagIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) + extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] + + /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. 
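+   * For instance, a collection of `Int`s built through this delegate may store its
+   * elements boxed in an `Array[AnyRef]`, where the `ClassTag`-aware factory could
+   * have used a primitive `Array[Int]`.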
*/ + @SerialVersionUID(3L) + class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { + def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? +} + +object ClassTagSeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] + + /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] +} + +trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { + + override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SortedMapFactory[+CC[_, _]] extends Serializable { + + def empty[K : Ordering, V]: CC[K, V] + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] + + def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) + + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] + + implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) + +} + +object SortedMapFactory { + + /** + * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, + * respectively. 
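+   *
+   *  For example, a sketch where the conversion applies because a `Factory` is expected:
+   *  {{{
+   *  val fac: Factory[(Int, String), immutable.TreeMap[Int, String]] = immutable.TreeMap
+   *  fac.fromSpecific(List(2 -> "b", 1 -> "a"))  // TreeMap(1 -> a, 2 -> b)
+   *  }}}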
+ * + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `TreeMap`) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of + * type `K` and values of type `V` + */ + implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) + private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { + override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def empty[K : Ordering, V]: CC[K, V] = delegate.empty + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder + } +} diff --git a/tests/pos-special/stdlib/collection/Hashing.scala b/tests/pos-special/stdlib/collection/Hashing.scala new file mode 100644 index 000000000000..4e1fd872b8b5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Hashing.scala @@ -0,0 +1,62 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */
+
+package scala
+package collection
+
+
+protected[collection] object Hashing {
+
+  def elemHashCode(key: Any): Int = key.##
+
+  def improve(hcode: Int): Int = {
+    var h: Int = hcode + ~(hcode << 9)
+    h = h ^ (h >>> 14)
+    h = h + (h << 4)
+    h ^ (h >>> 10)
+  }
+
+  def computeHash(key: Any): Int =
+    improve(elemHashCode(key))
+
+  /**
+   * Utility method to keep a subset of all bits in a given bitmap
+   *
+   * Example
+   *    bitmap (binary): 00000001000000010000000100000001
+   *    keep (binary):                                1010
+   *    result (binary): 00000001000000000000000100000000
+   *
+   * @param bitmap the bitmap
+   * @param keep a bitmask containing which bits to keep
+   * @return the original bitmap with all bits where keep is not 1 set to 0
+   */
+  def keepBits(bitmap: Int, keep: Int): Int = {
+    var result = 0
+    var current = bitmap
+    var kept = keep
+    while (kept != 0) {
+      // lowest remaining bit in current
+      val lsb = current ^ (current & (current - 1))
+      if ((kept & 1) != 0) {
+        // mark bit in result bitmap
+        result |= lsb
+      }
+      // clear lowest remaining one bit in current
+      current &= ~lsb
+      // look at the next kept bit
+      kept >>>= 1
+    }
+    result
+  }
+
+}
diff --git a/tests/pos-special/stdlib/collection/IndexedSeq.scala b/tests/pos-special/stdlib/collection/IndexedSeq.scala
index 6e8e2bd0dc66..a82d5384779a 100644
--- a/tests/pos-special/stdlib/collection/IndexedSeq.scala
+++ b/tests/pos-special/stdlib/collection/IndexedSeq.scala
@@ -17,7 +17,6 @@ import scala.annotation.{nowarn, tailrec}
 import scala.collection.Searching.{Found, InsertionPoint, SearchResult}
 import scala.collection.Stepper.EfficientSplit
 import scala.math.Ordering
-import language.experimental.captureChecking
 
 /** Base trait for indexed sequences that have efficient `apply` and `length` */
 trait IndexedSeq[+A] extends Seq[A]
@@ -104,7 +103,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self =>
 
   override def knownSize: Int = length
 
-  override final def lengthCompare(that: Iterable[_]^): Int = {
+  override final def lengthCompare(that: Iterable[_]): Int = {
     val res = that.sizeCompare(length)
     // can't just invert the result, because `-Int.MinValue == Int.MinValue`
     if (res == Int.MinValue) 1 else -res
diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala
new file mode 100644
index 000000000000..737f032d2060
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/IndexedSeqView.scala
@@ -0,0 +1,180 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */ + +package scala +package collection + +import scala.annotation.nowarn + + +/** View defined in terms of indexing a range */ +trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] { self => + + override def view: IndexedSeqView[A] = this + + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + override def view(from: Int, until: Int): IndexedSeqView[A] = view.slice(from, until) + + override def iterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewIterator(this) + override def reverseIterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewReverseIterator(this) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Prepended(elem, this) + override def take(n: Int): IndexedSeqView[A] = new IndexedSeqView.Take(this, n) + override def takeRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.TakeRight(this, n) + override def drop(n: Int): IndexedSeqView[A] = new IndexedSeqView.Drop(this, n) + override def dropRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.DropRight(this, n) + override def map[B](f: A => B): IndexedSeqView[B] = new IndexedSeqView.Map(this, f) + override def reverse: IndexedSeqView[A] = new IndexedSeqView.Reverse(this) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new IndexedSeqView.Slice(this, from, until) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) + + def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(prefix, this) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "IndexedSeqView" +} + +object IndexedSeqView { + + @SerialVersionUID(3L) + private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[this] var current = 0 + private[this] var remainder = self.length + override def knownSize: Int = remainder + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(current) + current += 1 + remainder -= 1 + r + } else Iterator.empty.next() + + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + current += n + remainder = Math.max(0, remainder - n) + } + this + } + + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + + def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value + + val formatFrom = formatRange(from) + val formatUntil = formatRange(until) + remainder = Math.max(0, formatUntil - formatFrom) + current = current + formatFrom + this + } + } + @SerialVersionUID(3L) + private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[this] var remainder = self.length + private[this] var pos = remainder - 1 + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(pos) + pos -= 1 + 
remainder -= 1 + r + } else Iterator.empty.next() + + // from < 0 means don't move pos, until < 0 means don't limit remainder + // + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + if (_hasNext) { + if (remainder <= from) remainder = 0 // exhausted by big skip + else if (from <= 0) { // no skip, pos is same + if (until >= 0 && until < remainder) remainder = until // ...limited by until + } + else { + pos -= from // skip ahead + if (until >= 0 && until < remainder) { // ...limited by until + if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip + else remainder = until - from // ...limited by until, less the skip + } + else remainder -= from // ...otherwise just less the skip + } + } + this + } + } + + /** An `IndexedSeqOps` whose collection type and collection type constructor are unknown */ + type SomeIndexedSeqOps[A] = IndexedSeqOps[A, AnyConstr, _] + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A]) + extends SeqView.Id(underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A) + extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]) + extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A]) + extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.Take(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B) + extends SeqView.Map(underlying, f) with IndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A]) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int) extends AbstractIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def length: Int = len + } +} + +/** Explicit instantiation of the `IndexedSeqView` trait to reduce class file size in subclasses. 
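The reverse iterator's `sliceIterator` implements the same contract in mirror image: a positive `from` moves `pos` toward the low end of the sequence, while a non-negative `until` caps `remainder`, exactly as the inline comments above trace. A small sketch of the observable behavior, using only standard-library calls:

  val rev = (1 to 6).reverseIterator            // yields 6, 5, 4, 3, 2, 1
  assert(rev.slice(1, 4).toList == List(5, 4, 3))

  // a skip at least as large as the remaining elements exhausts the iterator
  assert(!(1 to 6).reverseIterator.slice(7, 9).hasNext)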
*/ +@SerialVersionUID(3L) +abstract class AbstractIndexedSeqView[+A] extends AbstractSeqView[A] with IndexedSeqView[A] diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala index 85c0debc6685..04647f215963 100644 --- a/tests/pos-special/stdlib/collection/Iterable.scala +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -17,7 +17,6 @@ import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.Builder import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} -import language.experimental.captureChecking /** Base trait for generic collections. * @@ -29,7 +28,6 @@ import language.experimental.captureChecking trait Iterable[+A] extends IterableOnce[A] with IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { - this: Iterable[A]^ => // The collection itself @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") @@ -134,31 +132,29 @@ trait Iterable[+A] extends IterableOnce[A] * and may be nondeterministic. */ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with IterableOnceOps[A, CC, C] { - this: IterableOps[A, CC, C]^ => - /** * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. */ // Should be `protected def asIterable`, or maybe removed altogether if it's not needed @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") - def toIterable: Iterable[A]^{this} + def toIterable: Iterable[A] /** Converts this $coll to an unspecified Iterable. Will return * the same collection if this instance is already Iterable. * @return An Iterable containing all elements of this $coll. */ @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") - final def toTraversable: Traversable[A]^{this} = toIterable + final def toTraversable: Traversable[A] = toIterable override def isTraversableAgain: Boolean = true /** * @return This collection as a `C`. */ - protected def coll: C^{this} + protected def coll: C @deprecated("Use coll instead of repr in a collection implementation, use the collection value itself from the outside", "2.13.0") - final def repr: C^{this} = coll + final def repr: C = coll /** * Defines how to turn a given `Iterable[A]` into a collection of type `C`. @@ -178,7 +174,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * `Iterable[A]` obtained from `this` collection (as it is the case in the * implementations of operations where we use a `View[A]`), it is safe. */ - protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): C^{coll} + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): C /** The companion object of this ${coll}, providing various factory methods. * @@ -255,7 +251,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable def lastOption: Option[A] = if (isEmpty) None else Some(last) /** A view over the elements of this collection. 
*/ - def view: View[A]^{this} = View.fromIteratorProvider(() => iterator) + def view: View[A] = View.fromIteratorProvider(() => iterator) /** Compares the size of this $coll to a test value. * @@ -305,7 +301,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * this.sizeIs > size // this.sizeCompare(size) > 0 * }}} */ - @inline final def sizeIs: IterableOps.SizeCompareOps^{this} = new IterableOps.SizeCompareOps(this) + @inline final def sizeIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this) /** Compares the size of this $coll to the size of another `Iterable`. * @@ -321,7 +317,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * is `O(this.size min that.size)` instead of `O(this.size + that.size)`. * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. */ - def sizeCompare(that: Iterable[_]^): Int = { + def sizeCompare(that: Iterable[_]): Int = { val thatKnownSize = that.knownSize if (thatKnownSize >= 0) this sizeCompare thatKnownSize @@ -346,7 +342,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable /** A view over a slice of the elements of this collection. */ @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") - def view(from: Int, until: Int): View[A]^{this} = view.slice(from, until) + def view(from: Int, until: Int): View[A] = view.slice(from, until) /** Transposes this $coll of iterable collections into * a $coll of ${coll}s. @@ -382,7 +378,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @throws IllegalArgumentException if all collections in this $coll * are not of the same size. */ - def transpose[B](implicit asIterable: A -> /*<: /*<: Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = false)) + def filter(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = false)) - def filterNot(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = true)) + def filterNot(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = true)) /** Creates a non-strict filter of this $coll. * @@ -421,7 +417,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * All these operations apply to those elements of this $coll * which satisfy the predicate `p`. */ - def withFilter(p: A => Boolean): collection.WithFilter[A, CC]^{this, p} = new IterableOps.WithFilter(this, p) + def withFilter(p: A => Boolean): collection.WithFilter[A, CC] = new IterableOps.WithFilter(this, p) /** A pair of, first, all elements that satisfy predicate `p` and, second, * all elements that do not. Interesting because it splits a collection in two. @@ -430,15 +426,15 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * Strict collections have an overridden version of `partition` in `StrictOptimizedIterableOps`, * which requires only a single traversal. 
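`filter` and `filterNot` wrap the receiver in a lazy `View.Filter` and hand it to `fromSpecific`: a strict collection then copies in a single traversal, while on a `.view` nothing runs until the result is itself traversed. A quick sketch of the laziness:

  var evaluated = 0
  val lazyEvens = List(1, 2, 3, 4).view.filter { x => evaluated += 1; x % 2 == 0 }
  assert(evaluated == 0)                  // predicate not yet applied
  assert(lazyEvens.toList == List(2, 4))  // forcing the view runs it
  assert(evaluated == 4)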
*/ - def partition(p: A => Boolean): (C^{this, p}, C^{this, p}) = { + def partition(p: A => Boolean): (C, C) = { val first = new View.Filter(this, p, false) val second = new View.Filter(this, p, true) (fromSpecific(first), fromSpecific(second)) } - override def splitAt(n: Int): (C^{this}, C^{this}) = (take(n), drop(n)) + override def splitAt(n: Int): (C, C) = (take(n), drop(n)) - def take(n: Int): C^{this} = fromSpecific(new View.Take(this, n)) + def take(n: Int): C = fromSpecific(new View.Take(this, n)) /** Selects the last ''n'' elements. * $orderDependent @@ -447,7 +443,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * or else the whole $coll, if it has less than `n` elements. * If `n` is negative, returns an empty $coll. */ - def takeRight(n: Int): C^{this} = fromSpecific(new View.TakeRight(this, n)) + def takeRight(n: Int): C = fromSpecific(new View.TakeRight(this, n)) /** Takes longest prefix of elements that satisfy a predicate. * $orderDependent @@ -455,11 +451,11 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return the longest prefix of this $coll whose elements all satisfy * the predicate `p`. */ - def takeWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.TakeWhile(this, p)) + def takeWhile(p: A => Boolean): C = fromSpecific(new View.TakeWhile(this, p)) - def span(p: A => Boolean): (C^{this, p}, C^{this, p}) = (takeWhile(p), dropWhile(p)) + def span(p: A => Boolean): (C, C) = (takeWhile(p), dropWhile(p)) - def drop(n: Int): C^{this} = fromSpecific(new View.Drop(this, n)) + def drop(n: Int): C = fromSpecific(new View.Drop(this, n)) /** Selects all elements except last ''n'' ones. * $orderDependent @@ -468,9 +464,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * empty $coll, if this $coll has less than `n` elements. * If `n` is negative, don't drop any elements. */ - def dropRight(n: Int): C^{this} = fromSpecific(new View.DropRight(this, n)) + def dropRight(n: Int): C = fromSpecific(new View.DropRight(this, n)) - def dropWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.DropWhile(this, p)) + def dropWhile(p: A => Boolean): C = fromSpecific(new View.DropWhile(this, p)) /** Partitions elements in fixed size ${coll}s. * @see [[scala.collection.Iterator]], method `grouped` @@ -479,7 +475,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return An iterator producing ${coll}s of size `size`, except the * last will be less than size `size` if the elements don't divide evenly. */ - def grouped(size: Int): Iterator[C^{this}]^{this} = + def grouped(size: Int): Iterator[C] = iterator.grouped(size).map(fromSpecific) /** Groups elements in fixed size blocks by passing a "sliding window" @@ -501,7 +497,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @example `List(1, 2).sliding(2) = Iterator(List(1, 2))` * @example `List(1, 2, 3).sliding(2) = Iterator(List(1, 2), List(2, 3))` */ - def sliding(size: Int): Iterator[C^{this}]^{this} = sliding(size, 1) + def sliding(size: Int): Iterator[C] = sliding(size, 1) /** Groups elements in fixed size blocks by passing a "sliding window" * over them (as opposed to partitioning them, as is done in grouped.) @@ -520,13 +516,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * element (which may be the only element) will be smaller * if there are fewer than `size` elements remaining to be grouped. 
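`grouped` and `sliding` delegate to the iterator and rebuild each window with `fromSpecific`, so every window has the same collection type as the receiver. For example:

  assert(List(1, 2, 3, 4, 5).grouped(2).toList ==
    List(List(1, 2), List(3, 4), List(5)))
  assert(Vector(1, 2, 3).sliding(2).toList ==
    List(Vector(1, 2), Vector(2, 3)))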
* @example `List(1, 2, 3, 4, 5).sliding(2, 2) = Iterator(List(1, 2), List(3, 4), List(5))` - * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` + * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` */ - def sliding(size: Int, step: Int): Iterator[C^{this}]^{this} = + def sliding(size: Int, step: Int): Iterator[C] = iterator.sliding(size, step).map(fromSpecific) /** The rest of the collection without its first element. */ - def tail: C^{this} = { + def tail: C = { if (isEmpty) throw new UnsupportedOperationException drop(1) } @@ -534,12 +530,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable /** The initial part of the collection without its last element. * $willForceEvaluation */ - def init: C^{this} = { + def init: C = { if (isEmpty) throw new UnsupportedOperationException dropRight(1) } - def slice(from: Int, until: Int): C^{this} = + def slice(from: Int, until: Int): C = fromSpecific(new View.Drop(new View.Take(this, until), from)) /** Partitions this $coll into a map of ${coll}s according to some discriminator function. @@ -649,9 +645,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * * @return a new $coll containing the prefix scan of the elements in this $coll */ - def scan[B >: A](z: B)(op: (B, B) => B): CC[B]^{this, op} = scanLeft(z)(op) + def scan[B >: A](z: B)(op: (B, B) => B): CC[B] = scanLeft(z)(op) - def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} = iterableFactory.from(new View.ScanLeft(this, z, op)) + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = iterableFactory.from(new View.ScanLeft(this, z, op)) /** Produces a collection containing cumulative results of applying the operator going right to left. * The head of the collection is the last cumulative result. 
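Since `scan` is an alias for `scanLeft`, both prepend the seed and keep every intermediate fold result, so the output always has one more element than the input. For example:

  assert(List(1, 2, 3).scanLeft(0)(_ + _) == List(0, 1, 3, 6))
  assert(List(1, 2, 3).scan(0)(_ + _)     == List(0, 1, 3, 6))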
@@ -669,7 +665,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results */ - def scanRight[B](z: B)(op: (A, B) => B): CC[B]^{this, op} = { + def scanRight[B](z: B)(op: (A, B) => B): CC[B] = { class Scanner extends runtime.AbstractFunction1[A, Unit] { var acc = z var scanned = acc :: immutable.Nil @@ -683,13 +679,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable iterableFactory.from(scanner.scanned) } - def map[B](f: A => B): CC[B]^{this, f} = iterableFactory.from(new View.Map(this, f)) + def map[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(this, f)) - def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = iterableFactory.from(new View.FlatMap(this, f)) + def flatMap[B](f: A => IterableOnce[B]): CC[B] = iterableFactory.from(new View.FlatMap(this, f)) - def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} = flatMap(asIterable) + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] = flatMap(asIterable) - def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} = + def collect[B](pf: PartialFunction[A, B]): CC[B] = iterableFactory.from(new View.Collect(this, pf)) /** Applies a function `f` to each element of the $coll and returns a pair of ${coll}s: the first one @@ -710,12 +706,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @tparam A2 the element type of the second resulting collection * @param f the 'split function' mapping the elements of this $coll to an [[scala.util.Either]] * - * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], * and the second one made of those wrapped in [[scala.util.Right]]. */ - def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1]^{this, f}, CC[A2]^{this, f}) = { - val left: View[A1]^{f, this} = new LeftPartitionMapped(this, f) - val right: View[A2]^{f, this} = new RightPartitionMapped(this, f) + def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { + val left: View[A1] = new LeftPartitionMapped(this, f) + val right: View[A2] = new RightPartitionMapped(this, f) (iterableFactory.from(left), iterableFactory.from(right)) } @@ -728,13 +724,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a new $coll which contains all elements * of this $coll followed by all elements of `suffix`. */ - def concat[B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = iterableFactory.from(suffix match { + def concat[B >: A](suffix: IterableOnce[B]): CC[B] = iterableFactory.from(suffix match { case xs: Iterable[B] => new View.Concat(this, xs) case xs => iterator ++ suffix.iterator }) /** Alias for `concat` */ - @`inline` final def ++ [B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = concat(suffix) + @`inline` final def ++ [B >: A](suffix: IterableOnce[B]): CC[B] = concat(suffix) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. @@ -745,12 +741,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. 
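`partitionMap` builds its two halves from the `LeftPartitionMapped` and `RightPartitionMapped` views over the same source; note that for a strict receiver the two views traverse the source independently. A usage sketch:

  val (evens, odds) =
    List(1, 2, 3, 4, 5).partitionMap(i => if (i % 2 == 0) Left(i) else Right(i))
  assert(evens == List(2, 4))
  assert(odds  == List(1, 3, 5))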
* The length of the returned collection is the minimum of the lengths of this $coll and `that`. */ - def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)]^{this, that} = iterableFactory.from(that match { // sound bcs of VarianceNote + def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = iterableFactory.from(that match { // sound bcs of VarianceNote case that: Iterable[B] => new View.Zip(this, that) case _ => iterator.zip(that) }) - def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} = iterableFactory.from(new View.ZipWithIndex(this)) + def zipWithIndex: CC[(A @uncheckedVariance, Int)] = iterableFactory.from(new View.ZipWithIndex(this)) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. @@ -766,7 +762,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. */ - def zipAll[A1 >: A, B](that: Iterable[B]^, thisElem: A1, thatElem: B): CC[(A1, B)]^{this, that} = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): CC[(A1, B)] = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) /** Converts this $coll of pairs into two collections of the first and second * half of each pair. @@ -787,9 +783,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a pair of ${coll}s, containing the first, respectively second * half of each element pair of this $coll. */ - def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1]^{this}, CC[A2]^{this}) = { - val first: View[A1]^{this} = new View.Map[A, A1](this, asPair(_)._1) - val second: View[A2]^{this} = new View.Map[A, A2](this, asPair(_)._2) + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val first: View[A1] = new View.Map[A, A1](this, asPair(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asPair(_)._2) (iterableFactory.from(first), iterableFactory.from(second)) } @@ -814,10 +810,10 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a triple of ${coll}s, containing the first, second, respectively * third member of each element triple of this $coll. 
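`zipAll` pads whichever side runs out first, and `unzip` is built from two `View.Map` passes over the pairs. For example:

  assert(List(1, 2).zipAll(List("a", "b", "c"), 0, "-") ==
    List((1, "a"), (2, "b"), (0, "c")))
  assert(List((1, "a"), (2, "b")).unzip == (List(1, 2), List("a", "b")))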
*/ - def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1]^{this}, CC[A2]^{this}, CC[A3]^{this}) = { - val first: View[A1]^{this} = new View.Map[A, A1](this, asTriple(_)._1) - val second: View[A2]^{this} = new View.Map[A, A2](this, asTriple(_)._2) - val third: View[A3]^{this} = new View.Map[A, A3](this, asTriple(_)._3) + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val first: View[A1] = new View.Map[A, A1](this, asTriple(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asTriple(_)._2) + val third: View[A3] = new View.Map[A, A3](this, asTriple(_)._3) (iterableFactory.from(first), iterableFactory.from(second), iterableFactory.from(third)) } @@ -828,7 +824,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return an iterator over all the tails of this $coll * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` */ - def tails: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.tail) + def tails: Iterator[C] = iterateUntilEmpty(_.tail) /** Iterates over the inits of this $coll. The first value will be this * $coll and the final one will be an empty $coll, with the intervening @@ -839,24 +835,21 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return an iterator over all the inits of this $coll * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` */ - def inits: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.init) + def inits: Iterator[C] = iterateUntilEmpty(_.init) - override def tapEach[U](f: A => U): C^{this, f} = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) + override def tapEach[U](f: A => U): C = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: Iterable[A]^{this} => Iterable[A]^{this}): Iterator[C^{this}]^{this, f} = { + private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` // `this.tail.tail` doesn't compile as `C` is unbounded // `Iterable.from(this)` would eagerly copy non-immutable collections - val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f) - .takeWhile((itble: Iterable[A]^) => itble.iterator.nonEmpty) - // CC TODO type annotation for itble needed. - // The previous code `.takeWhile(_.iterator.nonEmpty)` does not work. + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") - def ++:[B >: A](that: IterableOnce[B]^): CC[B]^{this, that} = iterableFactory.from(that match { + def ++:[B >: A](that: IterableOnce[B]): CC[B] = iterableFactory.from(that match { case xs: Iterable[B] => new View.Concat(xs, this) case _ => that.iterator ++ iterator }) @@ -869,8 +862,7 @@ object IterableOps { * These operations are implemented in terms of * [[scala.collection.IterableOps.sizeCompare(Int) `sizeCompare(Int)`]]. */ - final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]^) extends AnyVal { - this: SizeCompareOps^{it} => + final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]) extends AnyVal { /** Tests if the size of the collection is less than some value. 
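`iterateUntilEmpty` drives both `tails` and `inits`: it applies `f` until the collection is exhausted and then appends one final empty collection, which is why both iterators always end with an empty result. For example:

  assert(List(1, 2, 3).tails.toList ==
    List(List(1, 2, 3), List(2, 3), List(3), Nil))
  assert(List(1, 2, 3).inits.toList ==
    List(List(1, 2, 3), List(1, 2), List(1), Nil))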
*/ @inline def <(size: Int): Boolean = it.sizeCompare(size) < 0 /** Tests if the size of the collection is less than or equal to some value. */ @@ -895,22 +887,22 @@ object IterableOps { */ @SerialVersionUID(3L) class WithFilter[+A, +CC[_]]( - self: IterableOps[A, CC, _]^, + self: IterableOps[A, CC, _], p: A => Boolean ) extends collection.WithFilter[A, CC] with Serializable { - protected def filtered: Iterable[A]^{this} = + protected def filtered: Iterable[A] = new View.Filter(self, p, isFlipped = false) - def map[B](f: A => B): CC[B]^{this} = + def map[B](f: A => B): CC[B] = self.iterableFactory.from(new View.Map(filtered, f)) - def flatMap[B](f: A => IterableOnce[B]): CC[B]^{this} = + def flatMap[B](f: A => IterableOnce[B]): CC[B] = self.iterableFactory.from(new View.FlatMap(filtered, f)) def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} = + def withFilter(q: A => Boolean): WithFilter[A, CC] = new WithFilter(self, (a: A) => p(a) && q(a)) } @@ -948,7 +940,7 @@ abstract class AbstractIterable[+A] extends Iterable[A] * same as `C`. */ trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { - protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = iterableFactory.from(coll) + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = iterableFactory.from(coll) protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] // overridden for efficiency, since we know CC[A] =:= C @@ -966,7 +958,7 @@ trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends I trait EvidenceIterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]], Ev[_]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { protected def evidenceIterableFactory: EvidenceIterableFactory[CC, Ev] implicit protected def iterableEvidence: Ev[A @uncheckedVariance] - override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = evidenceIterableFactory.from(coll) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = evidenceIterableFactory.from(coll) override protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = evidenceIterableFactory.newBuilder[A] override def empty: CC[A @uncheckedVariance] = evidenceIterableFactory.empty } @@ -988,11 +980,11 @@ trait SortedSetFactoryDefaults[+A, +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { self: IterableOps[A, WithFilterCC, _] => - override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(ordering) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = sortedIterableFactory.from(coll)(ordering) override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](ordering) override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(ordering) - override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC]^{p} = + override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, 
CC] = new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) } @@ -1012,8 +1004,7 @@ trait SortedSetFactoryDefaults[+A, trait MapFactoryDefaults[K, +V, +CC[x, y] <: IterableOps[(x, y), Iterable, Iterable[(x, y)]], +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x]] extends MapOps[K, V, CC, CC[K, V @uncheckedVariance]] with IterableOps[(K, V), WithFilterCC, CC[K, V @uncheckedVariance]] { - this: MapFactoryDefaults[K, V, CC, WithFilterCC] => - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = mapFactory.from(coll) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = mapFactory.from(coll) override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) @@ -1021,7 +1012,7 @@ trait MapFactoryDefaults[K, +V, case _ => mapFactory.empty } - override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC]^{p} = + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC] = new MapOps.WithFilter[K, V, WithFilterCC, CC](this, p) } @@ -1044,9 +1035,9 @@ trait SortedMapFactoryDefaults[K, +V, self: IterableOps[(K, V), WithFilterCC, _] => override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(ordering) - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = sortedMapFactory.from(coll)(ordering) override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](ordering) - override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC]^{p} = + override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC] = new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) } diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala b/tests/pos-special/stdlib/collection/IterableOnce.scala index 6836a3bac39a..65d8dce08ae4 100644 --- a/tests/pos-special/stdlib/collection/IterableOnce.scala +++ b/tests/pos-special/stdlib/collection/IterableOnce.scala @@ -14,13 +14,12 @@ package scala package collection import scala.annotation.tailrec -import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} +import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.StringBuilder import scala.language.implicitConversions import scala.math.{Numeric, Ordering} import scala.reflect.ClassTag import scala.runtime.AbstractFunction2 -import language.experimental.captureChecking /** * A template trait for collections which can be traversed either once only @@ -43,10 +42,8 @@ import language.experimental.captureChecking * @define coll collection */ trait IterableOnce[+A] extends Any { - this: IterableOnce[A]^ => - /** Iterator can be used only once */ - def iterator: Iterator[A]^{this} + def iterator: Iterator[A] /** Returns a 
[[scala.collection.Stepper]] for the elements of this collection. * @@ -68,9 +65,9 @@ trait IterableOnce[+A] extends Any { * allow creating parallel streams, whereas bare Steppers can be converted only to sequential * streams. */ - def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = { + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S = { import convert.impl._ - val s: Any = shape.shape match { + val s = shape.shape match { case StepperShape.IntShape => new IntIteratorStepper (iterator.asInstanceOf[Iterator[Int]]) case StepperShape.LongShape => new LongIteratorStepper (iterator.asInstanceOf[Iterator[Long]]) case StepperShape.DoubleShape => new DoubleIteratorStepper(iterator.asInstanceOf[Iterator[Double]]) @@ -87,7 +84,7 @@ trait IterableOnce[+A] extends Any { final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) extends AnyVal { @deprecated("Use .iterator.withFilter(...) instead", "2.13.0") - def withFilter(f: A => Boolean): Iterator[A]^{f} = it.iterator.withFilter(f) + def withFilter(f: A => Boolean): Iterator[A] = it.iterator.withFilter(f) @deprecated("Use .iterator.reduceLeftOption(...) instead", "2.13.0") def reduceLeftOption(f: (A, A) => A): Option[A] = it.iterator.reduceLeftOption(f) @@ -105,7 +102,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def reduceRight(f: (A, A) => A): A = it.iterator.reduceRight(f) @deprecated("Use .iterator.maxBy(...) instead", "2.13.0") - def maxBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) + def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) @deprecated("Use .iterator.reduceLeft(...) instead", "2.13.0") def reduceLeft(f: (A, A) => A): A = it.iterator.reduceLeft(f) @@ -123,7 +120,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def reduceOption(f: (A, A) => A): Option[A] = it.iterator.reduceOption(f) @deprecated("Use .iterator.minBy(...) instead", "2.13.0") - def minBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) + def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) @deprecated("Use .iterator.size instead", "2.13.0") def size: Int = it.iterator.size @@ -135,7 +132,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def collectFirst[B](f: PartialFunction[A, B]): Option[B] = it.iterator.collectFirst(f) @deprecated("Use .iterator.filter(...) instead", "2.13.0") - def filter(f: A => Boolean): Iterator[A]^{f} = it.iterator.filter(f) + def filter(f: A => Boolean): Iterator[A] = it.iterator.filter(f) @deprecated("Use .iterator.exists(...) 
instead", "2.13.0") def exists(f: A => Boolean): Boolean = it.iterator.exists(f) @@ -241,13 +238,13 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext @`inline` def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) @deprecated("Use .iterator.map instead or consider requiring an Iterable", "2.13.0") - def map[B](f: A => B): IterableOnce[B]^{f} = it match { + def map[B](f: A => B): IterableOnce[B] = it match { case it: Iterable[A] => it.map(f) case _ => it.iterator.map(f) } @deprecated("Use .iterator.flatMap instead or consider requiring an Iterable", "2.13.0") - def flatMap[B](f: A => IterableOnce[B]^): IterableOnce[B]^{f} = it match { + def flatMap[B](f: A => IterableOnce[B]): IterableOnce[B] = it match { case it: Iterable[A] => it.flatMap(f) case _ => it.iterator.flatMap(f) } @@ -318,11 +315,9 @@ object IterableOnce { * @define coll collection * */ -trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => +trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => /////////////////////////////////////////////////////////////// Abstract methods that must be implemented - import IterableOnceOps.Maximized - /** Produces a $coll containing cumulative results of applying the * operator going left to right, including the initial value. * @@ -334,7 +329,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results */ - def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] /** Selects all elements of this $coll which satisfy a predicate. * @@ -342,7 +337,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a new $coll consisting of all elements of this $coll that satisfy the given * predicate `p`. The order of the elements is preserved. */ - def filter(p: A => Boolean): C^{this, p} + def filter(p: A => Boolean): C /** Selects all elements of this $coll which do not satisfy a predicate. * @@ -350,7 +345,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a new $coll consisting of all elements of this $coll that do not satisfy the given * predicate `pred`. Their order may not be preserved. */ - def filterNot(p: A => Boolean): C^{this, p} + def filterNot(pred: A => Boolean): C /** Selects the first ''n'' elements. * $orderDependent @@ -359,7 +354,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * or else the whole $coll, if it has less than `n` elements. * If `n` is negative, returns an empty $coll. */ - def take(n: Int): C^{this} + def take(n: Int): C /** Takes longest prefix of elements that satisfy a predicate. * $orderDependent @@ -367,7 +362,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return the longest prefix of this $coll whose elements all satisfy * the predicate `p`. */ - def takeWhile(p: A => Boolean): C^{this, p} + def takeWhile(p: A => Boolean): C /** Selects all elements except first ''n'' ones. * $orderDependent @@ -376,7 +371,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * empty $coll, if this $coll has less than `n` elements. * If `n` is negative, don't drop any elements. */ - def drop(n: Int): C^{this} + def drop(n: Int): C /** Drops longest prefix of elements that satisfy a predicate. 
* $orderDependent @@ -384,7 +379,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return the longest suffix of this $coll whose first element * does not satisfy the predicate `p`. */ - def dropWhile(p: A => Boolean): C^{this, p} + def dropWhile(p: A => Boolean): C /** Selects an interval of elements. The returned $coll is made up * of all elements `x` which satisfy the invariant: @@ -399,7 +394,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * index `from` extending up to (but not including) index `until` * of this $coll. */ - def slice(from: Int, until: Int): C^{this} + def slice(from: Int, until: Int): C /** Builds a new $coll by applying a function to all elements of this $coll. * @@ -408,7 +403,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a new $coll resulting from applying the given function * `f` to each element of this $coll and collecting the results. */ - def map[B](f: A => B): CC[B]^{this, f} + def map[B](f: A => B): CC[B] /** Builds a new $coll by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -441,7 +436,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} + def flatMap[B](f: A => IterableOnce[B]): CC[B] /** Converts this $coll of iterable collections into * a $coll formed by the elements of these iterable @@ -469,7 +464,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * type of this $coll is an `Iterable`. * @return a new $coll resulting from concatenating all element ${coll}s. */ - def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] /** Builds a new $coll by applying a partial function to all elements of this $coll * on which the function is defined. @@ -480,7 +475,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * `pf` to each element on which it is defined and collecting the results. * The order of the elements is preserved. */ - def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} + def collect[B](pf: PartialFunction[A, B]): CC[B] /** Zips this $coll with its indices. * @@ -489,7 +484,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @example * `List("a", "b", "c").zipWithIndex == List(("a", 0), ("b", 1), ("c", 2))` */ - def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} + def zipWithIndex: CC[(A @uncheckedVariance, Int)] /** Splits this $coll into a prefix/suffix pair according to a predicate. * @@ -502,7 +497,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a pair consisting of the longest prefix of this $coll whose * elements all satisfy `p`, and the rest of this $coll. */ - def span(p: A => Boolean): (C^{this, p}, C^{this, p}) + def span(p: A => Boolean): (C, C) /** Splits this $coll into a prefix/suffix pair at a given position. * @@ -514,7 +509,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a pair of ${coll}s consisting of the first `n` * elements of this $coll, and the other elements. 
*/ - def splitAt(n: Int): (C^{this}, C^{this}) = { + def splitAt(n: Int): (C, C) = { class Spanner extends runtime.AbstractFunction1[A, Boolean] { var i = 0 def apply(a: A) = i < n && { i += 1 ; true } @@ -532,7 +527,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @tparam U the return type of f * @return The same logical collection as this */ - def tapEach[U](f: A => U): C^{this, f} + def tapEach[U](f: A => U): C /////////////////////////////////////////////////////////////// Concrete methods based on iterator @@ -807,7 +802,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => case _ => Some(reduceLeft(op)) } private final def reduceLeftOptionIterator[B >: A](op: (B, A) => B): Option[B] = reduceOptionIterator[A, B](iterator)(op) - private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X]^)(op: (B, X) => B): Option[B] = { + private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X])(op: (B, X) => B): Option[B] = { if (it.hasNext) { var acc: B = it.next() while (it.hasNext) @@ -1046,12 +1041,35 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return the first element of this $coll with the largest value measured by function f * with respect to the ordering `cmp`. */ - def maxBy[B](f: A -> B)(implicit ord: Ordering[B]): A = + def maxBy[B](f: A => B)(implicit ord: Ordering[B]): A = knownSize match { case 0 => throw new UnsupportedOperationException("empty.maxBy") case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).result } + private class Maximized[X, B](descriptor: String)(f: X => B)(cmp: (B, B) => Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { + var maxElem: X = null.asInstanceOf[X] + var maxF: B = null.asInstanceOf[B] + var nonEmpty = false + def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None + def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") + def apply(m: Maximized[X, B], a: X): Maximized[X, B] = + if (m.nonEmpty) { + val fa = f(a) + if (cmp(fa, maxF)) { + maxF = fa + maxElem = a + } + m + } + else { + m.nonEmpty = true + m.maxElem = a + m.maxF = f(a) + m + } + } + /** Finds the first element which yields the largest value measured by function f. * * $willNotTerminateInf @@ -1062,7 +1080,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return an option value containing the first element of this $coll with the * largest value measured by function f with respect to the ordering `cmp`. */ - def maxByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = + def maxByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = knownSize match { case 0 => None case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).toOption @@ -1079,7 +1097,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return the first element of this $coll with the smallest value measured by function f * with respect to the ordering `cmp`. 
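`Maximized` is a reusable fold accumulator: `foldLeft` threads a single mutable cell through the collection, so `maxBy`/`minBy` evaluate `f` once per element and allocate no intermediate tuples, while the `...Option` variants report emptiness through `toOption` instead of throwing. Usage is unchanged:

  assert(List("a", "bbb", "cc").maxBy(_.length) == "bbb")
  assert(List("a", "bbb", "cc").minBy(_.length) == "a")
  assert(List.empty[String].maxByOption(_.length).isEmpty)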
*/ - def minBy[B](f: A -> B)(implicit ord: Ordering[B]): A = + def minBy[B](f: A => B)(implicit ord: Ordering[B]): A = knownSize match { case 0 => throw new UnsupportedOperationException("empty.minBy") case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).result @@ -1096,7 +1114,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * with the smallest value measured by function f * with respect to the ordering `cmp`. */ - def minByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = + def minByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = knownSize match { case 0 => None case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).toOption @@ -1292,7 +1310,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(this) @deprecated("Use .iterator instead of .toIterator", "2.13.0") - @`inline` final def toIterator: Iterator[A]^{this} = iterator + @`inline` final def toIterator: Iterator[A] = iterator def toList: immutable.List[A] = immutable.List.from(this) @@ -1334,31 +1352,3 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => xs } } - -object IterableOnceOps: - - // Moved out of trait IterableOnceOps to here, since universal traits cannot - // have nested classes in Scala 3 - private class Maximized[X, B](descriptor: String)(f: X -> B)(cmp: (B, B) -> Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { - var maxElem: X @uncheckedCaptures = null.asInstanceOf[X] - var maxF: B @uncheckedCaptures = null.asInstanceOf[B] - var nonEmpty = false - def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None - def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") - def apply(m: Maximized[X, B], a: X): Maximized[X, B] = - if (m.nonEmpty) { - val fa = f(a) - if (cmp(fa, maxF)) { - maxF = fa - maxElem = a - } - m - } - else { - m.nonEmpty = true - m.maxElem = a - m.maxF = f(a) - m - } - } -end IterableOnceOps \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index ecd8d985bbf0..4b8338ed1b17 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -14,11 +14,8 @@ package scala.collection import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ImmutableBuilder} import scala.annotation.tailrec -import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} +import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - /** Iterators are data structures that allow to iterate over a sequence * of elements. They have a `hasNext` method for checking @@ -74,8 +71,7 @@ import caps.unsafe.unsafeAssumePure * iterators as well. * @define coll iterator */ -trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { - self: Iterator[A]^ => +trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { self => /** Check if there is a next element available. 
* @@ -97,7 +93,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite @throws[NoSuchElementException] def next(): A - @inline final def iterator: Iterator[A]^{this} = this + @inline final def iterator = this /** Wraps the value of `next()` in an option. * @@ -121,7 +117,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @return a buffered iterator producing the same values as this iterator. * @note Reuse: $consumesAndProducesIterator */ - def buffered: BufferedIterator[A]^{this} = new AbstractIterator[A] with BufferedIterator[A] { + def buffered: BufferedIterator[A] = new AbstractIterator[A] with BufferedIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false @@ -157,16 +153,16 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * A `GroupedIterator` is yielded by `grouped` and by `sliding`, * where the `step` may differ from the group `size`. */ - class GroupedIterator[B >: A](self: Iterator[B]^, size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { + class GroupedIterator[B >: A](self: Iterator[B], size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - private[this] var buffer: Array[B @uncheckedCaptures] = null // current result - private[this] var prev: Array[B @uncheckedCaptures] = null // if sliding, overlap from previous result + private[this] var buffer: Array[B] = null // current result + private[this] var prev: Array[B] = null // if sliding, overlap from previous result private[this] var first = true // if !first, advancing may skip ahead private[this] var filled = false // whether the buffer is "hot" private[this] var partial = true // whether to emit partial sequence - private[this] var padding: () -> B @uncheckedCaptures = null // what to pad short sequences with + private[this] var padding: () => B = null // what to pad short sequences with private[this] def pad = padding != null // irrespective of partial flag private[this] def newBuilder = { val b = ArrayBuilder.make[Any] @@ -189,7 +185,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPartial`. * @group Configuration */ - def withPadding(x: -> B): this.type = { + def withPadding(x: => B): this.type = { padding = () => x partial = true // redundant, as padding always results in complete segment this @@ -295,7 +291,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * all elements of this $coll followed by the minimal number of occurrences of `elem` so * that the resulting collection has a length of at least `len`. */ - def padTo[B >: A](len: Int, elem: B): Iterator[B]^{this} = new AbstractIterator[B] { + def padTo[B >: A](len: Int, elem: B): Iterator[B] = new AbstractIterator[B] { private[this] var i = 0 override def knownSize: Int = { @@ -325,7 +321,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * is the same as in the original iterator. 
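`withPadding` and `withPartial` configure how a `GroupedIterator` treats a trailing group shorter than `size`: padding completes it with by-name elements, while `withPartial(false)` drops it entirely. For example:

  assert(List(1, 2, 3).iterator.grouped(2).withPadding(0).toList ==
    List(Seq(1, 2), Seq(3, 0)))
  assert(List(1, 2, 3).iterator.grouped(2).withPartial(false).toList ==
    List(Seq(1, 2)))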
* @note Reuse: $consumesOneAndProducesTwoIterators */ - def partition(p: A => Boolean): (Iterator[A]^{this, p}, Iterator[A]^{this, p}) = { + def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { val (a, b) = duplicate (a filter p, b filterNot p) } @@ -345,7 +341,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesAndProducesIterator */ - def grouped[B >: A](size: Int): GroupedIterator[B]^{this} = + def grouped[B >: A](size: Int): GroupedIterator[B] = new GroupedIterator[B](self, size, size) /** Returns an iterator which presents a "sliding window" view of @@ -381,13 +377,13 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesAndProducesIterator */ - def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B]^{this} = + def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B] = new GroupedIterator[B](self, size, step) - def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B]^{this, op} = new AbstractIterator[B] { + def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] { // We use an intermediate iterator that iterates through the first element `z` // and then that will be modified to iterate through the collection - private[this] var current: Iterator[B]^{self, op} = + private[this] var current: Iterator[B] = new AbstractIterator[B] { override def knownSize = { val thisSize = self.knownSize @@ -416,7 +412,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } @deprecated("Call scanRight on an Iterable instead.", "2.13.0") - def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = ArrayBuffer.from(this).scanRight(z)(op).iterator + def scanRight[B](z: B)(op: (A, B) => B): Iterator[B] = ArrayBuffer.from(this).scanRight(z)(op).iterator def indexWhere(p: A => Boolean, from: Int = 0): Int = { var i = math.max(from, 0) @@ -469,11 +465,11 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite @deprecatedOverriding("isEmpty is defined as !hasNext; override hasNext instead", "2.13.0") override def isEmpty: Boolean = !hasNext - def filter(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = false) + def filter(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = false) - def filterNot(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = true) + def filterNot(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = true) - private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] { + private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A] = new AbstractIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false @@ -483,9 +479,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite while (p(hd) == isFlipped) { if (!self.hasNext) return false hd = self.next() - } + } hdDefined = true - true + true } def next() = @@ -507,9 +503,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. 
* @note Reuse: $consumesAndProducesIterator */ - def withFilter(p: A => Boolean): Iterator[A]^{this, p} = filter(p) + def withFilter(p: A => Boolean): Iterator[A] = filter(p) - def collect[B](pf: PartialFunction[A, B]^): Iterator[B]^{this, pf} = new AbstractIterator[B] with (A -> B) { + def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] with (A => B) { // Manually buffer to avoid extra layer of wrapping with buffered private[this] var hd: B = _ @@ -545,7 +541,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesIterator */ - def distinct: Iterator[A]^{this} = distinctBy(identity) + def distinct: Iterator[A] = distinctBy(identity) /** * Builds a new iterator from this one without any duplicated elements as determined by `==` after applying @@ -557,7 +553,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesIterator */ - def distinctBy[B](f: A -> B): Iterator[A]^{this} = new AbstractIterator[A] { + def distinctBy[B](f: A => B): Iterator[A] = new AbstractIterator[A] { private[this] val traversedValues = mutable.HashSet.empty[B] private[this] var nextElementDefined: Boolean = false @@ -582,14 +578,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def map[B](f: A => B): Iterator[B]^{this, f} = new AbstractIterator[B] { + def map[B](f: A => B): Iterator[B] = new AbstractIterator[B] { override def knownSize = self.knownSize def hasNext = self.hasNext def next() = f(self.next()) } - def flatMap[B](f: A => IterableOnce[B]^): Iterator[B]^{this, f} = new AbstractIterator[B] { - private[this] var cur: Iterator[B]^{f} = Iterator.empty + def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = new AbstractIterator[B] { + private[this] var cur: Iterator[B] = Iterator.empty /** Trillium logic boolean: -1 = unknown, 0 = false, 1 = true */ private[this] var _hasNext: Int = -1 @@ -623,19 +619,19 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def flatten[B](implicit ev: A -> IterableOnce[B]): Iterator[B]^{this} = + def flatten[B](implicit ev: A => IterableOnce[B]): Iterator[B] = flatMap[B](ev) - def concat[B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = new Iterator.ConcatIterator[B](self).concat(xs) + def concat[B >: A](xs: => IterableOnce[B]): Iterator[B] = new Iterator.ConcatIterator[B](self).concat(xs) - @`inline` final def ++ [B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = concat(xs) + @`inline` final def ++ [B >: A](xs: => IterableOnce[B]): Iterator[B] = concat(xs) - def take(n: Int): Iterator[A]^{this} = sliceIterator(0, n max 0) + def take(n: Int): Iterator[A] = sliceIterator(0, n max 0) - def takeWhile(p: A => Boolean): Iterator[A]^{self, p} = new AbstractIterator[A] { + def takeWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false - private[this] var tail: Iterator[A]^{self} = self + private[this] var tail: Iterator[A] = self def hasNext = hdDefined || tail.hasNext && { hd = tail.next() @@ -646,9 +642,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def next() = if (hasNext) { hdDefined = false; hd } else Iterator.empty.next() } - def drop(n: Int): Iterator[A]^{this} = sliceIterator(n, -1) + def drop(n: Int): Iterator[A] = sliceIterator(n, -1) - def dropWhile(p: A => Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] { + def 
dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator private[this] var status = -1 // Local buffering to avoid double-wrap with .buffered @@ -684,7 +680,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesOneAndProducesTwoIterators */ - def span(p: A => Boolean): (Iterator[A]^{this, p}, Iterator[A]^{this, p}) = { + def span(p: A => Boolean): (Iterator[A], Iterator[A]) = { /* * Giving a name to following iterator (as opposed to trailing) because * anonymous class is represented as a structural type that trailing @@ -783,10 +779,10 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite (leading, trailing) } - def slice(from: Int, until: Int): Iterator[A]^{this} = sliceIterator(from, until max 0) + def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0) /** Creates an optionally bounded slice, unbounded if `until` is negative. */ - protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { + protected def sliceIterator(from: Int, until: Int): Iterator[A] = { val lo = from max 0 val rest = if (until < 0) -1 // unbounded @@ -797,14 +793,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite else new Iterator.SliceIterator(this, lo, rest) } - def zip[B](that: IterableOnce[B]^): Iterator[(A, B)]^{this, that} = new AbstractIterator[(A, B)] { + def zip[B](that: IterableOnce[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { val thatIterator = that.iterator override def knownSize = self.knownSize min thatIterator.knownSize def hasNext = self.hasNext && thatIterator.hasNext def next() = (self.next(), thatIterator.next()) } - def zipAll[A1 >: A, B](that: IterableOnce[B]^, thisElem: A1, thatElem: B): Iterator[(A1, B)]^{this, that} = new AbstractIterator[(A1, B)] { + def zipAll[A1 >: A, B](that: IterableOnce[B], thisElem: A1, thatElem: B): Iterator[(A1, B)] = new AbstractIterator[(A1, B)] { val thatIterator = that.iterator override def knownSize = { val thisSize = self.knownSize @@ -821,7 +817,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def zipWithIndex: Iterator[(A, Int)]^{this} = new AbstractIterator[(A, Int)] { + def zipWithIndex: Iterator[(A, Int)] = new AbstractIterator[(A, Int)] { var idx = 0 override def knownSize = self.knownSize def hasNext = self.hasNext @@ -841,7 +837,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @inheritdoc */ - def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { + def sameElements[B >: A](that: IterableOnce[B]): Boolean = { val those = that.iterator while (hasNext && those.hasNext) if (next() != those.next()) @@ -864,7 +860,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * iterated by one iterator but not yet by the other. * @note Reuse: $consumesOneAndProducesTwoIterators */ - def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = { + def duplicate: (Iterator[A], Iterator[A]) = { val gap = new scala.collection.mutable.Queue[A] var ahead: Iterator[A] = null class Partner extends AbstractIterator[A] { @@ -908,7 +904,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @param replaced The number of values in the original iterator that are replaced by the patch. 
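   *
   *  A small usage sketch (values are illustrative, not from the original doc):
   *  {{{
   *    Iterator(1, 2, 3, 4, 5).patch(1, Iterator(9, 8), 2).toList
   *    // List(1, 9, 8, 4, 5): the two elements at indices 1 and 2 are replaced
   *  }}}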
* @note Reuse: $consumesTwoAndProducesOneIterator */ - def patch[B >: A](from: Int, patchElems: Iterator[B]^, replaced: Int): Iterator[B]^{this, patchElems} = + def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] { private[this] var origElems = self // > 0 => that many more elems from `origElems` before switching to `patchElems` @@ -948,7 +944,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - override def tapEach[U](f: A => U): Iterator[A]^{this, f} = new AbstractIterator[A] { + override def tapEach[U](f: A => U): Iterator[A] = new AbstractIterator[A] { override def knownSize = self.knownSize override def hasNext = self.hasNext override def next() = { @@ -985,7 +981,7 @@ object Iterator extends IterableFactory[Iterator] { * @tparam A the type of the collection’s elements * @return a new $coll with the elements of `source` */ - override def from[A](source: IterableOnce[A]^): Iterator[A]^{source} = source.iterator + override def from[A](source: IterableOnce[A]): Iterator[A] = source.iterator /** The iterator which produces no values. */ @`inline` final def empty[T]: Iterator[T] = _empty @@ -1016,7 +1012,7 @@ object Iterator extends IterableFactory[Iterator] { * @param elem the element computation * @return An iterator that produces the results of `n` evaluations of `elem`. */ - override def fill[A](len: Int)(elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] { + override def fill[A](len: Int)(elem: => A): Iterator[A] = new AbstractIterator[A] { private[this] var i = 0 override def knownSize: Int = (len - i) max 0 def hasNext: Boolean = i < len @@ -1031,7 +1027,7 @@ object Iterator extends IterableFactory[Iterator] { * @param f The function computing element values * @return An iterator that produces the values `f(0), ..., f(n -1)`. */ - override def tabulate[A](end: Int)(f: Int => A): Iterator[A]^{f} = new AbstractIterator[A] { + override def tabulate[A](end: Int)(f: Int => A): Iterator[A] = new AbstractIterator[A] { private[this] var i = 0 override def knownSize: Int = (end - i) max 0 def hasNext: Boolean = i < end @@ -1104,7 +1100,7 @@ object Iterator extends IterableFactory[Iterator] { * @param f the function that's repeatedly applied * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...` */ - def iterate[T](start: T)(f: T => T): Iterator[T]^{f} = new AbstractIterator[T] { + def iterate[T](start: T)(f: T => T): Iterator[T] = new AbstractIterator[T] { private[this] var first = true private[this] var acc = start def hasNext: Boolean = true @@ -1126,7 +1122,7 @@ object Iterator extends IterableFactory[Iterator] { * @tparam S Type of the internal state * @return an Iterator that produces elements using `f` until `f` returns `None` */ - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A]^{f} = new UnfoldIterator(init)(f) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A] = new UnfoldIterator(init)(f) /** Creates an infinite-length iterator returning the results of evaluating an expression. * The expression is recomputed for every element. @@ -1134,7 +1130,7 @@ object Iterator extends IterableFactory[Iterator] { * @param elem the element computation. * @return the iterator containing an infinite number of results of evaluating `elem`. 
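   *
   *  A minimal sketch (illustrative); the by-name argument is re-evaluated for every element:
   *  {{{
   *    Iterator.continually(math.random()).take(3).toList  // three freshly computed values
   *  }}}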
*/ - def continually[A](elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] { + def continually[A](elem: => A): Iterator[A] = new AbstractIterator[A] { def hasNext = true def next() = elem } @@ -1142,12 +1138,9 @@ object Iterator extends IterableFactory[Iterator] { /** Creates an iterator to which other iterators can be appended efficiently. * Nested ConcatIterators are merged to avoid blowing the stack. */ - private final class ConcatIterator[+A](val from: Iterator[A]^) extends AbstractIterator[A] { - private var current: Iterator[A @uncheckedCaptures] = from.unsafeAssumePure - // This should be Iteratpr[A]^, but fails since mutable variables can't capture cap. - // To do better we'd need to track nesting levels for universal capabiltities. - private var tail: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null - private var last: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null + private final class ConcatIterator[+A](private var current: Iterator[A @uncheckedVariance]) extends AbstractIterator[A] { + private var tail: ConcatIteratorCell[A @uncheckedVariance] = null + private var last: ConcatIteratorCell[A @uncheckedVariance] = null private var currentHasNextChecked = false def hasNext = @@ -1201,8 +1194,8 @@ object Iterator extends IterableFactory[Iterator] { current.next() } else Iterator.empty.next() - override def concat[B >: A](that: => IterableOnce[B]^): Iterator[B]^{this, that} = { - val c: ConcatIteratorCell[A] = new ConcatIteratorCell[B](that, null).asInstanceOf + override def concat[B >: A](that: => IterableOnce[B]): Iterator[B] = { + val c = new ConcatIteratorCell[B](that, null).asInstanceOf[ConcatIteratorCell[A]] if (tail == null) { tail = c last = c @@ -1216,14 +1209,14 @@ object Iterator extends IterableFactory[Iterator] { } } - private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A]^, var tail: ConcatIteratorCell[A @uncheckedCaptures]) { - def headIterator: Iterator[A]^{this} = head.iterator // CC todo: can't use {head} as capture set, gives "cannot establish a reference" + private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A], var tail: ConcatIteratorCell[A]) { + def headIterator: Iterator[A] = head.iterator } /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded. * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing. */ - private[scala] final class SliceIterator[A](val underlying: Iterator[A]^, start: Int, limit: Int) extends AbstractIterator[A] { + private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] { private[this] var remaining = limit private[this] var dropping = start @inline private def unbounded = remaining < 0 @@ -1254,7 +1247,7 @@ object Iterator extends IterableFactory[Iterator] { else if (unbounded) underlying.next() else empty.next() } - override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{underlying} = { + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { val lo = from max 0 def adjustedBound = if (unbounded) -1 @@ -1276,9 +1269,9 @@ object Iterator extends IterableFactory[Iterator] { /** Creates an iterator that uses a function `f` to produce elements of * type `A` and update an internal state of type `S`. 
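   *
   *  A hedged sketch of the behaviour it backs (via `Iterator.unfold`; values illustrative):
   *  {{{
   *    Iterator.unfold(1)(s => if (s <= 16) Some((s, s * 2)) else None).toList
   *    // List(1, 2, 4, 8, 16)
   *  }}}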
   */
-  private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)])extends AbstractIterator[A] {
-    private[this] var state: S @uncheckedCaptures = init
-    private[this] var nextResult: Option[(A, S)] @uncheckedCaptures = null
+  private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)]) extends AbstractIterator[A] {
+    private[this] var state: S = init
+    private[this] var nextResult: Option[(A, S)] = null

     override def hasNext: Boolean = {
       if (nextResult eq null) {
@@ -1304,5 +1297,4 @@
 }

 /** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. */
-abstract class AbstractIterator[+A] extends Iterator[A]:
-  this: Iterator[A]^ =>
+abstract class AbstractIterator[+A] extends Iterator[A]
diff --git a/tests/pos-special/stdlib/collection/JavaConverters.scala b/tests/pos-special/stdlib/collection/JavaConverters.scala
new file mode 100644
index 000000000000..569e4e8c60a7
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/JavaConverters.scala
@@ -0,0 +1,335 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+import scala.collection.convert._
+import scala.language.implicitConversions
+
+/** A variety of decorators that enable converting between
+ *  Scala and Java collections using extension methods, `asScala` and `asJava`.
+ *
+ *  The extension methods return adapters for the corresponding API.
+ *
+ *  The following conversions are supported via `asScala` and `asJava`:
+ *  {{{
+ *    scala.collection.Iterable       <=> java.lang.Iterable
+ *    scala.collection.Iterator       <=> java.util.Iterator
+ *    scala.collection.mutable.Buffer <=> java.util.List
+ *    scala.collection.mutable.Set    <=> java.util.Set
+ *    scala.collection.mutable.Map    <=> java.util.Map
+ *    scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
+ *  }}}
+ *  The following conversions are supported via `asScala` and through
+ *  specially-named extension methods to convert to Java collections, as shown:
+ *  {{{
+ *    scala.collection.Iterable    <=> java.util.Collection   (via asJavaCollection)
+ *    scala.collection.Iterator    <=> java.util.Enumeration  (via asJavaEnumeration)
+ *    scala.collection.mutable.Map <=> java.util.Dictionary   (via asJavaDictionary)
+ *  }}}
+ *  In addition, the following one-way conversions are provided via `asJava`:
+ *  {{{
+ *    scala.collection.Seq         => java.util.List
+ *    scala.collection.mutable.Seq => java.util.List
+ *    scala.collection.Set         => java.util.Set
+ *    scala.collection.Map         => java.util.Map
+ *  }}}
+ *  The following one-way conversion is provided via `asScala`:
+ *  {{{
+ *    java.util.Properties => scala.collection.mutable.Map
+ *  }}}
+ *  In all cases, converting from a source type to a target type and back
+ *  again will return the original source object. For example:
+ *  {{{
+ *    import scala.collection.JavaConverters._
+ *
+ *    val source = new scala.collection.mutable.ListBuffer[Int]
+ *    val target: java.util.List[Int] = source.asJava
+ *    val other: scala.collection.mutable.Buffer[Int] = target.asScala
+ *    assert(source eq other)
+ *  }}}
+ *  Alternatively, the conversion methods have descriptive names and can be invoked explicitly.
+ *  {{{
+ *    scala> val vs = java.util.Arrays.asList("hi", "bye")
+ *    vs: java.util.List[String] = [hi, bye]
+ *
+ *    scala> val ss = asScalaIterator(vs.iterator)
+ *    ss: Iterator[String] = <iterator>
+ *
+ *    scala> .toList
+ *    res0: List[String] = List(hi, bye)
+ *
+ *    scala> val ss = asScalaBuffer(vs)
+ *    ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye)
+ *  }}}
+ */
+@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
+object JavaConverters extends AsJavaConverters with AsScalaConverters {
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = asJava(i)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = asJava(i)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = asJava(b)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = asJava(s)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def seqAsJavaList[A](s: Seq[A]): ju.List[A] = asJava(s)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = asJava(s)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def setAsJavaSet[A](s: Set[A]): ju.Set[A] = asJava(s)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = asJava(m)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mapAsJavaMap[K, V](m: Map[K, V]): ju.Map[K, V] = asJava(m)
+
+  @deprecated("Use `asJava` instead", "2.13.0")
+  def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = asJava(m)
+
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = asScala(i)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = asScala(i)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = asScala(i)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = asScala(i)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = asScala(l)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = asScala(s)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = asScala(m)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = asScala(m)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = asScala(p)
+
+  @deprecated("Use `asScala` instead", "2.13.0")
+  def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = asScala(p)
+
+  // Deprecated implicit conversions for code that directly imports them
+
+  /**
+   * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`.
+   * @see [[asJavaIterator]]
+   */
+  implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
+    new AsJava(asJavaIterator(i))
+
+  /**
+   * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`.
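+   *
+   *  A brief illustrative sketch (assumes only the import shown in the object doc):
+   *  {{{
+   *    import scala.collection.JavaConverters._
+   *    val e: java.util.Enumeration[String] = Iterator("a", "b").asJavaEnumeration
+   *  }}}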
+ * @see [[asJavaEnumeration]] + */ + implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = + new AsJavaEnumeration(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`. + * @see [[asJavaIterable]] + */ + implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = + new AsJava(asJavaIterable(i)) + + /** + * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`. + * @see [[asJavaCollection]] + */ + implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = + new AsJavaCollection(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`. + * @see [[bufferAsJavaList]] + */ + implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = + new AsJava(bufferAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`. + * @see [[mutableSeqAsJavaList]] + */ + implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = + new AsJava(mutableSeqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`. + * @see [[seqAsJavaList]] + */ + implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = + new AsJava(seqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`. + * @see [[mutableSetAsJavaSet]] + */ + implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = + new AsJava(mutableSetAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`. + * @see [[setAsJavaSet]] + */ + implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = + new AsJava(setAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`. + * @see [[mutableMapAsJavaMap]] + */ + implicit def mutableMapAsJavaMapConverter[K, V](m : mutable.Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mutableMapAsJavaMap(m)) + + /** + * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`. + * @see [[asJavaDictionary]] + */ + implicit def asJavaDictionaryConverter[K, V](m : mutable.Map[K, V]): AsJavaDictionary[K, V] = + new AsJavaDictionary(m) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`. + * @see [[mapAsJavaMap]] + */ + implicit def mapAsJavaMapConverter[K, V](m : Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mapAsJavaMap(m)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. + * @see [[mapAsJavaConcurrentMap]]. + */ + implicit def mapAsJavaConcurrentMapConverter[K, V](m: concurrent.Map[K, V]): AsJava[juc.ConcurrentMap[K, V]] = + new AsJava(mapAsJavaConcurrentMap(m)) + + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`. + * @see [[asScalaIterator]] + */ + implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = + new AsScala(asScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`. 
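+   *
+   *  Illustrative sketch:
+   *  {{{
+   *    import scala.collection.JavaConverters._
+   *    val e = java.util.Collections.enumeration(java.util.Arrays.asList("a", "b"))
+   *    e.asScala.toList  // List("a", "b")
+   *  }}}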
+   * @see [[enumerationAsScalaIterator]]
+   */
+  implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
+    new AsScala(enumerationAsScalaIterator(i))
+
+  /**
+   * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`.
+   * @see [[iterableAsScalaIterable]]
+   */
+  implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
+    new AsScala(iterableAsScalaIterable(i))
+
+  /**
+   * Adds an `asScala` method that implicitly converts a Java `Collection` to a Scala `Iterable`.
+   * @see [[collectionAsScalaIterable]]
+   */
+  implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
+    new AsScala(collectionAsScalaIterable(i))
+
+  /**
+   * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`.
+   * @see [[asScalaBuffer]]
+   */
+  implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
+    new AsScala(asScalaBuffer(l))
+
+  /**
+   * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`.
+   * @see [[asScalaSet]]
+   */
+  implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
+    new AsScala(asScalaSet(s))
+
+  /**
+   * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`.
+   * @see [[mapAsScalaMap]]
+   */
+  implicit def mapAsScalaMapConverter[K, V](m : ju.Map[K, V]): AsScala[mutable.Map[K, V]] =
+    new AsScala(mapAsScalaMap(m))
+
+  /**
+   * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`.
+   * @see [[mapAsScalaConcurrentMap]]
+   */
+  implicit def mapAsScalaConcurrentMapConverter[K, V](m: juc.ConcurrentMap[K, V]): AsScala[concurrent.Map[K, V]] =
+    new AsScala(mapAsScalaConcurrentMap(m))
+
+  /**
+   * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`.
+   * @see [[dictionaryAsScalaMap]]
+   */
+  implicit def dictionaryAsScalaMapConverter[K, V](p: ju.Dictionary[K, V]): AsScala[mutable.Map[K, V]] =
+    new AsScala(dictionaryAsScalaMap(p))
+
+  /**
+   * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
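+   *
+   *  Illustrative sketch:
+   *  {{{
+   *    import scala.collection.JavaConverters._
+   *    System.getProperties.asScala.get("java.version")  // Option[String]
+   *  }}}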
+ * @see [[propertiesAsScalaMap]] + */ + implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = + new AsScala(propertiesAsScalaMap(p)) + + + /** Generic class containing the `asJava` converter method */ + class AsJava[A](op: => A) { + /** Converts a Scala collection to the corresponding Java collection */ + def asJava: A = op + } + + /** Generic class containing the `asScala` converter method */ + class AsScala[A](op: => A) { + /** Converts a Java collection to the corresponding Scala collection */ + def asScala: A = op + } + + /** Generic class containing the `asJavaCollection` converter method */ + class AsJavaCollection[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Collection` */ + def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) + } + + /** Generic class containing the `asJavaEnumeration` converter method */ + class AsJavaEnumeration[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Enumeration` */ + def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) + } + + /** Generic class containing the `asJavaDictionary` converter method */ + class AsJavaDictionary[K, V](m : mutable.Map[K, V]) { + /** Converts a Scala `Map` to a Java `Dictionary` */ + def asJavaDictionary: ju.Dictionary[K, V] = JavaConverters.asJavaDictionary(m) + } +} diff --git a/tests/pos-special/stdlib/collection/LazyZipOps.scala b/tests/pos-special/stdlib/collection/LazyZipOps.scala new file mode 100644 index 000000000000..0553eb8edf7f --- /dev/null +++ b/tests/pos-special/stdlib/collection/LazyZipOps.scala @@ -0,0 +1,422 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.language.implicitConversions + +/** Decorator representing lazily zipped pairs. + * + * @define coll pair + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1], coll2: Iterable[El2]) { + + /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator. + * + * @param that the iterable providing the third element of each eventual triple + * @tparam B the type of the third element in each eventual triple + * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or + * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported. 
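+   *
+   *         For example (illustrative):
+   *         {{{
+   *           List(1, 2).lazyZip(List("a", "b")).lazyZip(List(true, false))
+   *             .map((n, s, b) => s"$n$s$b")  // List("1atrue", "2bfalse")
+   *         }}}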
+   */
+  def lazyZip[B](that: Iterable[B]): LazyZip3[El1, El2, B, C1] = new LazyZip3(src, coll1, coll2, that)
+
+  def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = {
+    bf.fromSpecific(src)(new AbstractView[B] {
+      def iterator = new AbstractIterator[B] {
+        private[this] val elems1 = coll1.iterator
+        private[this] val elems2 = coll2.iterator
+        def hasNext = elems1.hasNext && elems2.hasNext
+        def next() = f(elems1.next(), elems2.next())
+      }
+      override def knownSize: Int = zipKnownSize
+      override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty
+    })
+  }
+
+  def flatMap[B, C](f: (El1, El2) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = {
+    bf.fromSpecific(src)(new AbstractView[B] {
+      def iterator = new AbstractIterator[B] {
+        private[this] val elems1 = coll1.iterator
+        private[this] val elems2 = coll2.iterator
+        private[this] var _current: Iterator[B] = Iterator.empty
+        private def current = {
+          while (!_current.hasNext && elems1.hasNext && elems2.hasNext)
+            _current = f(elems1.next(), elems2.next()).iterator
+          _current
+        }
+        def hasNext = current.hasNext
+        def next() = current.next()
+      }
+      override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize
+      override def isEmpty: Boolean = iterator.isEmpty
+    })
+  }
+
+  def filter[C](p: (El1, El2) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2), C]): C = {
+    bf.fromSpecific(src)(new AbstractView[(El1, El2)] {
+      def iterator = new AbstractIterator[(El1, El2)] {
+        private[this] val elems1 = coll1.iterator
+        private[this] val elems2 = coll2.iterator
+        private[this] var _current: (El1, El2) = _
+        private def current = {
+          while ((_current eq null) && elems1.hasNext && elems2.hasNext) {
+            val e1 = elems1.next()
+            val e2 = elems2.next()
+            if (p(e1, e2)) _current = (e1, e2)
+          }
+          _current
+        }
+        def hasNext = current ne null
+        def next() = {
+          val c = current
+          if (c ne null) {
+            _current = null
+            c
+          } else Iterator.empty.next()
+        }
+      }
+      override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize
+      override def isEmpty: Boolean = iterator.isEmpty
+    })
+  }
+
+  def exists(p: (El1, El2) => Boolean): Boolean = {
+    val elems1 = coll1.iterator
+    val elems2 = coll2.iterator
+    var res = false
+
+    while (!res && elems1.hasNext && elems2.hasNext) res = p(elems1.next(), elems2.next())
+
+    res
+  }
+
+  def forall(p: (El1, El2) => Boolean): Boolean = !exists((el1, el2) => !p(el1, el2))
+
+  def foreach[U](f: (El1, El2) => U): Unit = {
+    val elems1 = coll1.iterator
+    val elems2 = coll2.iterator
+
+    while (elems1.hasNext && elems2.hasNext) f(elems1.next(), elems2.next())
+  }
+
+  private def toIterable: View[(El1, El2)] = new AbstractView[(El1, El2)] {
+    def iterator = new AbstractIterator[(El1, El2)] {
+      private[this] val elems1 = coll1.iterator
+      private[this] val elems2 = coll2.iterator
+      def hasNext = elems1.hasNext && elems2.hasNext
+      def next() = (elems1.next(), elems2.next())
+    }
+    override def knownSize: Int = zipKnownSize
+    override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty
+  }
+
+  private def zipKnownSize: Int = {
+    val s1 = coll1.knownSize
+    if (s1 == 0) 0 else {
+      val s2 = coll2.knownSize
+      if (s2 == 0) 0 else s1 min s2
+    }
+  }
+
+  override def toString = s"$coll1.lazyZip($coll2)"
+}
+
+object LazyZip2 {
+  implicit def lazyZip2ToIterable[El1, El2](zipped2: LazyZip2[El1, El2, _]): View[(El1, El2)] = zipped2.toIterable
}


+/** Decorator representing lazily zipped triples.
+ * + * @define coll triple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, + coll1: Iterable[El1], + coll2: Iterable[El2], + coll3: Iterable[El3]) { + + /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator. + * + * @param that the iterable providing the fourth element of each eventual 4-tuple + * @tparam B the type of the fourth element in each eventual 4-tuple + * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples. + * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip4[El1, El2, El3, B, C1] = new LazyZip4(src, coll1, coll2, coll3, that) + + def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2, El3)] { + def iterator = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: (El1, El2, El3) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + if (p(e1, e2, e3)) _current = (e1, e2, e3) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext && 
elems3.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next()) + + res + } + + def forall(p: (El1, El2, El3) => Boolean): Boolean = !exists((el1, el2, el3) => !p(el1, el2, el3)) + + def foreach[U](f: (El1, El2, El3) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) + f(elems1.next(), elems2.next(), elems3.next()) + } + + private def toIterable: View[(El1, El2, El3)] = new AbstractView[(El1, El2, El3)] { + def iterator = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if (s3 == 0) 0 else s1 min s2 min s3 + } + } + } + + override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3)" +} + +object LazyZip3 { + implicit def lazyZip3ToIterable[El1, El2, El3](zipped3: LazyZip3[El1, El2, El3, _]): View[(El1, El2, El3)] = zipped3.toIterable +} + + + +/** Decorator representing lazily zipped 4-tuples. + * + * @define coll tuple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, + coll1: Iterable[El1], + coll2: Iterable[El2], + coll3: Iterable[El3], + coll4: Iterable[El4]) { + + def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3, El4) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3, El4) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3, El4), C]): C = { + bf.fromSpecific(src)(new 
AbstractView[(El1, El2, El3, El4)] { + def iterator = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: (El1, El2, El3, El4) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + val e4 = elems4.next() + if (p(e1, e2, e3, e4)) _current = (e1, e2, e3, e4) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3, El4) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + + res + } + + def forall(p: (El1, El2, El3, El4) => Boolean): Boolean = !exists((el1, el2, el3, el4) => !p(el1, el2, el3, el4)) + + def foreach[U](f: (El1, El2, El3, El4) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + + private def toIterable: View[(El1, El2, El3, El4)] = new AbstractView[(El1, El2, El3, El4)] { + def iterator = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if (s3 == 0) 0 else { + val s4 = coll4.knownSize + if (s4 == 0) 0 else s1 min s2 min s3 min s4 + } + } + } + } + + override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3).lazyZip($coll4)" +} + +object LazyZip4 { + implicit def lazyZip4ToIterable[El1, El2, El3, El4](zipped4: LazyZip4[El1, El2, El3, El4, _]): View[(El1, El2, El3, El4)] = + zipped4.toIterable +} diff --git a/tests/pos-special/stdlib/collection/LinearSeq.scala b/tests/pos-special/stdlib/collection/LinearSeq.scala index 393f5fda4187..449d58c866e3 100644 --- a/tests/pos-special/stdlib/collection/LinearSeq.scala +++ b/tests/pos-special/stdlib/collection/LinearSeq.scala @@ -14,7 +14,6 @@ package scala package collection import scala.annotation.{nowarn, tailrec} -import language.experimental.captureChecking /** Base trait for linearly accessed sequences that have efficient `head` and * `tail` operations. 
@@ -33,7 +32,7 @@ trait LinearSeq[+A] extends Seq[A] object LinearSeq extends SeqFactory.Delegate[LinearSeq](immutable.LinearSeq) /** Base trait for linear Seq operations */ -trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends AnyRef with SeqOps[A, CC, C] { +trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends Any with SeqOps[A, CC, C] { /** @inheritdoc * @@ -97,7 +96,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq else loop(0, coll) } - override def lengthCompare(that: Iterable[_]^): Int = { + override def lengthCompare(that: Iterable[_]): Int = { val thatKnownSize = that.knownSize if (thatKnownSize >= 0) this lengthCompare thatKnownSize @@ -187,7 +186,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq acc } - override def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { + override def sameElements[B >: A](that: IterableOnce[B]): Boolean = { @tailrec def linearSeqEq(a: LinearSeq[B], b: LinearSeq[B]): Boolean = (a eq b) || { if (a.nonEmpty && b.nonEmpty && a.head == b.head) { @@ -260,7 +259,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq } } -trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends AnyRef with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { +trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends Any with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { // A more efficient iterator implementation than the default LinearSeqIterator override def iterator: Iterator[A] = new AbstractIterator[A] { private[this] var current = StrictOptimizedLinearSeqOps.this diff --git a/tests/pos-special/stdlib/collection/Map.scala b/tests/pos-special/stdlib/collection/Map.scala index ef4f915ea573..0fb6df9a06dc 100644 --- a/tests/pos-special/stdlib/collection/Map.scala +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -17,7 +17,6 @@ import scala.annotation.nowarn import scala.collection.generic.DefaultSerializable import scala.collection.mutable.StringBuilder import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking /** Base Map type */ trait Map[K, +V] @@ -132,7 +131,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] /** Similar to `fromIterable`, but returns a Map collection type. * Note that the return type is now `CC[K2, V2]`. */ - @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]^): CC[K2, V2] = mapFactory.from(it) + @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]): CC[K2, V2] = mapFactory.from(it) /** The companion object of this map, providing various factory methods. * @@ -319,7 +318,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. 
The element type of the $coll is the most specific superclass encompassing @@ -329,7 +328,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll which contains all elements * of this $coll followed by all elements of `suffix`. */ - def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): CC[K, V2] = mapFactory.from(suffix match { + def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): CC[K, V2] = mapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) @@ -337,7 +336,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] // Not final because subclasses refine the result type, e.g. in SortedMap, the result type is // SortedMap's CC, while Map's CC is fixed to Map /** Alias for `concat` */ - /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) + /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = iterator.map { case (k, v) => s"$k -> $v" }.addString(sb, start, sep, end) @@ -351,14 +350,14 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] mapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) @deprecated("Consider requiring an immutable Map.", "2.13.0") - @`inline` def -- (keys: IterableOnce[K]^): C = { + @`inline` def -- (keys: IterableOnce[K]): C = { lazy val keysSet = keys.iterator.to(immutable.Set) fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))) } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") - def ++: [V1 >: V](that: IterableOnce[(K,V1)]^): CC[K,V1] = { - val thatIterable: Iterable[(K, V1)]^{that} = that match { + def ++: [V1 >: V](that: IterableOnce[(K,V1)]): CC[K,V1] = { + val thatIterable: Iterable[(K, V1)] = that match { case that: Iterable[(K, V1)] => that case that => View.from(that) } @@ -381,10 +380,10 @@ object MapOps { def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = self.mapFactory.from(new View.Map(filtered, f)) - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = self.mapFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{p, q} = + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC] = new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) } diff --git a/tests/pos-special/stdlib/collection/MapView.scala b/tests/pos-special/stdlib/collection/MapView.scala new file mode 100644 index 000000000000..7f84178a7c16 --- /dev/null +++ b/tests/pos-special/stdlib/collection/MapView.scala @@ -0,0 +1,187 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.annotation.nowarn +import scala.collection.MapView.SomeMapOps +import scala.collection.mutable.Builder + +trait MapView[K, +V] + extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]] + with View[(K, V)] { + + override def view: MapView[K, V] = this + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all keys of this map. + * + * @return the keys of this map as a view. + */ + override def keys: Iterable[K] = new MapView.Keys(this) + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all values of this map. + * + * @return the values of this map as a view. + */ + override def values: Iterable[V] = new MapView.Values(this) + + /** Filters this map by retaining only keys satisfying a predicate. + * @param p the predicate used to test keys + * @return an immutable map consisting only of those key value pairs of this map where the key satisfies + * the predicate `p`. The resulting map wraps the original map without copying any elements. + */ + override def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) + + /** Transforms this map by applying a function to every retrieved value. + * @param f the function used to transform values of this map. + * @return a map view which maps every key of this map + * to `f(this(key))`. The resulting map wraps the original map without copying any elements. + */ + override def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) + + override def filter(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, false, pred) + + override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, true, pred) + + override def partition(p: ((K, V)) => Boolean): (MapView[K, V], MapView[K, V]) = (filter(p), filterNot(p)) + + override def tapEach[U](f: ((K, V)) => U): MapView[K, V] = new MapView.TapEach(this, f) + + def mapFactory: MapViewFactory = MapView + + override def empty: MapView[K, V] = mapFactory.empty + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l] = new MapOps.WithFilter(this, p) + + override def toString: String = super[View].toString + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "MapView" +} + +object MapView extends MapViewFactory { + + /** An `IterableOps` whose collection type and collection type constructor are unknown */ + type SomeIterableConstr[X, Y] = IterableOps[_, AnyConstr, _] + /** A `MapOps` whose collection type and collection type constructor are (mostly) unknown */ + type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _] + + @SerialVersionUID(3L) + private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { + override def get(key: Any): Option[Nothing] = None + override def iterator: Iterator[Nothing] = Iterator.empty[Nothing] + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + override def filterKeys(p: Any => Boolean): MapView[Any, Nothing] = this + override def mapValues[W](f: Nothing => W): MapView[Any, Nothing] = this + override def filter(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + override def filterNot(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + override def partition(p: ((Any, Nothing)) => Boolean): (MapView[Any, Nothing], MapView[Any, Nothing]) = (this, this) + } + + 
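+  // A small illustrative sketch: `mapValues` and `filterKeys` wrap the
+  // underlying map lazily, so the function or predicate runs on each access
+  // and no intermediate map is built. For example:
+  //   Map(1 -> "a", 2 -> "bb").view.mapValues(_.length).apply(2)  // 2, computed on demand
+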
@SerialVersionUID(3L) + class Id[K, +V](underlying: SomeMapOps[K, V]) extends AbstractMapView[K, V] { + def get(key: K): Option[V] = underlying.get(key) + def iterator: Iterator[(K, V)] = underlying.iterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Keys[K](underlying: SomeMapOps[K, _]) extends AbstractView[K] { + def iterator: Iterator[K] = underlying.keysIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Values[+V](underlying: SomeMapOps[_, V]) extends AbstractView[V] { + def iterator: Iterator[V] = underlying.valuesIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class MapValues[K, +V, +W](underlying: SomeMapOps[K, V], f: V => W) extends AbstractMapView[K, W] { + def iterator: Iterator[(K, W)] = underlying.iterator.map(kv => (kv._1, f(kv._2))) + def get(key: K): Option[W] = underlying.get(key).map(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class FilterKeys[K, +V](underlying: SomeMapOps[K, V], p: K => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)] = underlying.iterator.filter { case (k, _) => p(k) } + def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class Filter[K, +V](underlying: SomeMapOps[K, V], isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)] = underlying.iterator.filterImpl(p, isFlipped) + def get(key: K): Option[V] = underlying.get(key) match { + case s @ Some(v) if p((key, v)) != isFlipped => s + case _ => None + } + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class TapEach[K, +V, +U](underlying: SomeMapOps[K, V], f: ((K, V)) => U) extends AbstractMapView[K, V] { + override def get(key: K): Option[V] = { + underlying.get(key) match { + case s @ Some(v) => + f((key, v)) + s + case None => None + } + } + override def iterator: Iterator[(K, V)] = underlying.iterator.tapEach(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + override def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) + + override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]] + + override def from[K, V](it: IterableOnce[(K, V)]): View[(K, V)] = View.from(it) + + override def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] = it match { + case mv: MapView[K, V] => mv + case other => new MapView.Id(other) + } + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) +} + +trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y)]})#l] { + + def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] + + def empty[X, Y]: MapView[X, Y] + + def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = 
from(elems.toMap) +} + +/** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. */ +@SerialVersionUID(3L) +abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V] + diff --git a/tests/pos-special/stdlib/collection/Searching.scala b/tests/pos-special/stdlib/collection/Searching.scala new file mode 100644 index 000000000000..874a06449aa9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Searching.scala @@ -0,0 +1,57 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.language.implicitConversions +import scala.collection.generic.IsSeq + +object Searching { + + /** The result of performing a search on a sorted sequence + * + * Example usage: + * + * {{{ + * val list = List(1, 3, 4, 5) // list must be sorted before searching + * list.search(4) // Found(2) + * list.search(2) // InsertionPoint(1) + * }}} + * + * */ + sealed abstract class SearchResult { + /** The index corresponding to the element searched for in the sequence, if it was found, + * or the index where the element would be inserted in the sequence, if it was not in the sequence */ + def insertionPoint: Int + } + + /** The result of performing a search on a sorted sequence, where the element was found. + * + * @param foundIndex the index corresponding to the element searched for in the sequence + */ + case class Found(foundIndex: Int) extends SearchResult { + override def insertionPoint: Int = foundIndex + } + + /** The result of performing a search on a sorted sequence, where the element was not found + * + * @param insertionPoint the index where the element would be inserted in the sequence + */ + case class InsertionPoint(insertionPoint: Int) extends SearchResult + + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + class SearchImpl[Repr, A](private val coll: SeqOps[A, AnyConstr, _]) extends AnyVal + + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + implicit def search[Repr, A](coll: Repr)(implicit fr: IsSeq[Repr]): SearchImpl[Repr, fr.A] = + new SearchImpl(fr.conversion(coll)) +} diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala index caabf6fa6436..d960838fdcb7 100644 --- a/tests/pos-special/stdlib/collection/Seq.scala +++ b/tests/pos-special/stdlib/collection/Seq.scala @@ -16,8 +16,6 @@ import scala.collection.immutable.Range import scala.util.hashing.MurmurHash3 import Searching.{Found, InsertionPoint, SearchResult} import scala.annotation.nowarn -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure /** Base trait for sequence collections * @@ -29,7 +27,6 @@ trait Seq[+A] with SeqOps[A, Seq, Seq[A]] with IterableFactoryDefaults[A, Seq] with Equals { - this: Seq[A] => override def iterableFactory: SeqFactory[Seq] = Seq @@ -77,7 +74,8 @@ object Seq extends SeqFactory.Delegate[Seq](immutable.Seq) * @define coll sequence * @define Coll `Seq` */ -trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => +trait SeqOps[+A, +CC[_], +C] extends Any + with IterableOps[A, CC, C] { self => override def view: SeqView[A] = 
new SeqView.Id[A](this) @@ -162,13 +160,13 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @return a new $coll which contains all elements of `prefix` followed * by all the elements of this $coll. */ - def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = iterableFactory.from(prefix match { + def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = iterableFactory.from(prefix match { case prefix: Iterable[B] => new View.Concat(prefix, this) case _ => prefix.iterator ++ iterator }) /** Alias for `prependedAll` */ - @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]^): CC[B] = prependedAll(prefix) + @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]): CC[B] = prependedAll(prefix) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. The element type of the $coll is the most specific superclass encompassing @@ -179,15 +177,14 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @return a new collection of type `CC[B]` which contains all elements * of this $coll followed by all elements of `suffix`. */ - def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] = - super.concat(suffix).unsafeAssumePure + def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = super.concat(suffix) /** Alias for `appendedAll` */ - @`inline` final def :++ [B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix) + @`inline` final def :++ [B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) // Make `concat` an alias for `appendedAll` so that it benefits from performance // overrides of this method - @`inline` final override def concat[B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix) + @`inline` final override def concat[B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) /** Produces a new sequence which contains all elements of this $coll and also all elements of * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. @@ -215,7 +212,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @tparam B the type of the elements after being transformed by `f` * @return a new $coll consisting of all the elements of this $coll without duplicates. */ - def distinctBy[B](f: A -> B): C = fromSpecific(new View.DistinctBy(this, f)) + def distinctBy[B](f: A => B): C = fromSpecific(new View.DistinctBy(this, f)) /** Returns new $coll with elements in reversed order. * @@ -246,7 +243,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @return `true` if the sequence `that` is contained in this $coll at * index `offset`, otherwise `false`. */ - def startsWith[B >: A](that: IterableOnce[B]^, offset: Int = 0): Boolean = { + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = { val i = iterator drop offset val j = that.iterator while (j.hasNext && i.hasNext) @@ -261,7 +258,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @param that the sequence to test * @return `true` if this $coll has `that` as a suffix, `false` otherwise. 
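   *
   * For example (a hypothetical sketch):
   * {{{
   *   List(1, 2, 3).endsWith(List(2, 3))  // true
   *   List(1, 2, 3).endsWith(List(3, 2))  // false
   * }}}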
*/ - def endsWith[B >: A](that: Iterable[B]^): Boolean = { + def endsWith[B >: A](that: Iterable[B]): Boolean = { if (that.isEmpty) true else { val i = iterator.drop(length - that.size) @@ -631,9 +628,6 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => private[this] def init() = { val m = mutable.HashMap[A, Int]() - //val s1 = self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) - //val s2: Seq[(A, Int)] = s1 sortBy (_._2) - //val (es, is) = s2.unzip(using Predef.$conforms[(A, Int)]) val (es, is) = (self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip (es.to(mutable.ArrayBuffer), is.toArray) @@ -813,7 +807,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => */ def lengthCompare(len: Int): Int = super.sizeCompare(len) - override final def sizeCompare(that: Iterable[_]^): Int = lengthCompare(that) + override final def sizeCompare(that: Iterable[_]): Int = lengthCompare(that) /** Compares the length of this $coll to the size of another `Iterable`. * @@ -828,7 +822,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * is `O(this.length min that.size)` instead of `O(this.length + that.size)`. * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. */ - def lengthCompare(that: Iterable[_]^): Int = super.sizeCompare(that) + def lengthCompare(that: Iterable[_]): Int = super.sizeCompare(that) /** Returns a value class containing operations for comparing the length of this $coll to a test value. * @@ -851,7 +845,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => /** Are the elements of this collection the same (and in the same order) * as those of `that`? */ - def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { + def sameElements[B >: A](that: IterableOnce[B]): Boolean = { val thisKnownSize = knownSize val knownSizeDifference = thisKnownSize != -1 && { val thatKnownSize = that.knownSize @@ -943,7 +937,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * except that `replaced` elements starting from `from` are replaced * by all the elements of `other`. */ - def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = + def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = iterableFactory.from(new View.Patched(this, from, other, replaced)) /** A copy of this $coll with one single replaced element. @@ -1010,11 +1004,11 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @return a `Found` value containing the index corresponding to the element in the * sequence, or the `InsertionPoint` where the element would be inserted if * the element is not in the sequence. 
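   *
   * For example (a hypothetical sketch over an already sorted sequence):
   * {{{
   *   Vector(1, 3, 5, 7).search(5, from = 1, to = 4)  // Found(2)
   *   Vector(1, 3, 5, 7).search(4, from = 1, to = 4)  // InsertionPoint(2)
   * }}}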
- * + * * @note if `to <= from`, the search space is empty, and an `InsertionPoint` at `from` * is returned */ - def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = + def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = linearSearch(view.slice(from, to), elem, math.max(0, from))(ord) private[this] def linearSearch[B >: A](c: View[A], elem: B, offset: Int) diff --git a/tests/pos-special/stdlib/collection/SeqMap.scala b/tests/pos-special/stdlib/collection/SeqMap.scala new file mode 100644 index 000000000000..05bf126aba02 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SeqMap.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.nowarn + +/** + * A generic trait for ordered maps. Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Note that when checking for equality [[SeqMap]] does not take into account + * ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ + +trait SeqMap[K, +V] extends Map[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SeqMap" + + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + +object SeqMap extends MapFactory.Delegate[immutable.SeqMap](immutable.SeqMap) + diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala new file mode 100644 index 000000000000..ad16f01b9184 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SeqView.scala @@ -0,0 +1,209 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.nowarn + + +trait SeqView[+A] extends SeqOps[A, View, View[A]] with View[A] { + override def view: SeqView[A] = this + + override def map[B](f: A => B): SeqView[B] = new SeqView.Map(this, f) + override def appended[B >: A](elem: B): SeqView[B] = new SeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): SeqView[B] = new SeqView.Prepended(elem, this) + override def reverse: SeqView[A] = new SeqView.Reverse(this) + override def take(n: Int): SeqView[A] = new SeqView.Take(this, n) + override def drop(n: Int): SeqView[A] = new SeqView.Drop(this, n) + override def takeRight(n: Int): SeqView[A] = new SeqView.TakeRight(this, n) + override def dropRight(n: Int): SeqView[A] = new SeqView.DropRight(this, n) + override def tapEach[U](f: A => U): SeqView[A] = new SeqView.Map(this, { (a: A) => f(a); a }) + + def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(prefix, this) + + override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A] = new SeqView.Sorted(this, ord) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SeqView" +} + +object SeqView { + + /** A `SeqOps` whose collection type and collection type constructor are unknown */ + private type SomeSeqOps[+A] = SeqOps[A, AnyConstr, _] + + /** A view that doesn’t apply any transformation to an underlying sequence */ + @SerialVersionUID(3L) + class Id[+A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { + def apply(idx: Int): A = underlying.apply(idx) + def length: Int = underlying.length + def iterator: Iterator[A] = underlying.iterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class Map[+A, +B](underlying: SomeSeqOps[A], f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { + def apply(idx: Int): B = f(underlying(idx)) + def length: Int = underlying.length + } + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeSeqOps[A], elem: A) extends View.Appended(underlying, elem) with SeqView[A] { + def apply(idx: Int): A = if (idx == underlying.length) elem else underlying(idx) + def length: Int = underlying.length + 1 + } + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeSeqOps[A]) extends View.Prepended(elem, underlying) with SeqView[A] { + def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1) + def length: Int = underlying.length + 1 + } + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeSeqOps[A], suffix: SomeSeqOps[A]) extends View.Concat[A](prefix, suffix) with SeqView[A] { + def apply(idx: Int): A = { + val l = prefix.length + if (idx < l) prefix(idx) else suffix(idx - l) + } + def length: Int = prefix.length + suffix.length + } + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { + def apply(i: Int) = underlying.apply(size - 1 - i) + def length = underlying.size + def iterator: Iterator[A] = underlying.reverseIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class Take[+A](underlying: SomeSeqOps[A], n: Int) extends 
View.Take(underlying, n) with SeqView[A] { + def apply(idx: Int): A = if (idx < n) { + underlying(idx) + } else { + throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${if (underlying.knownSize >= 0) knownSize - 1 else "unknown"})") + } + def length: Int = underlying.length min normN + } + + @SerialVersionUID(3L) + class TakeRight[+A](underlying: SomeSeqOps[A], n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { + private[this] val delta = (underlying.size - (n max 0)) max 0 + def length = underlying.size - delta + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + delta) + } + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeSeqOps[A], n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { + def length = (underlying.size - normN) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + normN) + override def drop(n: Int): SeqView[A] = new Drop(underlying, this.n + n) + } + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeSeqOps[A], n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { + private[this] val len = (underlying.size - (n max 0)) max 0 + def length = len + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i) + } + + @SerialVersionUID(3L) + class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A], + private[this] val len: Int, + ord: Ordering[B]) + extends SeqView[A] { + outer => + + // force evaluation immediately by calling `length` so infinite collections + // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls + def this(underlying: SomeSeqOps[A], ord: Ordering[B]) = this(underlying, underlying.length, ord) + + @SerialVersionUID(3L) + private[this] class ReverseSorted extends SeqView[A] { + private[this] lazy val _reversed = new SeqView.Reverse(_sorted) + + def apply(i: Int): A = _reversed.apply(i) + def length: Int = len + def iterator: Iterator[A] = Iterator.empty ++ _reversed.iterator // very lazy + override def knownSize: Int = len + override def isEmpty: Boolean = len == 0 + override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory) + override def reverse: SeqView[A] = outer + override protected def reversed: Iterable[A] = outer + + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A] = + if (ord1 == Sorted.this.ord) outer + else if (ord1.isReverseOf(Sorted.this.ord)) this + else new Sorted(elems, len, ord1) + } + + @volatile private[this] var evaluated = false + + private[this] lazy val _sorted: Seq[A] = { + val res = { + val len = this.len + if (len == 0) Nil + else if (len == 1) List(underlying.head) + else { + val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] + underlying.copyToArray(arr) + java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) + // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it + // is safe because: + // - the ArraySeq is immutable, and items that are not of type A + // cannot be added to it + // - we know it only contains items of type A (and if this collection + // contains items of another type, we'd get a CCE anyway) + // - the cast doesn't actually do anything in the runtime because the + // type of A is not known and Array[_] is Array[AnyRef] + immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]]) + } + } + evaluated = true + underlying = null + res + } + + private[this] def elems: SomeSeqOps[A] = { + val orig = underlying + if (evaluated) 
_sorted else orig
+  }
+
+  def apply(i: Int): A = _sorted.apply(i)
+  def length: Int = len
+  def iterator: Iterator[A] = Iterator.empty ++ _sorted.iterator // very lazy
+  override def knownSize: Int = len
+  override def isEmpty: Boolean = len == 0
+  override def to[C1](factory: Factory[A, C1]): C1 = _sorted.to(factory)
+  override def reverse: SeqView[A] = new ReverseSorted
+  // we know `_sorted` is either tiny or has efficient random access,
+  // so this is acceptable for `reversed`
+  override protected def reversed: Iterable[A] = new ReverseSorted
+
+  override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A] =
+    if (ord1 == this.ord) this
+    else if (ord1.isReverseOf(this.ord)) reverse
+    else new Sorted(elems, len, ord1)
+  }
+}
+
+/** Explicit instantiation of the `SeqView` trait to reduce class file size in subclasses. */
+@SerialVersionUID(3L)
+abstract class AbstractSeqView[+A] extends AbstractView[A] with SeqView[A]
diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala
new file mode 100644
index 000000000000..0ea1e5689473
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/Set.scala
@@ -0,0 +1,269 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.util.hashing.MurmurHash3
+import java.lang.String
+
+import scala.annotation.nowarn
+
+/** Base trait for set collections.
+  */
+trait Set[A]
+  extends Iterable[A]
+    with SetOps[A, Set, Set[A]]
+    with Equals
+    with IterableFactoryDefaults[A, Set] {
+
+  def canEqual(that: Any) = true
+
+  /**
+   * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if
+   *   - the argument `that` is a `Set`,
+   *   - the two sets have the same [[size]], and
+   *   - for every `element` of this set, `other.contains(element) == true`.
+   *
+   * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality
+   * to specific set types. The `Set` implementations in the standard library can all be compared, their `canEqual`
+   * methods return `true`.
+   *
+   * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same
+   * element equivalence function in their lookup operation. For example, the element equivalence operation in a
+   * [[scala.collection.immutable.TreeSet]] is defined by its ordering. Comparing a `TreeSet` with a `HashSet` leads
+   * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2`
+   * (used for lookup in `HashSet`).
+   *
+   * {{{
+   *   scala> import scala.collection.immutable._
+   *   scala> val ord: Ordering[String] = _ compareToIgnoreCase _
+   *
+   *   scala> TreeSet("A")(ord) == HashSet("a")
+   *   val res0: Boolean = false
+   *
+   *   scala> HashSet("a") == TreeSet("A")(ord)
+   *   val res1: Boolean = true
+   * }}}
+   *
+   * @param that The set to which this set is compared
+   * @return `true` if the two sets are equal according to the description
+   */
+  override def equals(that: Any): Boolean =
+    (this eq that.asInstanceOf[AnyRef]) || (that match {
+      case set: Set[A @unchecked] if set.canEqual(this) =>
+        (this.size == set.size) && {
+          try this.subsetOf(set)
+          catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228
+        }
+      case _ =>
+        false
+    })
+
+  override def hashCode(): Int = MurmurHash3.setHash(this)
+
+  override def iterableFactory: IterableFactory[Set] = Set
+
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix: String = "Set"
+
+  override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too
+}
+
+/** Base trait for set operations.
+  *
+  * @define coll set
+  * @define Coll `Set`
+  */
+trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]]
+  extends IterableOps[A, CC, C]
+    with (A => Boolean) {
+
+  def contains(elem: A): Boolean
+
+  /** Tests if some element is contained in this set.
+    *
+    * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
+    * @param elem the element to test for membership.
+    * @return `true` if `elem` is contained in this set, `false` otherwise.
+    */
+  @`inline` final def apply(elem: A): Boolean = this.contains(elem)
+
+  /** Tests whether this set is a subset of another set.
+    *
+    * @param that the set to test.
+    * @return `true` if this set is a subset of `that`, i.e. if
+    *         every element of this set is also an element of `that`.
+    */
+  def subsetOf(that: Set[A]): Boolean = this.forall(that)
+
+  /** An iterator over all subsets of this set of the given size.
+    * If the requested size is impossible, an empty iterator is returned.
+    *
+    * @param len the size of the subsets.
+    * @return the iterator.
+    */
+  def subsets(len: Int): Iterator[C] = {
+    if (len < 0 || len > size) Iterator.empty
+    else new SubsetsItr(this.to(IndexedSeq), len)
+  }
+
+  /** An iterator over all subsets of this set.
+    *
+    * @return the iterator.
+    */
+  def subsets(): Iterator[C] = new AbstractIterator[C] {
+    private[this] val elms = SetOps.this.to(IndexedSeq)
+    private[this] var len = 0
+    private[this] var itr: Iterator[C] = Iterator.empty
+
+    def hasNext = len <= elms.size || itr.hasNext
+    def next() = {
+      if (!itr.hasNext) {
+        if (len > elms.size) Iterator.empty.next()
+        else {
+          itr = new SubsetsItr(elms, len)
+          len += 1
+        }
+      }
+
+      itr.next()
+    }
+  }
+
+  /** An iterator over all subsets containing exactly `len` elements.
+    * If the elements of this collection are ordered, the subsets will be emitted in the same order.
+ * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}} + * + * $willForceEvaluation + * + */ + private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[C] { + private[this] val idxs = Array.range(0, len+1) + private[this] var _hasNext = true + idxs(len) = elms.size + + def hasNext = _hasNext + @throws[NoSuchElementException] + def next(): C = { + if (!hasNext) Iterator.empty.next() + + val buf = newSpecificBuilder + idxs.slice(0, len) foreach (idx => buf += elms(idx)) + val result = buf.result() + + var i = len - 1 + while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1 + + if (i < 0) _hasNext = false + else { + idxs(i) += 1 + for (j <- (i+1) until len) + idxs(j) = idxs(j-1) + 1 + } + + result + } + } + + /** Computes the intersection between this set and another set. + * + * @param that the set to intersect with. + * @return a new set consisting of all elements that are both in this + * set and in the given set `that`. + */ + def intersect(that: Set[A]): C = this.filter(that) + + /** Alias for `intersect` */ + @`inline` final def & (that: Set[A]): C = intersect(that) + + /** Computes the difference of this set and another set. + * + * @param that the set of elements to exclude. + * @return a set containing those elements of this + * set that are not also contained in the given set `that`. + */ + def diff(that: Set[A]): C + + /** Alias for `diff` */ + @`inline` final def &~ (that: Set[A]): C = this diff that + + @deprecated("Consider requiring an immutable Set", "2.13.0") + def -- (that: IterableOnce[A]): C = { + val toRemove = that.iterator.to(immutable.Set) + fromSpecific(view.filterNot(toRemove)) + } + + @deprecated("Consider requiring an immutable Set or fall back to Set.diff", "2.13.0") + def - (elem: A): C = diff(Set(elem)) + + @deprecated("Use &- with an explicit collection argument instead of - with varargs", "2.13.0") + def - (elem1: A, elem2: A, elems: A*): C = diff(elems.toSet + elem1 + elem2) + + /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates. + * + * This method takes a collection of elements and adds all elements, omitting duplicates, into $coll. + * + * Example: + * {{{ + * scala> val a = Set(1, 2) concat Set(2, 3) + * a: scala.collection.immutable.Set[Int] = Set(1, 2, 3) + * }}} + * + * @param that the collection containing the elements to add. + * @return a new $coll with the given elements added, omitting duplicates. + */ + def concat(that: collection.IterableOnce[A]): C = this match { + case optimizedSet @ (_ : scala.collection.immutable.Set.Set1[A] | _: scala.collection.immutable.Set.Set2[A] | _: scala.collection.immutable.Set.Set3[A] | _: scala.collection.immutable.Set.Set4[A]) => + // StrictOptimizedSetOps optimization of concat (these Sets cannot extend StrictOptimizedSetOps because of binary-incompatible return type; cf. 
PR #10036)
+      var result = optimizedSet.asInstanceOf[scala.collection.immutable.SetOps[A, scala.collection.immutable.Set, scala.collection.immutable.Set[A]]]
+      val it = that.iterator
+      while (it.hasNext) result = result + it.next()
+      result.asInstanceOf[C]
+    case _ => fromSpecific(that match {
+      case that: collection.Iterable[A] => new View.Concat(this, that)
+      case _ => iterator.concat(that.iterator)
+    })
+  }
+
+  @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0")
+  def + (elem: A): C = fromSpecific(new View.Appended(this, elem))
+
+  @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+  def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))
+
+  /** Alias for `concat` */
+  @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that)
+
+  /** Computes the union of this set and another set.
+    *
+    * @param that the set to form the union with.
+    * @return a new set consisting of all elements that are in this
+    *         set or in the given set `that`.
+    */
+  @`inline` final def union(that: Set[A]): C = concat(that)
+
+  /** Alias for `union` */
+  @`inline` final def | (that: Set[A]): C = concat(that)
+}
+
+/**
+ * $factoryInfo
+ * @define coll set
+ * @define Coll `Set`
+ */
+@SerialVersionUID(3L)
+object Set extends IterableFactory.Delegate[Set](immutable.Set)
+
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
+abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
diff --git a/tests/pos-special/stdlib/collection/SortedMap.scala b/tests/pos-special/stdlib/collection/SortedMap.scala
new file mode 100644
index 000000000000..03ab0bb0dadc
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/SortedMap.scala
@@ -0,0 +1,220 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.annotation.{implicitNotFound, nowarn}
+
+/** A Map whose keys are sorted according to a [[scala.math.Ordering]] */
+trait SortedMap[K, +V]
+  extends Map[K, V]
+    with SortedMapOps[K, V, SortedMap, SortedMap[K, V]]
+    with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] {
+
+  def unsorted: Map[K, V] = this
+
+  def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap
+
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix: String = "SortedMap"
+
+  override def equals(that: Any): Boolean = that match {
+    case _ if this eq that.asInstanceOf[AnyRef] => true
+    case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering =>
+      (sm canEqual this) &&
+        (this.size == sm.size) && {
+          val i1 = this.iterator
+          val i2 = sm.iterator
+          var allEqual = true
+          while (allEqual && i1.hasNext) {
+            val kv1 = i1.next()
+            val kv2 = i2.next()
+            allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2
+          }
+          allEqual
+        }
+    case _ => super.equals(that)
+  }
+}
+
+trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]]
+  extends MapOps[K, V, Map, C]
+    with SortedOps[K, C] {
+
+  /** The companion object of this sorted map, providing various factory methods.
+    *
+    * @note When implementing a custom collection type and refining `CC` to the new type, this
+    *       method needs to be overridden to return a factory for the new type (the compiler will
+    *       issue an error otherwise).
+    */
+  def sortedMapFactory: SortedMapFactory[CC]
+
+  /** Similar to `mapFromIterable`, but returns a SortedMap collection type.
+    * Note that the return type is now `CC[K2, V2]`.
+    */
+  @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it)
+
+  def unsorted: Map[K, V]
+
+  /**
+    * Creates an iterator over all the key/value pairs
+    * contained in this map having a key greater than or
+    * equal to `start` according to the ordering of
+    * this map. x.iteratorFrom(y) is equivalent
+    * to but often more efficient than x.from(y).iterator.
+    *
+    * @param start The lower bound (inclusive)
+    *              on the keys to be returned
+    */
+  def iteratorFrom(start: K): Iterator[(K, V)]
+
+  /**
+    * Creates an iterator over all the keys (or elements) contained in this
+    * collection greater than or equal to `start`
+    * according to the ordering of this collection. x.keysIteratorFrom(y)
+    * is equivalent to but often more efficient than
+    * x.from(y).keysIterator.
+    *
+    * @param start The lower bound (inclusive)
+    *              on the keys to be returned
+    */
+  def keysIteratorFrom(start: K): Iterator[K]
+
+  /**
+    * Creates an iterator over all the values contained in this
+    * map that are associated with a key greater than or equal to `start`
+    * according to the ordering of this map. x.valuesIteratorFrom(y) is
+    * equivalent to but often more efficient than
+    * x.from(y).valuesIterator.
+    *
+    * @param start The lower bound (inclusive)
+    *              on the keys to be returned
+    */
+  def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2)
+
+  def firstKey: K = head._1
+  def lastKey: K = last._1
+
+  /** Find the element with smallest key larger than or equal to a given key.
+    * @param key The given key.
+    * @return `None` if there is no such node.
+    */
+  def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption
+
+  /** Find the element with largest key less than a given key.
+    * @param key The given key.
+    * @return `None` if there is no such node.
+    */
+  def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption
+
+  def rangeTo(to: K): C = {
+    val i = keySet.rangeFrom(to).iterator
+    if (i.isEmpty) return coll
+    val next = i.next()
+    if (ordering.compare(next, to) == 0)
+      if (i.isEmpty) coll
+      else rangeUntil(i.next())
+    else
+      rangeUntil(next)
+  }
+
+  override def keySet: SortedSet[K] = new KeySortedSet
+
+  /** The implementation class of the set returned by `keySet` */
+  protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet {
+    def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that))
+    def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = {
+      val map = SortedMapOps.this.rangeImpl(from, until)
+      new map.KeySortedSet
+    }
+  }
+
+  /** A generic trait that is reused by sorted keyset implementations */
+  protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] =>
+    implicit def ordering: Ordering[K] = SortedMapOps.this.ordering
+    def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start)
+  }
+
+  // And finally, we add new overloads taking an ordering
+  /** Builds a new sorted map by applying a function to all elements of this $coll.
+    *
+    * @param f the function to apply to each element.
+ * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new sorted map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + })(ordering) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) +} + +object SortedMapOps { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." 
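+
+  // A hypothetical sketch for context: `TreeMap(1 -> "a", 2 -> "b").map { case (k, v) => (k.toString, v) }`
+  // resolves an Ordering[String] and yields a SortedMap again; when no Ordering exists for the
+  // new key type, the @implicitNotFound message `ordMsg` above is reported instead.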
+ + /** Specializes `MapWithFilter` for sorted Map collections + * + * @define coll sorted map collection + */ + class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( + self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], + p: ((K, V)) => Boolean + ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { + + def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + self.sortedMapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + self.sortedMapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] = + new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/tests/pos-special/stdlib/collection/SortedOps.scala b/tests/pos-special/stdlib/collection/SortedOps.scala new file mode 100644 index 000000000000..64e6376be042 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedOps.scala @@ -0,0 +1,90 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + + +/** Base trait for sorted collections */ +trait SortedOps[A, +C] { + + def ordering: Ordering[A] + + /** Returns the first key of the collection. */ + def firstKey: A + + /** Returns the last key of the collection. */ + def lastKey: A + + /** Comparison function that orders keys. */ + @deprecated("Use ordering.compare instead", "2.13.0") + @deprecatedOverriding("Use ordering.compare instead", "2.13.0") + @inline def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) + + /** Creates a ranged projection of this collection. Any mutations in the + * ranged projection will update this collection and vice versa. + * + * Note: keys are not guaranteed to be consistent between this collection + * and the projection. This is the case for buffers where indexing is + * relative to the projection. + * + * @param from The lower-bound (inclusive) of the ranged projection. + * `None` if there is no lower bound. + * @param until The upper-bound (exclusive) of the ranged projection. + * `None` if there is no upper bound. + */ + def rangeImpl(from: Option[A], until: Option[A]): C + + /** Creates a ranged projection of this collection with both a lower-bound + * and an upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + * @param until The upper-bound (exclusive) of the ranged projection. + */ + def range(from: A, until: A): C = rangeImpl(Some(from), Some(until)) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeFrom", "2.13.0") + final def from(from: A): C = rangeFrom(from) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + def rangeFrom(from: A): C = rangeImpl(Some(from), None) + + /** Creates a ranged projection of this collection with no lower-bound. 
+ * + * @param until The upper-bound (exclusive) of the ranged projection. + */ + @deprecated("Use rangeUntil", "2.13.0") + final def until(until: A): C = rangeUntil(until) + + /** Creates a ranged projection of this collection with no lower-bound. + * + * @param until The upper-bound (exclusive) of the ranged projection. + */ + def rangeUntil(until: A): C = rangeImpl(None, Some(until)) + + /** Create a range projection of this collection with no lower-bound. + * @param to The upper-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeTo", "2.13.0") + final def to(to: A): C = rangeTo(to) + + /** Create a range projection of this collection with no lower-bound. + * @param to The upper-bound (inclusive) of the ranged projection. + */ + def rangeTo(to: A): C +} diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala new file mode 100644 index 000000000000..c98ca9ae5523 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedSet.scala @@ -0,0 +1,189 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.{implicitNotFound, nowarn} +import scala.annotation.unchecked.uncheckedVariance + +/** Base type of sorted sets */ +trait SortedSet[A] extends Set[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + def unsorted: Set[A] = this + + def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedSet" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering => + (ss canEqual this) && + (this.size == ss.size) && { + val i1 = this.iterator + val i2 = ss.iterator + var allEqual = true + while (allEqual && i1.hasNext) + allEqual = ordering.equiv(i1.next(), i2.next()) + allEqual + } + case _ => + super.equals(that) + } + +} + +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with SortedOps[A, C] { + + /** The companion object of this sorted set, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedIterableFactory: SortedIterableFactory[CC] + + def unsorted: Set[A] + + /** + * Creates an iterator that contains all values from this collection + * greater than or equal to `start` according to the ordering of + * this collection. x.iteratorFrom(y) is equivalent to but will usually + * be more efficient than x.from(y).iterator + * + * @param start The lower-bound (inclusive) of the iterator + */ + def iteratorFrom(start: A): Iterator[A] + + @deprecated("Use `iteratorFrom` instead.", "2.13.0") + @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) + + def firstKey: A = head + def lastKey: A = last + + /** Find the smallest element larger than or equal to a given key. + * @param key The given key. 
+ * @return `None` if there is no such node. + */ + def minAfter(key: A): Option[A] = rangeFrom(key).headOption + + /** Find the largest element less than a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def maxBefore(key: A): Option[A] = rangeUntil(key).lastOption + + override def min[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.min") + else if (ord == ordering) head + else if (ord isReverseOf ordering) last + else super.min[B] // need the type annotation for it to infer the correct implicit + + override def max[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.max") + else if (ord == ordering) last + else if (ord isReverseOf ordering) head + else super.max[B] // need the type annotation for it to infer the correct implicit + + def rangeTo(to: A): C = { + val i = rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + /** Builds a new sorted collection by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Map(this, f)) + + /** Builds a new sorted collection by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.FlatMap(this, f)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. + * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound bcs of VarianceNote + sortedIterableFactory.from(that match { + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + /** Builds a new sorted collection by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. 
+ * The order of the elements is preserved. + */ + def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Collect(this, pf)) +} + +object SortedSetOps { + private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[${A}] first by calling `unsorted`." + private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(${A}, ${B})]. You may want to upcast to a Set[${A}] first by calling `unsorted`." + + /** Specialize `WithFilter` for sorted collections + * + * @define coll sorted collection + */ + class WithFilter[+A, +IterableCC[_], +CC[X] <: SortedSet[X]]( + self: SortedSetOps[A, CC, _] with IterableOps[A, IterableCC, _], + p: A => Boolean + ) extends IterableOps.WithFilter[A, IterableCC](self, p) { + + def map[B : Ordering](f: A => B): CC[B] = + self.sortedIterableFactory.from(new View.Map(filtered, f)) + + def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] = + self.sortedIterableFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC] = + new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a)) + } + +} + +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](immutable.SortedSet) + diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala new file mode 100644 index 000000000000..0eeb8a44cb72 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Stepper.scala @@ -0,0 +1,368 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer} +import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator} +import java.{lang => jl} + +import scala.collection.Stepper.EfficientSplit + +/** Steppers exist to enable creating Java streams over Scala collections, see + * [[scala.jdk.StreamConverters]]. Besides that use case, they allow iterating over collections + * holding unboxed primitives (e.g., `Array[Int]`) without boxing the elements. + * + * Steppers have an iterator-like interface with methods `hasStep` and `nextStep()`. The difference + * to iterators - and the reason `Stepper` is not a subtype of `Iterator` - is that there are + * hand-specialized variants of `Stepper` for `Int`, `Long` and `Double` ([[IntStepper]], etc.). + * These enable iterating over collections holding unboxed primitives (e.g., Arrays, + * [[scala.jdk.Accumulator]]s) without boxing the elements. + * + * The selection of primitive types (`Int`, `Long` and `Double`) matches the hand-specialized + * variants of Java Streams ([[java.util.stream.Stream]], [[java.util.stream.IntStream]], etc.) + * and the corresponding Java Spliterators ([[java.util.Spliterator]], [[java.util.Spliterator.OfInt]], etc.). + * + * Steppers can be converted to Scala Iterators, Java Iterators and Java Spliterators. Primitive + * Steppers are converted to the corresponding primitive Java Iterators and Spliterators. 
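+ *
+ * For example (a hypothetical sketch; assumes the standard `stepper` extension on arrays):
+ * {{{
+ *   val st = Array(1, 2, 3).stepper   // an IntStepper, so elements are not boxed
+ *   var sum = 0
+ *   while (st.hasStep) sum += st.nextStep()
+ *   // sum == 6
+ * }}}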
+ * + * @tparam A the element type of the Stepper + */ +trait Stepper[@specialized(Double, Int, Long) +A] { + /** Check if there's an element available. */ + def hasStep: Boolean + + /** Return the next element and advance the stepper */ + def nextStep(): A + + /** Split this stepper, if applicable. The elements of the current Stepper are split up between + * the resulting Stepper and the current stepper. + * + * May return `null`, in which case the current Stepper yields the same elements as before. + * + * See method `trySplit` in [[java.util.Spliterator]]. + */ + def trySplit(): Stepper[A] + + /** Returns an estimate of the number of elements of this Stepper, or [[Long.MaxValue]]. See + * method `estimateSize` in [[java.util.Spliterator]]. + */ + def estimateSize: Long + + /** Returns a set of characteristics of this Stepper and its elements. See method + * `characteristics` in [[java.util.Spliterator]]. + */ + def characteristics: Int + + /** Returns a [[java.util.Spliterator]] corresponding to this Stepper. + * + * Note that the return type is `Spliterator[_]` instead of `Spliterator[A]` to allow returning + * a [[java.util.Spliterator.OfInt]] (which is a `Spliterator[Integer]`) in the subclass [[IntStepper]] + * (which is a `Stepper[Int]`). + */ + def spliterator[B >: A]: Spliterator[_] + + /** Returns a Java [[java.util.Iterator]] corresponding to this Stepper. + * + * Note that the return type is `Iterator[_]` instead of `Iterator[A]` to allow returning + * a [[java.util.PrimitiveIterator.OfInt]] (which is a `Iterator[Integer]`) in the subclass + * [[IntStepper]] (which is a `Stepper[Int]`). + */ + def javaIterator[B >: A]: JIterator[_] + + /** Returns an [[Iterator]] corresponding to this Stepper. Note that Iterators corresponding to + * primitive Steppers box the elements. + */ + def iterator: Iterator[A] = new AbstractIterator[A] { + def hasNext: Boolean = hasStep + def next(): A = nextStep() + } +} + +object Stepper { + /** A marker trait that indicates that a `Stepper` can call `trySplit` with at worst O(log N) time + * and space complexity, and that the division is likely to be reasonably even. Steppers marked + * with `EfficientSplit` can be converted to parallel streams with the `asJavaParStream` method + * defined in [[scala.jdk.StreamConverters]]. + */ + trait EfficientSplit + + private[collection] final def throwNSEE(): Nothing = throw new NoSuchElementException("Empty Stepper") + + /* These adapter classes can wrap an AnyStepper of a numeric type into a possibly widened primitive Stepper type. + * This provides a basis for more efficient stream processing on unboxed values provided that the original source + * of the data is boxed. In other cases native implementations of the primitive stepper types should be provided + * (see for example IntArrayStepper and WidenedByteArrayStepper). 
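+ *
+ * For instance (a hypothetical sketch): wrapping an AnyStepper[Float] in the
+ * UnboxingFloatStepper below yields a DoubleStepper, so each boxed float is
+ * consumed as a widened primitive double.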
*/ + + private[collection] class UnboxingDoubleStepper(st: AnyStepper[Double]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingDoubleStepper(s) + } + } + + private[collection] class UnboxingIntStepper(st: AnyStepper[Int]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingIntStepper(s) + } + } + + private[collection] class UnboxingLongStepper(st: AnyStepper[Long]) extends LongStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): LongStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingLongStepper(s) + } + } + + private[collection] class UnboxingByteStepper(st: AnyStepper[Byte]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingByteStepper(s) + } + } + + private[collection] class UnboxingCharStepper(st: AnyStepper[Char]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingCharStepper(s) + } + } + + private[collection] class UnboxingShortStepper(st: AnyStepper[Short]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingShortStepper(s) + } + } + + private[collection] class UnboxingFloatStepper(st: AnyStepper[Float]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingFloatStepper(s) + } + } +} + +/** A Stepper for arbitrary element types. See [[Stepper]]. 
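+ *
+ * A hypothetical sketch of boxing a primitive stepper back into an `AnyStepper`:
+ * {{{
+ *   val ints: IntStepper = Array(1, 2, 3).stepper
+ *   val boxed: AnyStepper[Int] = AnyStepper.ofSeqIntStepper(ints)
+ * }}}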
*/ +trait AnyStepper[+A] extends Stepper[A] { + def trySplit(): AnyStepper[A] + + def spliterator[B >: A]: Spliterator[B] = new AnyStepper.AnyStepperSpliterator(this) + + def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { + def hasNext: Boolean = hasStep + def next(): B = nextStep() + } +} + +object AnyStepper { + class AnyStepperSpliterator[A](s: AnyStepper[A]) extends Spliterator[A] { + def tryAdvance(c: Consumer[_ >: A]): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + def trySplit(): Spliterator[A] = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: A]): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + } + + def ofSeqDoubleStepper(st: DoubleStepper): AnyStepper[Double] = new BoxedDoubleStepper(st) + def ofParDoubleStepper(st: DoubleStepper with EfficientSplit): AnyStepper[Double] with EfficientSplit = new BoxedDoubleStepper(st) with EfficientSplit + + def ofSeqIntStepper(st: IntStepper): AnyStepper[Int] = new BoxedIntStepper(st) + def ofParIntStepper(st: IntStepper with EfficientSplit): AnyStepper[Int] with EfficientSplit = new BoxedIntStepper(st) with EfficientSplit + + def ofSeqLongStepper(st: LongStepper): AnyStepper[Long] = new BoxedLongStepper(st) + def ofParLongStepper(st: LongStepper with EfficientSplit): AnyStepper[Long] with EfficientSplit = new BoxedLongStepper(st) with EfficientSplit + + private[collection] class BoxedDoubleStepper(st: DoubleStepper) extends AnyStepper[Double] { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Double] = { + val s = st.trySplit() + if (s == null) null else new BoxedDoubleStepper(s) + } + } + + private[collection] class BoxedIntStepper(st: IntStepper) extends AnyStepper[Int] { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Int] = { + val s = st.trySplit() + if (s == null) null else new BoxedIntStepper(s) + } + } + + private[collection] class BoxedLongStepper(st: LongStepper) extends AnyStepper[Long] { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Long] = { + val s = st.trySplit() + if (s == null) null else new BoxedLongStepper(s) + } + } +} + +/** A Stepper for Ints. See [[Stepper]]. 
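+ *
+ * For example (a hypothetical sketch of Java interop):
+ * {{{
+ *   val st: IntStepper = Array(1, 2, 3).stepper
+ *   val it: java.util.PrimitiveIterator.OfInt = st.javaIterator  // iterates unboxed ints
+ * }}}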
*/ +trait IntStepper extends Stepper[Int] { + def trySplit(): IntStepper + + def spliterator[B >: Int]: Spliterator.OfInt = new IntStepper.IntStepperSpliterator(this) + + def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { + def hasNext: Boolean = hasStep + def nextInt(): Int = nextStep() + } +} +object IntStepper { + class IntStepperSpliterator(s: IntStepper) extends Spliterator.OfInt { + def tryAdvance(c: IntConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Integer]): Boolean = (c: AnyRef) match { + case ic: IntConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfInt = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: IntConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Integer]): Unit = (c: AnyRef) match { + case ic: IntConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Doubles. See [[Stepper]]. */ +trait DoubleStepper extends Stepper[Double] { + def trySplit(): DoubleStepper + + def spliterator[B >: Double]: Spliterator.OfDouble = new DoubleStepper.DoubleStepperSpliterator(this) + + def javaIterator[B >: Double]: PrimitiveIterator.OfDouble = new PrimitiveIterator.OfDouble { + def hasNext: Boolean = hasStep + def nextDouble(): Double = nextStep() + } +} + +object DoubleStepper { + class DoubleStepperSpliterator(s: DoubleStepper) extends Spliterator.OfDouble { + def tryAdvance(c: DoubleConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Double]): Boolean = (c: AnyRef) match { + case ic: DoubleConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfDouble = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: DoubleConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Double]): Unit = (c: AnyRef) match { + case ic: DoubleConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Double.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Longs. See [[Stepper]]. 
*/ +trait LongStepper extends Stepper[Long] { + def trySplit(): LongStepper + + def spliterator[B >: Long]: Spliterator.OfLong = new LongStepper.LongStepperSpliterator(this) + + def javaIterator[B >: Long]: PrimitiveIterator.OfLong = new PrimitiveIterator.OfLong { + def hasNext: Boolean = hasStep + def nextLong(): Long = nextStep() + } +} + +object LongStepper { + class LongStepperSpliterator(s: LongStepper) extends Spliterator.OfLong { + def tryAdvance(c: LongConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Long]): Boolean = (c: AnyRef) match { + case ic: LongConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfLong = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: LongConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Long]): Unit = (c: AnyRef) match { + case ic: LongConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Long.valueOf(s.nextStep())) } + } + } +} diff --git a/tests/pos-special/stdlib/collection/StepperShape.scala b/tests/pos-special/stdlib/collection/StepperShape.scala new file mode 100644 index 000000000000..6712073b09e4 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StepperShape.scala @@ -0,0 +1,114 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.{lang => jl} + +import scala.collection.Stepper.EfficientSplit + +/** An implicit StepperShape instance is used in the [[IterableOnce.stepper]] to return a possibly + * specialized Stepper `S` according to the element type `T`. + */ +sealed trait StepperShape[-T, S <: Stepper[_]] { + /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */ + def shape: StepperShape.Shape + + /** Create an unboxing primitive sequential Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. */ + def seqUnbox(st: AnyStepper[T]): S + + /** Create an unboxing primitive parallel (i.e. `with EfficientSplit`) Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. 
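+   *
+   * A round-trip sketch using the sequential variant `seqUnbox` (assumes the
+   * `intStepperShape` instance defined in the companion below):
+   * {{{
+   *   val boxed: AnyStepper[Int] = AnyStepper.ofSeqIntStepper(Vector(1, 2, 3).stepper)
+   *   val prim: IntStepper = implicitly[StepperShape[Int, IntStepper]].seqUnbox(boxed)
+   * }}}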
*/ + def parUnbox(st: AnyStepper[T] with EfficientSplit): S with EfficientSplit +} + +object StepperShape extends StepperShapeLowPriority1 { + class Shape private[StepperShape] (private val s: Int) extends AnyVal + + // reference + val ReferenceShape = new Shape(0) + + // primitive + val IntShape = new Shape(1) + val LongShape = new Shape(2) + val DoubleShape = new Shape(3) + + // widening + val ByteShape = new Shape(4) + val ShortShape = new Shape(5) + val CharShape = new Shape(6) + val FloatShape = new Shape(7) + + implicit val intStepperShape: StepperShape[Int, IntStepper] = new StepperShape[Int, IntStepper] { + def shape = IntShape + def seqUnbox(st: AnyStepper[Int]): IntStepper = new Stepper.UnboxingIntStepper(st) + def parUnbox(st: AnyStepper[Int] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingIntStepper(st) with EfficientSplit + } + implicit val jIntegerStepperShape: StepperShape[jl.Integer, IntStepper] = intStepperShape.asInstanceOf[StepperShape[jl.Integer, IntStepper]] + + implicit val longStepperShape: StepperShape[Long, LongStepper] = new StepperShape[Long, LongStepper] { + def shape = LongShape + def seqUnbox(st: AnyStepper[Long]): LongStepper = new Stepper.UnboxingLongStepper(st) + def parUnbox(st: AnyStepper[Long] with EfficientSplit): LongStepper with EfficientSplit = new Stepper.UnboxingLongStepper(st) with EfficientSplit + } + implicit val jLongStepperShape: StepperShape[jl.Long, LongStepper] = longStepperShape.asInstanceOf[StepperShape[jl.Long, LongStepper]] + + implicit val doubleStepperShape: StepperShape[Double, DoubleStepper] = new StepperShape[Double, DoubleStepper] { + def shape = DoubleShape + def seqUnbox(st: AnyStepper[Double]): DoubleStepper = new Stepper.UnboxingDoubleStepper(st) + def parUnbox(st: AnyStepper[Double] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingDoubleStepper(st) with EfficientSplit + } + implicit val jDoubleStepperShape: StepperShape[jl.Double, DoubleStepper] = doubleStepperShape.asInstanceOf[StepperShape[jl.Double, DoubleStepper]] + + implicit val byteStepperShape: StepperShape[Byte, IntStepper] = new StepperShape[Byte, IntStepper] { + def shape = ByteShape + def seqUnbox(st: AnyStepper[Byte]): IntStepper = new Stepper.UnboxingByteStepper(st) + def parUnbox(st: AnyStepper[Byte] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingByteStepper(st) with EfficientSplit + } + implicit val jByteStepperShape: StepperShape[jl.Byte, IntStepper] = byteStepperShape.asInstanceOf[StepperShape[jl.Byte, IntStepper]] + + implicit val shortStepperShape: StepperShape[Short, IntStepper] = new StepperShape[Short, IntStepper] { + def shape = ShortShape + def seqUnbox(st: AnyStepper[Short]): IntStepper = new Stepper.UnboxingShortStepper(st) + def parUnbox(st: AnyStepper[Short] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingShortStepper(st) with EfficientSplit + } + implicit val jShortStepperShape: StepperShape[jl.Short, IntStepper] = shortStepperShape.asInstanceOf[StepperShape[jl.Short, IntStepper]] + + implicit val charStepperShape: StepperShape[Char, IntStepper] = new StepperShape[Char, IntStepper] { + def shape = CharShape + def seqUnbox(st: AnyStepper[Char]): IntStepper = new Stepper.UnboxingCharStepper(st) + def parUnbox(st: AnyStepper[Char] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingCharStepper(st) with EfficientSplit + } + implicit val jCharacterStepperShape: StepperShape[jl.Character, IntStepper] = 
charStepperShape.asInstanceOf[StepperShape[jl.Character, IntStepper]] + + implicit val floatStepperShape: StepperShape[Float, DoubleStepper] = new StepperShape[Float, DoubleStepper] { + def shape = FloatShape + def seqUnbox(st: AnyStepper[Float]): DoubleStepper = new Stepper.UnboxingFloatStepper(st) + def parUnbox(st: AnyStepper[Float] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingFloatStepper(st) with EfficientSplit + } + implicit val jFloatStepperShape: StepperShape[jl.Float, DoubleStepper] = floatStepperShape.asInstanceOf[StepperShape[jl.Float, DoubleStepper]] +} + +trait StepperShapeLowPriority1 extends StepperShapeLowPriority2 { + implicit def anyStepperShape[T]: StepperShape[T, AnyStepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, AnyStepper[T]]] +} + +trait StepperShapeLowPriority2 { + implicit def baseStepperShape[T]: StepperShape[T, Stepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, Stepper[T]]] + + protected val anyStepperShapePrototype: StepperShape[AnyRef, Stepper[AnyRef]] = new StepperShape[AnyRef, Stepper[AnyRef]] { + def shape = StepperShape.ReferenceShape + def seqUnbox(st: AnyStepper[AnyRef]): Stepper[AnyRef] = st + def parUnbox(st: AnyStepper[AnyRef] with EfficientSplit): Stepper[AnyRef] with EfficientSplit = st + } +} \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala index 5b504a2469b5..a09766cfa912 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala @@ -16,7 +16,6 @@ package collection import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics -import language.experimental.captureChecking /** * Trait that overrides iterable operations to take advantage of strict builders. @@ -28,7 +27,6 @@ import language.experimental.captureChecking trait StrictOptimizedIterableOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { - this: StrictOptimizedIterableOps[A, CC, C] => // Optimized, push-based version of `partition` override def partition(p: A => Boolean): (C, C) = { @@ -57,7 +55,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] (first.result(), second.result()) } - override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1], CC[A2]) = { + override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { val first = iterableFactory.newBuilder[A1] val second = iterableFactory.newBuilder[A2] foreach { a => @@ -68,7 +66,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] (first.result(), second.result()) } - override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { val b1 = iterableFactory.newBuilder[A1] val b2 = iterableFactory.newBuilder[A2] val b3 = iterableFactory.newBuilder[A3] @@ -104,7 +102,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def flatMap[B](f: A => IterableOnce[B]^): CC[B] = + override def flatMap[B](f: A => IterableOnce[B]): CC[B] = strictOptimizedFlatMap(iterableFactory.newBuilder, f) /** @@ -114,7 +112,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. 
`List[String]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]^): C2 = { + @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]): C2 = { val it = iterator while (it.hasNext) { b ++= f(it.next()) @@ -129,13 +127,13 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[Int]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B]^, b: mutable.Builder[B, C2]): C2 = { + @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B], b: mutable.Builder[B, C2]): C2 = { b ++= this b ++= that b.result() } - override def collect[B](pf: PartialFunction[A, B]^): CC[B] = + override def collect[B](pf: PartialFunction[A, B]): CC[B] = strictOptimizedCollect(iterableFactory.newBuilder, pf) /** @@ -145,7 +143,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[String]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]^): C2 = { + @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]): C2 = { val marker = Statics.pfMarker val it = iterator while (it.hasNext) { @@ -156,7 +154,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def flatten[B](implicit toIterableOnce: A -> IterableOnce[B]): CC[B] = + override def flatten[B](implicit toIterableOnce: A => IterableOnce[B]): CC[B] = strictOptimizedFlatten(iterableFactory.newBuilder) /** @@ -166,7 +164,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[Int]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A -> IterableOnce[B]): C2 = { + @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A => IterableOnce[B]): C2 = { val it = iterator while (it.hasNext) { b ++= toIterableOnce(it.next()) @@ -174,7 +172,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)] = + override def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = strictOptimizedZip(that, iterableFactory.newBuilder[(A, B)]) /** @@ -184,7 +182,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. 
`List[(Int, String)]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B]^, b: mutable.Builder[(A, B), C2]): C2 = { + @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B], b: mutable.Builder[(A, B), C2]): C2 = { val it1 = iterator val it2 = that.iterator while (it1.hasNext && it2.hasNext) { diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala new file mode 100644 index 000000000000..1f5791bbb718 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala @@ -0,0 +1,48 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** + * Trait that overrides map operations to take advantage of strict builders. + * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] + extends MapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + strictOptimizedMap(mapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + strictOptimizedFlatMap(mapFactory.newBuilder, f) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = + strictOptimizedConcat(suffix, mapFactory.newBuilder) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = + strictOptimizedCollect(mapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val b = mapFactory.newBuilder[K, V1] + b ++= this + b += elem1 + b += elem2 + if (elems.nonEmpty) b ++= elems + b.result() + } +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala index 50ddbca30f9e..396e53885081 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -11,18 +11,17 @@ */ package scala.collection -import language.experimental.captureChecking /** * Trait that overrides operations on sequences in order * to take advantage of strict builders. 
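+ *
+ * A sketch of the observable difference (strict `List` versus its lazy view):
+ * {{{
+ *   List(1, 2).map(_ + 1)        // eagerly builds List(2, 3) with a builder
+ *   List(1, 2).view.map(_ + 1)   // defers; elements are computed on traversal
+ * }}}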
*/ trait StrictOptimizedSeqOps [+A, +CC[_], +C] - extends AnyRef + extends Any with SeqOps[A, CC, C] with StrictOptimizedIterableOps[A, CC, C] { - override def distinctBy[B](f: A -> B): C = { + override def distinctBy[B](f: A => B): C = { val builder = newSpecificBuilder val seen = mutable.HashSet.empty[B] val it = this.iterator @@ -53,10 +52,10 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] b.result() } - override def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] = + override def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = strictOptimizedConcat(suffix, iterableFactory.newBuilder) - override def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = { + override def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = { val b = iterableFactory.newBuilder[B] b ++= prefix b ++= this diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala new file mode 100644 index 000000000000..356bd2883578 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** + * Trait that overrides set operations to take advantage of strict builders. + * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSetOps[A, +CC[_], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def concat(that: IterableOnce[A]): C = + strictOptimizedConcat(that, newSpecificBuilder) + +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala new file mode 100644 index 000000000000..1beaf1662abe --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound + +/** + * Trait that overrides sorted map operations to take advantage of strict builders. 
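+ *
+ * Usage sketch (assumes `immutable.TreeMap`, which mixes this trait in); an
+ * `Ordering` for the new key type is required, so the result is sorted again:
+ * {{{
+ *   import scala.collection.immutable.TreeMap
+ *   TreeMap(1 -> "a", 2 -> "b").map { case (k, v) => (-k, v) }   // TreeMap(-2 -> b, -1 -> a)
+ * }}}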
+ * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedMap(sortedMapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) + + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering)) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedCollect(sortedMapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] + if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] + } +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala new file mode 100644 index 000000000000..ded7deabccca --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.implicitNotFound +import scala.annotation.unchecked.uncheckedVariance + +/** + * Trait that overrides sorted set operations to take advantage of strict builders. 
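+ *
+ * Usage sketch (assumes `immutable.TreeSet`, which mixes this trait in):
+ * {{{
+ *   import scala.collection.immutable.TreeSet
+ *   TreeSet(3, 1, 2).map(_ * -1)   // TreeSet(-3, -2, -1), rebuilt and re-sorted eagerly
+ * }}}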
+ * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { + + override def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedMap(sortedIterableFactory.newBuilder, f) + + override def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedFlatMap(sortedIterableFactory.newBuilder, f) + + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = + strictOptimizedZip(that, sortedIterableFactory.newBuilder[(A, B)]) + + override def collect[B](pf: PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedCollect(sortedIterableFactory.newBuilder, pf) + +} diff --git a/tests/pos-special/stdlib/collection/StringOps.scala b/tests/pos-special/stdlib/collection/StringOps.scala index f570531def98..f0be485af8ae 100644 --- a/tests/pos-special/stdlib/collection/StringOps.scala +++ b/tests/pos-special/stdlib/collection/StringOps.scala @@ -22,7 +22,6 @@ import scala.collection.mutable.StringBuilder import scala.math.{ScalaNumber, max, min} import scala.reflect.ClassTag import scala.util.matching.Regex -import language.experimental.captureChecking object StringOps { // just statics for companion class. @@ -124,7 +123,7 @@ object StringOps { * @return a new collection resulting from applying the given collection-valued function * `f` to each char of this string and concatenating the results. */ - def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = { + def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { val len = s.length val b = immutable.IndexedSeq.newBuilder[B] var i = 0 @@ -156,7 +155,7 @@ object StringOps { } /** Creates a new non-strict filter which combines this filter with the given predicate. */ - def withFilter(q: Char => Boolean): WithFilter^{p, q} = new WithFilter(a => p(a) && q(a), s) + def withFilter(q: Char => Boolean): WithFilter = new WithFilter(a => p(a) && q(a), s) } /** Avoid an allocation in [[collect]]. */ @@ -239,7 +238,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new collection resulting from applying the given collection-valued function * `f` to each char of this string and concatenating the results. */ - def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = { + def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { val len = s.length val b = immutable.IndexedSeq.newBuilder[B] var i = 0 @@ -314,7 +313,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new collection which contains all chars * of this string followed by all elements of `suffix`. */ - def concat[B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = { + def concat[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = { val b = immutable.IndexedSeq.newBuilder[B] val k = suffix.knownSize b.sizeHint(s.length + (if(k >= 0) k else 16)) @@ -330,7 +329,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new string which contains all chars * of this string followed by all chars of `suffix`. 
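+   *
+   * For example (a sketch; for a `Char` collection, overload resolution picks
+   * this `String`-returning overload):
+   * {{{
+   *   "ab".concat(List('c', 'd'))   // "abcd"
+   * }}}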
*/ - def concat(suffix: IterableOnce[Char]^): String = { + def concat(suffix: IterableOnce[Char]): String = { val k = suffix.knownSize val sb = new JStringBuilder(s.length + (if(k >= 0) k else 16)) sb.append(s) @@ -348,10 +347,10 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def concat(suffix: String): String = s + suffix /** Alias for `concat` */ - @`inline` def ++[B >: Char](suffix: Iterable[B]^): immutable.IndexedSeq[B] = concat(suffix) + @`inline` def ++[B >: Char](suffix: Iterable[B]): immutable.IndexedSeq[B] = concat(suffix) /** Alias for `concat` */ - @`inline` def ++(suffix: IterableOnce[Char]^): String = concat(suffix) + @`inline` def ++(suffix: IterableOnce[Char]): String = concat(suffix) /** Alias for `concat` */ def ++(xs: String): String = concat(xs) @@ -423,7 +422,7 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def +: (c: Char): String = prepended(c) /** A copy of the string with all elements from a collection prepended */ - def prependedAll[B >: Char](prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = { + def prependedAll[B >: Char](prefix: IterableOnce[B]): immutable.IndexedSeq[B] = { val b = immutable.IndexedSeq.newBuilder[B] val k = prefix.knownSize b.sizeHint(s.length + (if(k >= 0) k else 16)) @@ -433,7 +432,7 @@ final class StringOps(private val s: String) extends AnyVal { } /** Alias for `prependedAll` */ - @`inline` def ++: [B >: Char] (prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = prependedAll(prefix) + @`inline` def ++: [B >: Char] (prefix: IterableOnce[B]): immutable.IndexedSeq[B] = prependedAll(prefix) /** A copy of the string with another string prepended */ def prependedAll(prefix: String): String = prefix + s @@ -461,11 +460,11 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def :+ (c: Char): String = appended(c) /** A copy of the string with all elements from a collection appended */ - @`inline` def appendedAll[B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = + @`inline` def appendedAll[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = concat(suffix) /** Alias for `appendedAll` */ - @`inline` def :++ [B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = + @`inline` def :++ [B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = concat(suffix) /** A copy of the string with another string appended */ @@ -487,7 +486,7 @@ final class StringOps(private val s: String) extends AnyVal { * except that `replaced` chars starting from `from` are replaced * by `other`. */ - def patch[B >: Char](from: Int, other: IterableOnce[B]^, replaced: Int): immutable.IndexedSeq[B] = { + def patch[B >: Char](from: Int, other: IterableOnce[B], replaced: Int): immutable.IndexedSeq[B] = { val len = s.length @`inline` def slc(off: Int, length: Int): WrappedString = new WrappedString(s.substring(off, off+length)) @@ -516,7 +515,7 @@ final class StringOps(private val s: String) extends AnyVal { * by `other`. * @note $unicodeunaware */ - def patch(from: Int, other: IterableOnce[Char]^, replaced: Int): String = + def patch(from: Int, other: IterableOnce[Char], replaced: Int): String = patch(from, other.iterator.mkString, replaced) /** Produces a new string where a slice of characters in this string is replaced by another string. @@ -1196,7 +1195,7 @@ final class StringOps(private val s: String) extends AnyVal { * All these operations apply to those chars of this string * which satisfy the predicate `p`. 
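+   *
+   * A small sketch (`WithFilter` supports only `map`, `flatMap`, `foreach` and `withFilter`):
+   * {{{
+   *   "a1b2".withFilter(_.isDigit).map(_.asDigit)   // IndexedSeq(1, 2)
+   * }}}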
*/ - def withFilter(p: Char => Boolean): StringOps.WithFilter^{p} = new StringOps.WithFilter(p, s) + def withFilter(p: Char => Boolean): StringOps.WithFilter = new StringOps.WithFilter(p, s) /** The rest of the string without its first char. * @note $unicodeunaware @@ -1247,7 +1246,7 @@ final class StringOps(private val s: String) extends AnyVal { def inits: Iterator[String] = iterateUntilEmpty(_.init) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: String => String): Iterator[String]^{f} = + private[this] def iterateUntilEmpty(f: String => String): Iterator[String] = Iterator.iterate(s)(f).takeWhile(x => !x.isEmpty) ++ Iterator.single("") /** Selects all chars of this string which satisfy a predicate. */ @@ -1465,7 +1464,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]^): LazyZip2[Char, B, String]^{that} = new LazyZip2(s, new WrappedString(s), that) + def lazyZip[B](that: Iterable[B]): LazyZip2[Char, B, String] = new LazyZip2(s, new WrappedString(s), that) /* ************************************************************************************************************ @@ -1513,7 +1512,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new string consisting of all the chars of this string without duplicates. * @note $unicodeunaware */ - def distinctBy[B](f: Char -> B): String = new WrappedString(s).distinctBy(f).unwrap + def distinctBy[B](f: Char => B): String = new WrappedString(s).distinctBy(f).unwrap /** Sorts the characters of this string according to an Ordering. * diff --git a/tests/pos-special/stdlib/collection/StringParsers.scala b/tests/pos-special/stdlib/collection/StringParsers.scala new file mode 100644 index 000000000000..5479a58d485f --- /dev/null +++ b/tests/pos-special/stdlib/collection/StringParsers.scala @@ -0,0 +1,319 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.tailrec + +/** A module containing the implementations of parsers from strings to numeric types, and boolean + */ +private[scala] object StringParsers { + + //compile-time constant helpers + + //Int.MinValue == -2147483648 + private final val intOverflowBoundary = -214748364 + private final val intOverflowDigit = 9 + //Long.MinValue == -9223372036854775808L + private final val longOverflowBoundary = -922337203685477580L + private final val longOverflowDigit = 9 + + @inline + private[this] final def decValue(ch: Char): Int = java.lang.Character.digit(ch, 10) + + @inline + private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = { + @tailrec + def rec(i: Int, agg: Int): Option[Int] = + if (agg < min) None + else if (i == len) { + if (!isPositive) Some(agg) + else if (agg == min) None + else Some(-agg) + } + else { + val digit = decValue(from.charAt(i)) + if (digit == -1) None + else rec(i + 1, agg * 10 - digit) + } + rec(1, agg) + } + + @inline + private[this] final def isDigit(c: Char): Boolean = c >= '0' && c <= '9' + + //bool + @inline + final def parseBool(from: String): Option[Boolean] = + if (from.equalsIgnoreCase("true")) Some(true) + else if (from.equalsIgnoreCase("false")) Some(false) + else None + + //integral types + final def parseByte(from: String): Option[Byte] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toByte) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, true, Byte.MinValue).map(_.toByte) + else if (first == '+') stepToOverflow(from, len, 0, true, Byte.MinValue).map(_.toByte) + else if (first == '-') stepToOverflow(from, len, 0, false, Byte.MinValue).map(_.toByte) + else None + } + } + + final def parseShort(from: String): Option[Short] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toShort) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, true, Short.MinValue).map(_.toShort) + else if (first == '+') stepToOverflow(from, len, 0, true, Short.MinValue).map(_.toShort) + else if (first == '-') stepToOverflow(from, len, 0, false, Short.MinValue).map(_.toShort) + else None + } + } + + final def parseInt(from: String): Option[Int] = { + val len = from.length() + + @tailrec + def step(i: Int, agg: Int, isPositive: Boolean): Option[Int] = { + if (i == len) { + if (!isPositive) Some(agg) + else if (agg == Int.MinValue) None + else Some(-agg) + } + else if (agg < intOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == intOverflowBoundary && digit == intOverflowDigit)) None + else step(i + 1, (agg * 10) - digit, isPositive) + } + } + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, true) + else if (first == '+') step(1, 0, true) + else if (first == '-') step(1, 0, false) + else None + } + } + + final def parseLong(from: String): Option[Long] = { + //like parseInt, but Longer + val len = from.length() + + @tailrec + def step(i: Int, agg: Long, isPositive: Boolean): 
Option[Long] = { + if (i == len) { + if (isPositive && agg == Long.MinValue) None + else if (isPositive) Some(-agg) + else Some(agg) + } + else if (agg < longOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == longOverflowBoundary && digit == longOverflowDigit)) None + else step(i + 1, agg * 10 - digit, isPositive) + } + } + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first).toLong + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, true) + else if (first == '+') step(1, 0, true) + else if (first == '-') step(1, 0, false) + else None + } + } + + //floating point + final def checkFloatFormat(format: String): Boolean = { + //indices are tracked with a start index which points *at* the first index + //and an end index which points *after* the last index + //so that slice length === end - start + //thus start == end <=> empty slice + //and format.substring(start, end) is equivalent to the slice + + //some utilities for working with index bounds into the original string + @inline + def forAllBetween(start: Int, end: Int, pred: Char => Boolean): Boolean = { + @tailrec + def rec(i: Int): Boolean = i >= end || pred(format.charAt(i)) && rec(i + 1) + rec(start) + } + + //one after last index for the predicate to hold, or `from` if none hold + //may point after the end of the string + @inline + def skipIndexWhile(predicate: Char => Boolean, from: Int, until: Int): Int = { + @tailrec @inline + def rec(i: Int): Int = if ((i < until) && predicate(format.charAt(i))) rec(i + 1) + else i + rec(from) + } + + + def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') || + (ch >= 'a' && ch <= 'f') || + (ch >= 'A' && ch <= 'F')) + + def prefixOK(startIndex: Int, endIndex: Int): Boolean = { + val len = endIndex - startIndex + (len > 0) && { + //the prefix part is + //hexDigits + //hexDigits. + //hexDigits.hexDigits + //.hexDigits + //but not . + if (format.charAt(startIndex) == '.') { + (len > 1) && forAllBetween(startIndex + 1, endIndex, isHexDigit) + } else { + val noLeading = skipIndexWhile(isHexDigit, startIndex, endIndex) + (noLeading >= endIndex) || + ((format.charAt(noLeading) == '.') && forAllBetween(noLeading + 1, endIndex, isHexDigit)) + } + } + } + + def postfixOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + (forAllBetween(startIndex, endIndex, isDigit)) || { + val startchar = format.charAt(startIndex) + (startchar == '+' || startchar == '-') && + (endIndex - startIndex > 1) && + forAllBetween(startIndex + 1, endIndex, isDigit) + } + } + // prefix [pP] postfix + val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex) + (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex) + } + + def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + //invariant: endIndex > startIndex + + def isExp(c: Char): Boolean = c == 'e' || c == 'E' + + def expOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + val startChar = format.charAt(startIndex) + if (startChar == '+' || startChar == '-') + (endIndex > (startIndex + 1)) && + skipIndexWhile(isDigit, startIndex + 1, endIndex) == endIndex + else skipIndexWhile(isDigit, startIndex, endIndex) == endIndex + } + + //significant can be one of + //* digits.digits + //* .digits + //* digits. + //but not just . 
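+      //for instance (illustrative cases for the checks below):
+      //  "1.5", "1.", ".5", "1.5e3", "2E-4"   are accepted
+      //  ".", ".e3", "1e", "1e+"              are rejected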
+ val startChar = format.charAt(startIndex) + if (startChar == '.') { + val noSignificant = skipIndexWhile(isDigit, startIndex + 1, endIndex) + // a digit is required followed by optional exp + (noSignificant > startIndex + 1) && (noSignificant >= endIndex || + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + ) + } + else if (isDigit(startChar)) { + // one set of digits, then optionally a period, then optionally another set of digits, then optionally an exponent + val noInt = skipIndexWhile(isDigit, startIndex, endIndex) + // just the digits + (noInt == endIndex) || { + if (format.charAt(noInt) == '.') { + val noSignificant = skipIndexWhile(isDigit, noInt + 1, endIndex) + (noSignificant >= endIndex) || //no exponent + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + } else + isExp(format.charAt(noInt)) && expOK(noInt + 1, endIndex) + } + } + else false + } + + //count 0x00 to 0x20 as "whitespace", and nothing else + val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20) + val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1 + + if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false + else { + //all formats can have a sign + val unsigned = { + val startchar = format.charAt(unspacedStart) + if (startchar == '-' || startchar == '+') unspacedStart + 1 else unspacedStart + } + if (unsigned >= unspacedEnd) false + //that's it for NaN and Infinity + else if (format.charAt(unsigned) == 'N') format.substring(unsigned, unspacedEnd) == "NaN" + else if (format.charAt(unsigned) == 'I') format.substring(unsigned, unspacedEnd) == "Infinity" + else { + //all other formats can have a format suffix + val desuffixed = { + val endchar = format.charAt(unspacedEnd - 1) + if (endchar == 'f' || endchar == 'F' || endchar == 'd' || endchar == 'D') unspacedEnd - 1 + else unspacedEnd + } + val len = desuffixed - unsigned + if (len <= 0) false + else if (len >= 2 && (format.charAt(unsigned + 1) == 'x' || format.charAt(unsigned + 1) == 'X')) + format.charAt(unsigned) == '0' && isHexFloatLiteral(unsigned + 2, desuffixed) + else isDecFloatLiteral(unsigned, desuffixed) + } + } + } + + @inline + def parseFloat(from: String): Option[Float] = + if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from)) + else None + + @inline + def parseDouble(from: String): Option[Double] = + if (checkFloatFormat(from)) Some(java.lang.Double.parseDouble(from)) + else None + +} diff --git a/tests/pos-special/stdlib/collection/View.scala b/tests/pos-special/stdlib/collection/View.scala index 85910311a4c3..441790c3c6e5 100644 --- a/tests/pos-special/stdlib/collection/View.scala +++ b/tests/pos-special/stdlib/collection/View.scala @@ -15,8 +15,6 @@ package scala.collection import scala.annotation.{nowarn, tailrec} import scala.collection.mutable.{ArrayBuffer, Builder} import scala.collection.immutable.LazyList -import scala.annotation.unchecked.uncheckedCaptures -import language.experimental.captureChecking /** Views are collections whose transformation operations are non strict: the resulting elements * are evaluated only when the view is effectively traversed (e.g. 
using `foreach` or `foldLeft`), @@ -25,9 +23,8 @@ import language.experimental.captureChecking * @define Coll `View` */ trait View[+A] extends Iterable[A] with IterableOps[A, View, View[A]] with IterableFactoryDefaults[A, View] with Serializable { - this: View[A]^ => - override def view: View[A]^{this} = this + override def view: View[A] = this override def iterableFactory: IterableFactory[View] = View @@ -58,8 +55,8 @@ object View extends IterableFactory[View] { * * @tparam A View element type */ - def fromIteratorProvider[A](it: () => Iterator[A]^): View[A]^{it} = new AbstractView[A] { - def iterator: Iterator[A]^{it} = it() + def fromIteratorProvider[A](it: () => Iterator[A]): View[A] = new AbstractView[A] { + def iterator = it() } /** @@ -70,7 +67,7 @@ object View extends IterableFactory[View] { * * @tparam E View element type */ - def from[E](it: IterableOnce[E]^): View[E]^{it} = it match { + def from[E](it: IterableOnce[E]): View[E] = it match { case it: View[E] => it case it: Iterable[E] => View.fromIteratorProvider(() => it.iterator) case _ => LazyList.from(it).view @@ -100,7 +97,7 @@ object View extends IterableFactory[View] { /** A view with given elements */ @SerialVersionUID(3L) - class Elems[A](xs: A*) extends AbstractView[A], Pure { + class Elems[A](xs: A*) extends AbstractView[A] { def iterator = xs.iterator override def knownSize = xs.knownSize override def isEmpty: Boolean = xs.isEmpty @@ -109,7 +106,7 @@ object View extends IterableFactory[View] { /** A view containing the results of some element computation a number of times. */ @SerialVersionUID(3L) class Fill[A](n: Int)(elem: => A) extends AbstractView[A] { - def iterator: Iterator[A]^{elem} = Iterator.fill(n)(elem) + def iterator = Iterator.fill(n)(elem) override def knownSize: Int = 0 max n override def isEmpty: Boolean = n <= 0 } @@ -117,7 +114,7 @@ object View extends IterableFactory[View] { /** A view containing values of a given function over a range of integer values starting from 0. */ @SerialVersionUID(3L) class Tabulate[A](n: Int)(f: Int => A) extends AbstractView[A] { - def iterator: Iterator[A]^{f} = Iterator.tabulate(n)(f) + def iterator: Iterator[A] = Iterator.tabulate(n)(f) override def knownSize: Int = 0 max n override def isEmpty: Boolean = n <= 0 } @@ -125,7 +122,7 @@ object View extends IterableFactory[View] { /** A view containing repeated applications of a function to a start value */ @SerialVersionUID(3L) class Iterate[A](start: A, len: Int)(f: A => A) extends AbstractView[A] { - def iterator: Iterator[A]^{f} = Iterator.iterate(start)(f).take(len) + def iterator: Iterator[A] = Iterator.iterate(start)(f).take(len) override def knownSize: Int = 0 max len override def isEmpty: Boolean = len <= 0 } @@ -135,7 +132,7 @@ object View extends IterableFactory[View] { */ @SerialVersionUID(3L) class Unfold[A, S](initial: S)(f: S => Option[(A, S)]) extends AbstractView[A] { - def iterator: Iterator[A]^{f} = Iterator.unfold(initial)(f) + def iterator: Iterator[A] = Iterator.unfold(initial)(f) } /** An `IterableOps` whose collection type and collection type constructor are unknown */ @@ -143,14 +140,14 @@ object View extends IterableFactory[View] { /** A view that filters an underlying collection. 
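+    * Evaluation is deferred until traversal; a sketch:
+    * {{{
+    *   val f = List(1, 2, 3).view.filter(_ % 2 == 1)   // nothing traversed yet
+    *   f.toList                                        // forces it: List(1, 3)
+    * }}}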
*/ @SerialVersionUID(3L) - class Filter[A](val underlying: SomeIterableOps[A]^, val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying, p} = underlying.iterator.filterImpl(p, isFlipped) + class Filter[A](val underlying: SomeIterableOps[A], val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { + def iterator = underlying.iterator.filterImpl(p, isFlipped) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } object Filter { - def apply[A](underlying: Iterable[A]^, p: A => Boolean, isFlipped: Boolean): Filter[A]^{underlying, p} = + def apply[A](underlying: Iterable[A], p: A => Boolean, isFlipped: Boolean): Filter[A] = underlying match { case filter: Filter[A] if filter.isFlipped == isFlipped => new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) case _ => new Filter(underlying, p, isFlipped) @@ -159,15 +156,15 @@ object View extends IterableFactory[View] { /** A view that removes the duplicated elements as determined by the transformation function `f` */ @SerialVersionUID(3L) - class DistinctBy[A, B](underlying: SomeIterableOps[A]^, f: A -> B) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = underlying.iterator.distinctBy(f) + class DistinctBy[A, B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[A] { + def iterator: Iterator[A] = underlying.iterator.distinctBy(f) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A1] { - def iterator: Iterator[A1]^{underlying, f} = new AbstractIterator[A1] { + class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A1] { + def iterator = new AbstractIterator[A1] { private[this] val self = underlying.iterator private[this] var hd: A1 = _ private[this] var hdDefined: Boolean = false @@ -191,8 +188,8 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A2] { - def iterator: Iterator[A2]^{this} = new AbstractIterator[A2] { + class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A2] { + def iterator = new AbstractIterator[A2] { private[this] val self = underlying.iterator private[this] var hd: A2 = _ private[this] var hdDefined: Boolean = false @@ -217,8 +214,8 @@ object View extends IterableFactory[View] { /** A view that drops leading elements of the underlying collection. */ @SerialVersionUID(3L) - class Drop[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = underlying.iterator.drop(n) + class Drop[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = underlying.iterator.drop(n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -229,8 +226,8 @@ object View extends IterableFactory[View] { /** A view that drops trailing elements of the underlying collection. 
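+    * Sketch: the trailing `n` elements are withheld by buffering during traversal:
+    * {{{
+    *   (1 to 5).view.dropRight(2).toList   // List(1, 2, 3)
+    * }}}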
*/ @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = dropRightIterator(underlying.iterator, n) + class DropRight[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = dropRightIterator(underlying.iterator, n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -242,16 +239,16 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class DropWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying, p} = underlying.iterator.dropWhile(p) + class DropWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { + def iterator = underlying.iterator.dropWhile(p) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } /** A view that takes leading elements of the underlying collection. */ @SerialVersionUID(3L) - class Take[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = underlying.iterator.take(n) + class Take[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = underlying.iterator.take(n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -262,8 +259,8 @@ object View extends IterableFactory[View] { /** A view that takes trailing elements of the underlying collection. */ @SerialVersionUID(3L) - class TakeRight[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = takeRightIterator(underlying.iterator, n) + class TakeRight[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = takeRightIterator(underlying.iterator, n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -275,15 +272,15 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class TakeWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying, p} = underlying.iterator.takeWhile(p) + class TakeWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { + def iterator: Iterator[A] = underlying.iterator.takeWhile(p) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } @SerialVersionUID(3L) - class ScanLeft[+A, +B](underlying: SomeIterableOps[A]^, z: B, op: (B, A) => B) extends AbstractView[B] { - def iterator: Iterator[B]^{underlying, op} = underlying.iterator.scanLeft(z)(op) + class ScanLeft[+A, +B](underlying: SomeIterableOps[A], z: B, op: (B, A) => B) extends AbstractView[B] { + def iterator: Iterator[B] = underlying.iterator.scanLeft(z)(op) override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -293,32 +290,32 @@ object View extends IterableFactory[View] { /** A view that maps elements of the underlying collection. 
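+    * The function is applied lazily, once per element and traversal; a sketch:
+    * {{{
+    *   val m = Vector(1, 2).view.map(_ * 10)
+    *   m.toList   // List(10, 20); traversing again re-applies the function
+    * }}}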
*/ @SerialVersionUID(3L) - class Map[+A, +B](underlying: SomeIterableOps[A]^, f: A => B) extends AbstractView[B] { - def iterator: Iterator[B]^{underlying, f} = underlying.iterator.map(f) + class Map[+A, +B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[B] { + def iterator = underlying.iterator.map(f) override def knownSize = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } /** A view that flatmaps elements of the underlying collection. */ @SerialVersionUID(3L) - class FlatMap[A, B](underlying: SomeIterableOps[A]^, f: A => IterableOnce[B]^) extends AbstractView[B] { - def iterator: Iterator[B]^{underlying, f} = underlying.iterator.flatMap(f) + class FlatMap[A, B](underlying: SomeIterableOps[A], f: A => IterableOnce[B]) extends AbstractView[B] { + def iterator = underlying.iterator.flatMap(f) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } /** A view that collects elements of the underlying collection. */ @SerialVersionUID(3L) - class Collect[+A, B](underlying: SomeIterableOps[A]^, pf: PartialFunction[A, B]^) extends AbstractView[B] { - def iterator: Iterator[B]^{underlying, pf} = underlying.iterator.collect(pf) + class Collect[+A, B](underlying: SomeIterableOps[A], pf: PartialFunction[A, B]) extends AbstractView[B] { + def iterator = underlying.iterator.collect(pf) } /** A view that concatenates elements of the prefix collection or iterator with the elements * of the suffix collection or iterator. */ @SerialVersionUID(3L) - class Concat[A](prefix: SomeIterableOps[A]^, suffix: SomeIterableOps[A]^) extends AbstractView[A] { - def iterator: Iterator[A]^{prefix, suffix} = prefix.iterator ++ suffix.iterator + class Concat[A](prefix: SomeIterableOps[A], suffix: SomeIterableOps[A]) extends AbstractView[A] { + def iterator = prefix.iterator ++ suffix.iterator override def knownSize = { val prefixSize = prefix.knownSize if (prefixSize >= 0) { @@ -335,8 +332,8 @@ object View extends IterableFactory[View] { * of another collection. */ @SerialVersionUID(3L) - class Zip[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^) extends AbstractView[(A, B)] { - def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zip(other) + class Zip[A, B](underlying: SomeIterableOps[A], other: Iterable[B]) extends AbstractView[(A, B)] { + def iterator = underlying.iterator.zip(other) override def knownSize = { val s1 = underlying.knownSize if (s1 == 0) 0 else { @@ -352,8 +349,8 @@ object View extends IterableFactory[View] { * placeholder elements are used to extend the shorter collection to the length of the longer. 
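+    * For example (a sketch):
+    * {{{
+    *   List(1, 2, 3).view.zipAll(List("a"), 0, "z").toList
+    *   // List((1, "a"), (2, "z"), (3, "z"))
+    * }}}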
*/ @SerialVersionUID(3L) - class ZipAll[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^, thisElem: A, thatElem: B) extends AbstractView[(A, B)] { - def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zipAll(other, thisElem, thatElem) + class ZipAll[A, B](underlying: SomeIterableOps[A], other: Iterable[B], thisElem: A, thatElem: B) extends AbstractView[(A, B)] { + def iterator = underlying.iterator.zipAll(other, thisElem, thatElem) override def knownSize = { val s1 = underlying.knownSize if(s1 == -1) -1 else { @@ -366,10 +363,8 @@ object View extends IterableFactory[View] { /** A view that appends an element to its elements */ @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIterableOps[A]^, elem: A) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = - val ct = new Concat(underlying, new View.Single(elem)) - ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error + class Appended[+A](underlying: SomeIterableOps[A], elem: A) extends AbstractView[A] { + def iterator: Iterator[A] = new Concat(underlying, new View.Single(elem)).iterator override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -379,10 +374,8 @@ object View extends IterableFactory[View] { /** A view that prepends an element to its elements */ @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIterableOps[A]^) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = - val ct = new Concat(new View.Single(elem), underlying) - ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error + class Prepended[+A](elem: A, underlying: SomeIterableOps[A]) extends AbstractView[A] { + def iterator: Iterator[A] = new Concat(new View.Single(elem), underlying).iterator override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -391,8 +384,8 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class Updated[A](underlying: SomeIterableOps[A]^, index: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = new AbstractIterator[A] { + class Updated[A](underlying: SomeIterableOps[A], index: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A] = new AbstractIterator[A] { private[this] val it = underlying.iterator private[this] var i = 0 def next(): A = { @@ -410,28 +403,28 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - private[collection] class Patched[A](underlying: SomeIterableOps[A]^, from: Int, other: IterableOnce[A]^, replaced: Int) extends AbstractView[A] { + private[collection] class Patched[A](underlying: SomeIterableOps[A], from: Int, other: IterableOnce[A], replaced: Int) extends AbstractView[A] { // we may be unable to traverse `other` more than once, so we need to cache it if that's the case - private val _other: Iterable[A]^{other} = other match { + private val _other: Iterable[A] = other match { case other: Iterable[A] => other case other => LazyList.from(other) } - def iterator: Iterator[A]^{underlying, other} = underlying.iterator.patch(from, _other.iterator, replaced) + def iterator: Iterator[A] = underlying.iterator.patch(from, _other.iterator, replaced) override def knownSize: Int = if (underlying.knownSize == 0 && _other.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = if (knownSize == 0) true else iterator.isEmpty } @SerialVersionUID(3L) - class ZipWithIndex[A](underlying: 
SomeIterableOps[A]^) extends AbstractView[(A, Int)] { - def iterator: Iterator[(A, Int)]^{underlying} = underlying.iterator.zipWithIndex + class ZipWithIndex[A](underlying: SomeIterableOps[A]) extends AbstractView[(A, Int)] { + def iterator: Iterator[(A, Int)] = underlying.iterator.zipWithIndex override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class PadTo[A](underlying: SomeIterableOps[A]^, len: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = underlying.iterator.padTo(len, elem) + class PadTo[A](underlying: SomeIterableOps[A], len: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A] = underlying.iterator.padTo(len, elem) override def knownSize: Int = { val size = underlying.knownSize @@ -440,7 +433,7 @@ object View extends IterableFactory[View] { override def isEmpty: Boolean = underlying.isEmpty && len <= 0 } - private[collection] def takeRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { + private[collection] def takeRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { val k = it.knownSize if(k == 0 || n <= 0) Iterator.empty else if(n == Int.MaxValue) it @@ -448,23 +441,22 @@ object View extends IterableFactory[View] { else new TakeRightIterator[A](it, n) } - private final class TakeRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { - private[this] var current: Iterator[A @uncheckedCaptures]^{underlying} = underlying + private final class TakeRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { private[this] var len: Int = -1 private[this] var pos: Int = 0 private[this] var buf: ArrayBuffer[AnyRef] = _ def init(): Unit = if(buf eq null) { buf = new ArrayBuffer[AnyRef](maxlen min 256) len = 0 - while(current.hasNext) { - val n = current.next().asInstanceOf[AnyRef] + while(underlying.hasNext) { + val n = underlying.next().asInstanceOf[AnyRef] if(pos >= buf.length) buf.addOne(n) else buf(pos) = n pos += 1 if(pos == maxlen) pos = 0 len += 1 } - current = null + underlying = null if(len > maxlen) len = maxlen pos = pos - len if(pos < 0) pos += maxlen @@ -485,7 +477,7 @@ object View extends IterableFactory[View] { x } } - override def drop(n: Int): Iterator[A]^{this} = { + override def drop(n: Int): Iterator[A] = { init() if (n > 0) { len = (len - n) max 0 @@ -495,7 +487,7 @@ object View extends IterableFactory[View] { } } - private[collection] def dropRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { + private[collection] def dropRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { if(n <= 0) it else { val k = it.knownSize @@ -504,7 +496,7 @@ object View extends IterableFactory[View] { } } - private final class DropRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { + private final class DropRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { private[this] var len: Int = -1 // known size or -1 if the end of `underlying` has not been seen yet private[this] var pos: Int = 0 private[this] var buf: ArrayBuffer[AnyRef] = _ diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala new file mode 100644 index 000000000000..4699abbef5a7 --- /dev/null +++ b/tests/pos-special/stdlib/collection/WithFilter.scala @@ -0,0 +1,70 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, 
Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods + * of trait `Iterable`. + * + * @tparam A Element type (e.g. `Int`) + * @tparam CC Collection type constructor (e.g. `List`) + * + * @define coll collection + */ +@SerialVersionUID(3L) +abstract class WithFilter[+A, +CC[_]] extends Serializable { + + /** Builds a new collection by applying a function to all elements of the + * `filtered` outer $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying + * the given function `f` to each element of the filtered outer $coll + * and collecting the results. + */ + def map[B](f: A => B): CC[B] + + /** Builds a new collection by applying a function to all elements of the + * `filtered` outer $coll containing this `WithFilter` instance that satisfy + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying + * the given collection-valued function `f` to each element + * of the filtered outer $coll and + * concatenating the results. + */ + def flatMap[B](f: A => IterableOnce[B]): CC[B] + + /** Applies a function `f` to all elements of the `filtered` outer $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + */ + def foreach[U](f: A => U): Unit + + /** Further refines the filter for this `filtered` $coll. + * + * @param q the predicate used to test elements. + * @return an object of class `WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this $coll which + * also satisfy both `p` and `q` predicates. + */ + def withFilter(q: A => Boolean): WithFilter[A, CC] + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/BasicNode.java b/tests/pos-special/stdlib/collection/concurrent/BasicNode.java new file mode 100644 index 000000000000..c6ec91e4fde8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/BasicNode.java @@ -0,0 +1,19 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.concurrent; + +public abstract class BasicNode { + + public abstract String string(int lev); + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java b/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java new file mode 100644 index 000000000000..ddffa365234e --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java @@ -0,0 +1,37 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.concurrent;
+
+import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
+
+abstract class CNodeBase<K, V> extends MainNode<K, V> {
+
+    @SuppressWarnings("unchecked")
+    public static final AtomicIntegerFieldUpdater<CNodeBase<?, ?>> updater =
+        AtomicIntegerFieldUpdater.newUpdater((Class<CNodeBase<?, ?>>) (Class<?>) CNodeBase.class, "csize");
+
+    public volatile int csize = -1;
+
+    public boolean CAS_SIZE(int oldval, int nval) {
+        return updater.compareAndSet(this, oldval, nval);
+    }
+
+    public void WRITE_SIZE(int nval) {
+        updater.set(this, nval);
+    }
+
+    public int READ_SIZE() {
+        return updater.get(this);
+    }
+
+}
diff --git a/tests/pos-special/stdlib/collection/concurrent/Gen.java b/tests/pos-special/stdlib/collection/concurrent/Gen.java
new file mode 100644
index 000000000000..07af2983f32d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/Gen.java
@@ -0,0 +1,15 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.concurrent;
+
+final class Gen {}
diff --git a/tests/pos-special/stdlib/collection/concurrent/INodeBase.java b/tests/pos-special/stdlib/collection/concurrent/INodeBase.java
new file mode 100644
index 000000000000..dfb99806594f
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/INodeBase.java
@@ -0,0 +1,39 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.concurrent;
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+abstract class INodeBase<K, V> extends BasicNode {
+
+    @SuppressWarnings("unchecked")
+    public static final AtomicReferenceFieldUpdater<INodeBase<?, ?>, MainNode<?, ?>> updater =
+        AtomicReferenceFieldUpdater.newUpdater((Class<INodeBase<?, ?>>) (Class<?>) INodeBase.class, (Class<MainNode<?, ?>>) (Class<?>) MainNode.class, "mainnode");
+
+    static final Object RESTART = new Object();
+
+    static final Object NO_SUCH_ELEMENT_SENTINEL = new Object();
+
+    public volatile MainNode<K, V> mainnode = null;
+
+    public final Gen gen;
+
+    public INodeBase(Gen generation) {
+        gen = generation;
+    }
+
+    public BasicNode prev() {
+        return null;
+    }
+
+}
diff --git a/tests/pos-special/stdlib/collection/concurrent/MainNode.java b/tests/pos-special/stdlib/collection/concurrent/MainNode.java
new file mode 100644
index 000000000000..f7f022974e9e
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/MainNode.java
@@ -0,0 +1,46 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.concurrent;
+
+import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+
+abstract class MainNode<K, V> extends BasicNode {
+
+    @SuppressWarnings("unchecked")
+    public static final AtomicReferenceFieldUpdater<MainNode<?, ?>, MainNode<?, ?>> updater =
+        AtomicReferenceFieldUpdater.newUpdater((Class<MainNode<?, ?>>) (Class<?>) MainNode.class, (Class<MainNode<?, ?>>) (Class<?>) MainNode.class, "prev");
+
+    public volatile MainNode<K, V> prev = null;
+
+    public abstract int cachedSize(Object ct);
+
+    // standard contract
+    public abstract int knownSize();
+
+    public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) {
+        return updater.compareAndSet(this, oldval, nval);
+    }
+
+    public void WRITE_PREV(MainNode<K, V> nval) {
+        updater.set(this, nval);
+    }
+
+    // do we need this? unclear in the javadocs...
+    // apparently not - volatile reads are supposed to be safe
+    // regardless of whether there are concurrent ARFU updates
+    @Deprecated @SuppressWarnings("unchecked")
+    public MainNode<K, V> READ_PREV() {
+        return (MainNode<K, V>) updater.get(this);
+    }
+
+}
diff --git a/tests/pos-special/stdlib/collection/concurrent/Map.scala b/tests/pos-special/stdlib/collection/concurrent/Map.scala
new file mode 100644
index 000000000000..c2b996b93102
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/Map.scala
@@ -0,0 +1,192 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.concurrent
+
+import scala.annotation.tailrec
+
+/** A template trait for mutable maps that allow concurrent access.
+ *
+ *  $concurrentmapinfo
+ *
+ *  @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]]
+ *  section on `Concurrent Maps` for more information.
+ *
+ *  @tparam K  the key type of the map
+ *  @tparam V  the value type of the map
+ *
+ *  @define Coll `concurrent.Map`
+ *  @define coll concurrent map
+ *  @define concurrentmapinfo
+ *  This is a base trait for all Scala concurrent map implementations. It
+ *  provides all of the methods a `Map` does, with the difference that all the
+ *  changes are atomic. It also describes methods specific to concurrent maps.
+ *
+ *  '''Note''': The concurrent maps do not accept `'''null'''` for keys or values.
+ *
+ *  @define atomicop
+ *  This is an atomic operation.
+ */
+trait Map[K, V] extends scala.collection.mutable.Map[K, V] {
+
+  /**
+   * Associates the given key with a given value, unless the key was already
+   * associated with some other value.
+   *
+   * $atomicop
+   *
+   * @param k   key with which the specified value is to be associated
+   * @param v   value to be associated with the specified key
+   * @return    `Some(oldvalue)` if there was a value `oldvalue` previously
+   *            associated with the specified key, or `None` if there was no
+   *            mapping for the specified key
+   */
+  def putIfAbsent(k: K, v: V): Option[V]
+
+  /**
+   * Removes the entry for the specified key if it's currently mapped to the
+   * specified value.
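Since `putIfAbsent` is the primitive that makes check-then-act sequences safe under concurrency, here is a minimal usage sketch. Illustrative only, not part of the patch; the `counters` table and `increment` helper are invented for the example:

import java.util.concurrent.atomic.AtomicInteger
import scala.collection.concurrent.TrieMap

object PutIfAbsentSketch {
  val counters = TrieMap.empty[String, AtomicInteger]

  def increment(key: String): Int = {
    val fresh = new AtomicInteger(0)
    // Exactly one thread wins the race to install `fresh`; every
    // other caller gets the already-installed counter back.
    val counter = counters.putIfAbsent(key, fresh).getOrElse(fresh)
    counter.incrementAndGet()
  }
}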
+ * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + def remove(k: K, v: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldvalue value expected to be associated with the specified key + * if replacing is to happen + * @param newvalue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + def replace(k: K, oldvalue: V, newvalue: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped + * to some value. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param v value to be associated with the specified key + * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise + */ + def replace(k: K, v: V): Option[V] + + override def getOrElseUpdate(key: K, op: => V): V = get(key) match { + case Some(v) => v + case None => + val v = op + putIfAbsent(key, v) match { + case Some(ov) => ov + case None => v + } + } + + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support removal based on reference + * equality, and for those implementations, object equality is used instead. + * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v) + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support replacement based on reference + * equality, and for those implementations, object equality is used instead. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldValue value expected to be associated with the specified key + * if replacing is to happen + * @param newValue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * If the map is updated by another concurrent access, the remapping function will be retried until successfully updated. 
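The retry behaviour described here makes `updateWith` a one-call compare-and-swap loop: insert, update and delete can all be expressed by the remapping function. A minimal sketch with an invented vote-counting map (the function should be pure, since it may be re-run under contention):

import scala.collection.concurrent.TrieMap

object UpdateWithSketch {
  val votes = TrieMap.empty[String, Int]

  def upvote(key: String): Option[Int] =
    votes.updateWith(key) {
      case Some(n) => Some(n + 1) // bump an existing tally
      case None    => Some(1)     // first vote installs the entry
    }

  def retract(key: String): Option[Int] =
    votes.updateWith(key) {
      case Some(1) => None        // last vote removes the entry
      case Some(n) => Some(n - 1)
      case None    => None
    }
}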
+ * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return the new value associated with the specified key + */ + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = updateWithAux(key)(remappingFunction) + + @tailrec + private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + val previousValue = get(key) + val nextValue = remappingFunction(previousValue) + previousValue match { + case Some(prev) => nextValue match { + case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue + case _ => if (removeRefEq(key, prev)) return None + } + case _ => nextValue match { + case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue + case _ => return None + } + } + updateWithAux(key)(remappingFunction) + } + + private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + if (!p(k, v)) removeRefEq(k, v) + } + this + } + + private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + replaceRefEq(k, v, f(k, v)) + } + this + } +} diff --git a/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala new file mode 100644 index 000000000000..e4aa8c8c52a7 --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala @@ -0,0 +1,1202 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package concurrent + +import java.util.concurrent.atomic._ +import scala.{unchecked => uc} +import scala.annotation.tailrec +import scala.collection.concurrent.TrieMap.RemovalPolicy +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.{List, Nil} +import scala.collection.mutable.GrowableBuilder +import scala.util.Try +import scala.util.hashing.Hashing + +private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: Equiv[K]) extends INodeBase[K, V](g) { + import INodeBase._ + + WRITE(bn) + + def this(g: Gen, equiv: Equiv[K]) = this(null, g, equiv) + + def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval) + + def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n) + + def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct) + + def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = { + val m = /*READ*/mainnode + val prevval = /*READ*/m.prev + if (prevval eq null) m + else GCAS_Complete(m, ct) + } + + @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else { + // complete the GCAS + val prev = /*READ*/m.prev + val ctr = ct.readRoot(abort = true) + + prev match { + case null => + m + case fn: FailedNode[_, _] => // try to commit to previous value + if (CAS(m, fn.prev)) fn.prev + else GCAS_Complete(/*READ*/mainnode, ct) + case vn: MainNode[_, _] => + // Assume that you've read the root from the generation G. + // Assume that the snapshot algorithm is correct. + // ==> you can only reach nodes in generations <= G. 
+ // ==> `gen` is <= G. + // We know that `ctr.gen` is >= G. + // ==> if `ctr.gen` = `gen` then they are both equal to G. + // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G, + // or both + if ((ctr.gen eq gen) && ct.nonReadOnly) { + // try to commit + if (m.CAS_PREV(prev, null)) m + else GCAS_Complete(m, ct) + } else { + // try to abort + m.CAS_PREV(prev, new FailedNode(prev)) + GCAS_Complete(/*READ*/mainnode, ct) + } + } + } + + def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = { + n.WRITE_PREV(old) + if (CAS(old, n)) { + GCAS_Complete(n, ct) + /*READ*/n.prev eq null + } else false + } + + private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2) + + private def inode(cn: MainNode[K, V]) = { + val nin = new INode[K, V](gen, equiv) + nin.WRITE(cn) + nin + } + + def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = { + val nin = new INode[K, V](ngen, equiv) + val main = GCAS_READ(ct) + nin.WRITE(main) + nin + } + + /** Inserts a key value pair, overwriting the old pair if the keys match. + * + * @return true if successful, false otherwise + */ + @tailrec def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = { + val m = GCAS_READ(ct) // use -Yinline! + + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) + else false + } + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + GCAS(cn, nn, ct) + } + case basicNode => throw new MatchError(basicNode) + } + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) + GCAS(cn, ncnode, ct) + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + false + case ln: LNode[K, V] => // 3) an l-node + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + case mainNode => throw new MatchError(mainNode) + } + } + + + + /** Inserts a new key value pair, given that a specific condition is met. + * + * @param cond KEY_PRESENT_OR_ABSENT - don't care if the key was there, insert or overwrite + * KEY_ABSENT - key wasn't there, insert only, do not overwrite + * KEY_PRESENT - key was there, overwrite only, do not insert + * other value `v` - only overwrite if the current value is this + * @param fullEquals whether to use reference or full equals when comparing `v` to the current value + * @param hc the hashcode of `k` + * + * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) + */ + @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, fullEquals: Boolean, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { + val m = GCAS_READ(ct) // use -Yinline! 
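The `(hc >>> lev) & 0x1f` indexing used throughout these `rec_*` methods consumes the hash five bits at a time. A small standalone sketch of the arithmetic (not part of the patch; the hash constant is arbitrary):

object LevelIndexSketch {
  // Each level consumes 5 bits of the 32-bit hash, so a CNode has
  // at most 2^5 = 32 logical child slots.
  def idx(hc: Int, lev: Int): Int = (hc >>> lev) & 0x1f

  def main(args: Array[String]): Unit = {
    val hc = 0xCAFEBABE
    // lev advances in steps of 5 (0, 5, ..., 30), each selecting a 0..31 slot:
    println((0 to 30 by 5).map(idx(hc, _))) // Vector(30, 21, 14, 29, 15, 5, 3)
  }
}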
+ + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, fullEquals, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, fullEquals, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] @uc => cond match { + case INode.KEY_PRESENT_OR_ABSENT => + if (sn.hc == hc && equal(sn.k, k, ct)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct)) Some(sn.v) else null + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_ABSENT => + if (sn.hc == hc && equal(sn.k, k, ct)) Some(sn.v) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_PRESENT => + if (sn.hc == hc && equal(sn.k, k, ct)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + case otherv => + if (sn.hc == hc && equal(sn.k, k, ct) && (if (fullEquals) sn.v == otherv else sn.v.asInstanceOf[AnyRef] eq otherv)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + } + case basicNode => throw new MatchError(basicNode) + } + } else cond match { + case INode.KEY_PRESENT_OR_ABSENT | INode.KEY_ABSENT => + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) + if (GCAS(cn, ncnode, ct)) None else null + case INode.KEY_PRESENT => None + case otherv => None + } + case sn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => // 3) an l-node + def insertln() = { + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + } + cond match { + case INode.KEY_PRESENT_OR_ABSENT => + val optv = ln.get(k) + if (insertln()) optv else null + case INode.KEY_ABSENT => + ln.get(k) match { + case None => if (insertln()) None else null + case optv => optv + } + case INode.KEY_PRESENT => + ln.get(k) match { + case Some(v0) => if (insertln()) Some(v0) else null + case None => None + } + case otherv => + ln.get(k) match { + case Some(v0) if (if (fullEquals) v0 == otherv else v0.asInstanceOf[AnyRef] eq otherv) => + if (insertln()) Some(otherv.asInstanceOf[V]) else null + case _ => None + } + } + case mainNode => throw new MatchError(mainNode) + } + } + + /** Looks up the value associated with the key. + * + * @param hc the hashcode of `k` + * + * @return NO_SUCH_ELEMENT_SENTINEL if no value has been found, RESTART if the operation wasn't successful, + * or any other value otherwise + */ + @tailrec def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = { + val m = GCAS_READ(ct) // use -Yinline! 
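The bitmap trick used in these traversals (`flag = 1 << idx`, position = popcount of the occupied bits below `idx`) is what lets a `CNode` store only its occupied slots in a compressed array. A self-contained sketch of that computation, with invented example values:

object BitmapSketch {
  def slot(bmp: Int, idx: Int): Option[Int] = {
    val flag = 1 << idx
    if ((bmp & flag) == 0) None // bitmap shows no binding in this slot
    else Some(Integer.bitCount(bmp & (flag - 1)))
  }

  def main(args: Array[String]): Unit = {
    val bmp = Integer.parseInt("100110", 2) // slots 1, 2 and 5 occupied
    println(slot(bmp, 2)) // Some(1): one occupied slot (idx 1) lies below idx 2
    println(slot(bmp, 5)) // Some(2): slots 1 and 2 lie below idx 5
    println(slot(bmp, 3)) // None: slot 3 is empty
  }
}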
+ + m match { + case cn: CNode[K, V] => // 1) a multinode + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + if ((bmp & flag) == 0) NO_SUCH_ELEMENT_SENTINEL // 1a) bitmap shows no binding + else { // 1b) bitmap contains a value - descend + val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + sub match { + case in: INode[K, V] @uc => + if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) + else RESTART + } + case sn: SNode[K, V] @uc => // 2) singleton node + if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef] + else NO_SUCH_ELEMENT_SENTINEL + case basicNode => throw new MatchError(basicNode) + } + } + case tn: TNode[_, _] => // 3) non-live node + def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { + clean(parent, ct, lev - 5) + RESTART + } else { + if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef] + else NO_SUCH_ELEMENT_SENTINEL + } + cleanReadOnly(tn) + case ln: LNode[K, V] => // 5) an l-node + ln.get(k).asInstanceOf[Option[AnyRef]].getOrElse(NO_SUCH_ELEMENT_SENTINEL) + case mainNode => throw new MatchError(mainNode) + } + } + + /** Removes the key associated with the given value. + * + * @param hc the hashcode of `k` + * + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) + * + * @return null if not successful, an Option[V] indicating the previous value otherwise + */ + def rec_remove( + k: K, + v: V, + removalPolicy: Int, + hc: Int, + lev: Int, + parent: INode[K, V], + startgen: Gen, + ct: TrieMap[K, V]): Option[V] = { + + val m = GCAS_READ(ct) // use -Yinline! 
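Removal relies on `CNode.removedAt`, which, like all `CNode` updates, copies the compressed child array rather than mutating it in place. A standalone sketch of that copy pattern (illustrative only, with `String` standing in for `BasicNode`):

object RemovedAtSketch {
  // Mirrors CNode.removedAt: copy the prefix, then the suffix shifted left by one.
  def removedAt(arr: Array[String], pos: Int): Array[String] = {
    val narr = new Array[String](arr.length - 1)
    Array.copy(arr, 0, narr, 0, pos)
    Array.copy(arr, pos + 1, narr, pos, arr.length - pos - 1)
    narr
  }

  def main(args: Array[String]): Unit = {
    val before = Array("a", "b", "c")
    println(removedAt(before, 1).mkString(", ")) // a, c
    println(before.mkString(", "))               // a, b, c (the original is untouched)
  }
}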
+ + m match { + case cn: CNode[K, V] => + val idx = (hc >>> lev) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) None + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + val res = sub match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_remove(k, v, removalPolicy, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removalPolicy, hc, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct) && RemovalPolicy.shouldRemove(removalPolicy)(sn.v, v)) { + val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) + if (GCAS(cn, ncn, ct)) Some(sn.v) else null + } else None + case basicNode => throw new MatchError(basicNode) + } + + if (res == None || (res eq null)) res + else { + @tailrec def cleanParent(nonlive: AnyRef): Unit = { + val pm = parent.GCAS_READ(ct) + pm match { + case cn: CNode[K, V] => + val idx = (hc >>> (lev - 5)) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + if (sub eq this) (nonlive: @uc) match { + case tn: TNode[K, V] @uc => + val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) + if (!parent.GCAS(cn, ncn, ct)) + if (ct.readRoot().gen == startgen) cleanParent(nonlive) + } + } + case _ => // parent is no longer a cnode, we're done + } + } + + if (parent ne null) { // never tomb at root + val n = GCAS_READ(ct) + if (n.isInstanceOf[TNode[_, _]]) + cleanParent(n) + } + + res + } + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => + if (removalPolicy == RemovalPolicy.Always) { + val optv = ln.get(k) + val nn = ln.removed(k, ct) + if (GCAS(ln, nn, ct)) optv else null + } else ln.get(k) match { + case optv @ Some(v0) if RemovalPolicy.shouldRemove(removalPolicy)(v, v0) => + val nn = ln.removed(k, ct) + if (GCAS(ln, nn, ct)) optv else null + case _ => None + } + case mainNode => throw new MatchError(mainNode) + } + } + + private def clean(nd: INode[K, V], ct: TrieMap[K, V], lev: Int): Unit = { + val m = nd.GCAS_READ(ct) + m match { + case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct) + case _ => + } + } + + def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null + + def cachedSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).cachedSize(ct) + + def knownSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).knownSize() + + /* this is a quiescent method! 
*/ + def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { + case null => "" + case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc) + case cn: CNode[_, _] => cn.string(lev) + case ln: LNode[_, _] => ln.string(lev) + case x => "".format(x) + }) + +} + + +private[concurrent] object INode { + //////////////////////////////////////////////////////////////////////////////////////////////////// + // Arguments for `cond` argument in TrieMap#rec_insertif + //////////////////////////////////////////////////////////////////////////////////////////////////// + final val KEY_PRESENT = new AnyRef + final val KEY_ABSENT = new AnyRef + final val KEY_PRESENT_OR_ABSENT = new AnyRef + + def newRootNode[K, V](equiv: Equiv[K]) = { + val gen = new Gen + val cn = new CNode[K, V](0, new Array(0), gen) + new INode[K, V](cn, gen, equiv) + } +} + + +private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] { + WRITE_PREV(p) + + def string(lev: Int) = throw new UnsupportedOperationException + + def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException + + def knownSize: Int = throw new UnsupportedOperationException + + override def toString = "FailedNode(%s)".format(p) +} + + +private[concurrent] trait KVNode[K, V] { + def kvPair: (K, V) +} + + +private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) + extends BasicNode with KVNode[K, V] { + def copy = new SNode(k, v, hc) + def copyTombed = new TNode(k, v, hc) + def copyUntombed = new SNode(k, v, hc) + def kvPair = (k, v) + def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) +} + +// Tomb Node, used to ensure proper ordering during removals +private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) + extends MainNode[K, V] with KVNode[K, V] { + def copy = new TNode(k, v, hc) + def copyTombed = new TNode(k, v, hc) + def copyUntombed = new SNode(k, v, hc) + def kvPair = (k, v) + def cachedSize(ct: AnyRef): Int = 1 + def knownSize: Int = 1 + def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) +} + +// List Node, leaf node that handles hash collisions +private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Equiv[K]) + extends MainNode[K, V] { + + def this(k: K, v: V, equiv: Equiv[K]) = this((k -> v) :: Nil, equiv) + + def this(k1: K, v1: V, k2: K, v2: V, equiv: Equiv[K]) = + this(if (equiv.equiv(k1, k2)) (k2 -> v2) :: Nil else (k1 -> v1) :: (k2 -> v2) :: Nil, equiv) + + def inserted(k: K, v: V) = { + var k0: K = k + @tailrec + def remove(elems: List[(K, V)], acc: List[(K, V)]): List[(K, V)] = { + if (elems.isEmpty) acc + else if (equiv.equiv(elems.head._1, k)) { + k0 = elems.head._1 + acc ::: elems.tail + } else remove(elems.tail, elems.head :: acc) + } + val e = remove(entries, Nil) + new LNode((k0 -> v) :: e, equiv) + } + + def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = { + val updmap = entries.filterNot(entry => equiv.equiv(entry._1, k)) + if (updmap.sizeIs > 1) new LNode(updmap, equiv) + else { + val (k, v) = updmap.iterator.next() + new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses + } + } + + def get(k: K): Option[V] = entries.find(entry => equiv.equiv(entry._1, k)).map(_._2) + + def cachedSize(ct: AnyRef): Int = entries.size + + def knownSize: Int = -1 // shouldn't ever be empty, and the size of a list is not known + + def string(lev: Int) = (" " * lev) + 
"LNode(%s)".format(entries.mkString(", ")) + +} + +// Ctrie Node, contains bitmap and array of references to branch nodes +private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { + // this should only be called from within read-only snapshots + def cachedSize(ct: AnyRef): Int = { + val currsz = READ_SIZE() + if (currsz != -1) currsz + else { + val sz = computeSize(ct.asInstanceOf[TrieMap[K, V]]) + while (READ_SIZE() == -1) CAS_SIZE(-1, sz) + READ_SIZE() + } + } + + def knownSize: Int = READ_SIZE() // this should only ever return -1 if unknown + + // lends itself towards being parallelizable by choosing + // a random starting offset in the array + // => if there are concurrent size computations, they start + // at different positions, so they are more likely to + // to be independent + private def computeSize(ct: TrieMap[K, V]): Int = { + var i = 0 + var sz = 0 + val offset = + if (array.length > 0) + //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */ + java.util.concurrent.ThreadLocalRandom.current.nextInt(0, array.length) + else 0 + while (i < array.length) { + val pos = (i + offset) % array.length + array(pos) match { + case sn: SNode[_, _] => sz += 1 + case in: INode[K, V] @uc => sz += in.cachedSize(ct) + case basicNode => throw new MatchError(basicNode) + } + i += 1 + } + sz + } + + def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = { + val len = array.length + val narr = new Array[BasicNode](len) + Array.copy(array, 0, narr, 0, len) + narr(pos) = nn + new CNode[K, V](bitmap, narr, gen) + } + + def removedAt(pos: Int, flag: Int, gen: Gen) = { + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len - 1) + Array.copy(arr, 0, narr, 0, pos) + Array.copy(arr, pos + 1, narr, pos, len - pos - 1) + new CNode[K, V](bitmap ^ flag, narr, gen) + } + + def insertedAt(pos: Int, flag: Int, k: K, v: V, hc: Int, gen: Gen) = { + val len = array.length + val bmp = bitmap + val narr = new Array[BasicNode](len + 1) + Array.copy(array, 0, narr, 0, pos) + narr(pos) = new SNode(k, v, hc) + Array.copy(array, pos, narr, pos + 1, len - pos) + new CNode[K, V](bmp | flag, narr, gen) + } + + /** Returns a copy of this cnode such that all the i-nodes below it are copied + * to the specified generation `ngen`. 
+ */ + def renewed(ngen: Gen, ct: TrieMap[K, V]) = { + var i = 0 + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len) + while (i < len) { + arr(i) match { + case in: INode[K, V] @uc => narr(i) = in.copyToGen(ngen, ct) + case bn: BasicNode => narr(i) = bn + } + i += 1 + } + new CNode[K, V](bitmap, narr, ngen) + } + + private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match { + case tn: TNode[_, _] => tn.copyUntombed + case _ => inode + } + + def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { + case sn: SNode[K, V] @uc => sn.copyTombed + case _ => this + } else this + + // - if the branching factor is 1 for this CNode, and the child + // is a tombed SNode, returns its tombed version + // - otherwise, if there is at least one non-null node below, + // returns the version of this node with at least some null-inodes + // removed (those existing when the op began) + // - if there are only null-i-nodes below, returns null + def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = { + val bmp = bitmap + var i = 0 + val arr = array + val tmparray = new Array[BasicNode](arr.length) + while (i < arr.length) { // construct new bitmap + val sub = arr(i) + sub match { + case in: INode[K, V] @uc => + val inodemain = in.gcasRead(ct) + assert(inodemain ne null) + tmparray(i) = resurrect(in, inodemain) + case sn: SNode[K, V] @uc => + tmparray(i) = sn + case basicNode => throw new MatchError(basicNode) + } + i += 1 + } + + new CNode[K, V](bmp, tmparray, gen).toContracted(lev) + } + + def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) + + override def toString = { + def elems: Seq[String] = array.flatMap { + case sn: SNode[K, V] @uc => Iterable.single(sn.kvPair._2.toString) + case in: INode[K, V] @uc => Iterable.single(augmentString(in.toString).drop(14) + "(" + in.gen + ")") + case basicNode => throw new MatchError(basicNode) + } + f"CNode(sz: ${elems.size}%d; ${elems.sorted.mkString(", ")})" + } +} + +private[concurrent] object CNode { + + def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { + val xidx = (xhc >>> lev) & 0x1f + val yidx = (yhc >>> lev) & 0x1f + val bmp = (1 << xidx) | (1 << yidx) + if (xidx == yidx) { + val subinode = new INode[K, V](gen, equiv)//(TrieMap.inodeupdater) + subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen, equiv) + new CNode(bmp, Array(subinode), gen) + } else { + if (xidx < yidx) new CNode(bmp, Array(x, y), gen) + else new CNode(bmp, Array(y, x), gen) + } + } else { + new LNode(x.k, x.v, y.k, y.v, equiv) + } + +} + + +private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) { + @volatile var committed = false +} + + +/** A concurrent hash-trie or TrieMap is a concurrent thread-safe lock-free + * implementation of a hash array mapped trie. It is used to implement the + * concurrent map abstraction. It has particularly scalable concurrent insert + * and remove operations and is memory-efficient. It supports O(1), atomic, + * lock-free snapshots which are used to implement linearizable lock-free size, + * iterator and clear operations. The cost of evaluating the (lazy) snapshot is + * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. 
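The `lev < 35` cut-off in `CNode.dual` above is pure arithmetic: a 32-bit hash code consumed 5 bits per level supports at most ceil(32 / 5) = 7 `CNode` levels (lev = 0, 5, ..., 30). Once `lev` reaches 35 the hash bits are exhausted, so two keys that still land on the same index are genuine hash collisions, which is why they fall back to an `LNode` list instead of another `CNode`.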
+ * + * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] + */ +@SerialVersionUID(-5212455458703321708L) +final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) + extends scala.collection.mutable.AbstractMap[K, V] + with scala.collection.concurrent.Map[K, V] + with scala.collection.mutable.MapOps[K, V, TrieMap, TrieMap[K, V]] + with scala.collection.MapFactoryDefaults[K, V, TrieMap, mutable.Iterable] + with DefaultSerializable { + + private[this] var hashingobj = if (hashf.isInstanceOf[Hashing.Default[_]]) new TrieMap.MangledHashing[K] else hashf + private[this] var equalityobj = ef + @transient + private[this] var rootupdater = rtupd + def hashing = hashingobj + def equality = equalityobj + @volatile private var root = r + + def this(hashf: Hashing[K], ef: Equiv[K]) = this( + INode.newRootNode(ef), + AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"), + hashf, + ef + ) + + def this() = this(Hashing.default, Equiv.universal) + + override def mapFactory: MapFactory[TrieMap] = TrieMap + + /* internal methods */ + + private def writeObject(out: java.io.ObjectOutputStream): Unit = { + out.writeObject(hashingobj) + out.writeObject(equalityobj) + + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + out.writeObject(k) + out.writeObject(v) + } + out.writeObject(TrieMapSerializationEnd) + } + + private def readObject(in: java.io.ObjectInputStream): Unit = { + root = INode.newRootNode(equality) + rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root") + + hashingobj = in.readObject().asInstanceOf[Hashing[K]] + equalityobj = in.readObject().asInstanceOf[Equiv[K]] + + var obj: AnyRef = in.readObject() + + while (obj != TrieMapSerializationEnd) { + obj = in.readObject() + if (obj != TrieMapSerializationEnd) { + val k = obj.asInstanceOf[K] + val v = in.readObject().asInstanceOf[V] + update(k, v) + } + } + } + + private def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) + + private[collection] def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort) + + private[concurrent] def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { + val r = /*READ*/root + r match { + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => RDCSS_Complete(abort) + case x => throw new MatchError(x) + } + } + + @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = { + val v = /*READ*/root + v match { + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => + val RDCSS_Descriptor(ov, exp, nv) = desc + if (abort) { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } else { + val oldmain = ov.gcasRead(this) + if (oldmain eq exp) { + if (CAS_ROOT(desc, nv)) { + desc.committed = true + nv + } else RDCSS_Complete(abort) + } else { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } + } + case x => throw new MatchError(x) + } + } + + private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = { + val desc = RDCSS_Descriptor(ov, expectedmain, nv) + if (CAS_ROOT(ov, desc)) { + RDCSS_Complete(abort = false) + /*READ*/desc.committed + } else false + } + + @tailrec private def inserthc(k: K, hc: Int, v: V): Unit = { + val r = RDCSS_READ_ROOT() + if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) + } + + @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: 
AnyRef, fullEquals: Boolean): Option[V] = { + val r = RDCSS_READ_ROOT() + + val ret = r.rec_insertif(k, v, hc, cond, fullEquals, 0, null, r.gen, this) + if (ret eq null) insertifhc(k, hc, v, cond, fullEquals) + else ret + } + + /** Finds the value associated with this key + * + * @param k the key to look up + * @param hc the hashcode of `k` + * + * @return the value: V associated with `k`, if it exists. Otherwise, INodeBase.NO_SUCH_ELEMENT_SENTINEL + */ + @tailrec private def lookuphc(k: K, hc: Int): AnyRef = { + val r = RDCSS_READ_ROOT() + val res = r.rec_lookup(k, hc, 0, null, r.gen, this) + if (res eq INodeBase.RESTART) lookuphc(k, hc) + else res + } + + /** Removes a key-value pair from the map + * + * @param k the key to remove + * @param v the value compare with the value found associated with the key + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) + * @return an Option[V] indicating the previous value + */ + @tailrec private def removehc(k: K, v: V, removalPolicy: Int, hc: Int): Option[V] = { + val r = RDCSS_READ_ROOT() + val res = r.rec_remove(k, v, removalPolicy, hc, 0, null, r.gen, this) + if (res ne null) res + else removehc(k, v, removalPolicy, hc) + } + + + def string = RDCSS_READ_ROOT().string(0) + + /* public methods */ + + def isReadOnly = rootupdater eq null + + def nonReadOnly = rootupdater ne null + + /** Returns a snapshot of this TrieMap. + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * in the snapshot or this TrieMap are accessed, they are rewritten. + * This means that the work of rebuilding both the snapshot and this + * TrieMap is distributed across all the threads doing updates or accesses + * subsequent to the snapshot creation. + */ + @tailrec def snapshot(): TrieMap[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.gcasRead(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater, hashing, equality) + else snapshot() + } + + /** Returns a read-only snapshot of this TrieMap. + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * of this TrieMap are accessed, it is rewritten. The work of creating + * the snapshot is thus distributed across subsequent updates + * and accesses on this TrieMap by all threads. + * Note that the snapshot itself is never rewritten unlike when calling + * the `snapshot` method, but the obtained snapshot cannot be modified. + * + * This method is used by other methods such as `size` and `iterator`. 
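Because `snapshot()` and `readOnlySnapshot()` are O(1) and linearizable, an expensive traversal can run against a frozen view while writers proceed on the original map. An illustrative sketch, not part of the patch:

import scala.collection.concurrent.TrieMap

object SnapshotSketch {
  def main(args: Array[String]): Unit = {
    val live   = TrieMap("a" -> 1, "b" -> 2)
    val frozen = live.readOnlySnapshot()
    live.update("c", 3)      // a mutation after the snapshot...
    println(frozen.get("c")) // ...is invisible in it: None
    println(live.get("c"))   // Some(3)
  }
}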
+ */ + @tailrec def readOnlySnapshot(): scala.collection.Map[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.gcasRead(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality) + else readOnlySnapshot() + } + + @tailrec override def clear(): Unit = { + val r = RDCSS_READ_ROOT() + if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V](equality))) clear() + } + + def computeHash(k: K) = hashingobj.hash(k) + + @deprecated("Use getOrElse(k, null) instead.", "2.13.0") + def lookup(k: K): V = { + val hc = computeHash(k) + val lookupRes = lookuphc(k, hc) + val res = if (lookupRes == INodeBase.NO_SUCH_ELEMENT_SENTINEL) null else lookupRes + res.asInstanceOf[V] + } + + override def apply(k: K): V = { + val hc = computeHash(k) + val res = lookuphc(k, hc) + if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) throw new NoSuchElementException + else res.asInstanceOf[V] + } + + def get(k: K): Option[V] = { + val hc = computeHash(k) + val res = lookuphc(k, hc) + if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) None else Some(res).asInstanceOf[Option[V]] + } + + override def put(key: K, value: V): Option[V] = { + val hc = computeHash(key) + insertifhc(key, hc, value, INode.KEY_PRESENT_OR_ABSENT, fullEquals = false /* unused */) + } + + override def update(k: K, v: V): Unit = { + val hc = computeHash(k) + inserthc(k, hc, v) + } + + def addOne(kv: (K, V)) = { + update(kv._1, kv._2) + this + } + + override def remove(k: K): Option[V] = { + val hc = computeHash(k) + removehc(k = k, v = null.asInstanceOf[V], RemovalPolicy.Always, hc = hc) + } + + def subtractOne(k: K) = { + remove(k) + this + } + + def putIfAbsent(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) + } + + // TODO once computeIfAbsent is added to concurrent.Map, + // move the comment there and tweak the 'at most once' part + /** If the specified key is not already in the map, computes its value using + * the given thunk `op` and enters it into the map. + * + * If the specified mapping function throws an exception, + * that exception is rethrown. + * + * Note: This method will invoke op at most once. + * However, `op` may be invoked without the result being added to the map if + * a concurrent process is also trying to add a value corresponding to the + * same key `k`. 
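Concretely, the "at most once, but the result may be discarded" contract matters when `op` has side effects. A hedged sketch of the intended caching pattern (the `cache` and `expensiveLoad` names are invented for illustration):

import scala.collection.concurrent.TrieMap

object GetOrElseUpdateSketch {
  val cache = TrieMap.empty[String, Array[Byte]]

  def load(key: String): Array[Byte] =
    // If two threads miss simultaneously, both may run the expensive load,
    // but all callers still observe the single value that won the race.
    cache.getOrElseUpdate(key, expensiveLoad(key))

  private def expensiveLoad(key: String): Array[Byte] =
    key.getBytes("UTF-8") // stand-in for real I/O
}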
+ * + * @param k the key to modify + * @param op the expression that computes the value + * @return the newly added value + */ + override def getOrElseUpdate(k: K, op: => V): V = { + val hc = computeHash(k) + lookuphc(k, hc) match { + case INodeBase.NO_SUCH_ELEMENT_SENTINEL => + val v = op + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) match { + case Some(oldValue) => oldValue + case None => v + } + case oldValue => oldValue.asInstanceOf[V] + } + } + + def remove(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, RemovalPolicy.FullEquals, hc).nonEmpty + } + + override private[collection] def removeRefEq(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, RemovalPolicy.ReferenceEq, hc).nonEmpty + } + + def replace(k: K, oldvalue: V, newvalue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef], fullEquals = true).nonEmpty + } + + override private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newValue, oldValue.asInstanceOf[AnyRef], fullEquals = false).nonEmpty + } + + def replace(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_PRESENT, fullEquals = false /* unused */) + } + + def iterator: Iterator[(K, V)] = { + if (nonReadOnly) readOnlySnapshot().iterator + else new TrieMapIterator(0, this) + } + + //////////////////////////////////////////////////////////////////////////// + // + // scala/bug#10177 These methods need overrides as the inherited implementations + // call `.iterator` more than once, which doesn't guarantee a coherent + // view of the data if there is a concurrent writer + // Note that the we don't need overrides for keysIterator or valuesIterator + // TrieMapTest validates the behaviour. + override def values: Iterable[V] = { + if (nonReadOnly) readOnlySnapshot().values + else super.values + } + override def keySet: Set[K] = { + if (nonReadOnly) readOnlySnapshot().keySet + else super.keySet + } + + override def view: MapView[K, V] = if (nonReadOnly) readOnlySnapshot().view else super.view + + @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") + override def filterKeys(p: K => Boolean): collection.MapView[K, V] = view.filterKeys(p) + + @deprecated("Use .view.mapValues(f). 
A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") + override def mapValues[W](f: V => W): collection.MapView[K, W] = view.mapValues(f) + // END extra overrides + /////////////////////////////////////////////////////////////////// + + override def size: Int = + if (nonReadOnly) readOnlySnapshot().size + else RDCSS_READ_ROOT().cachedSize(this) + override def knownSize: Int = + if (nonReadOnly) -1 + else RDCSS_READ_ROOT().knownSize(this) + override def isEmpty: Boolean = + (if (nonReadOnly) readOnlySnapshot() else this).sizeIs == 0 // sizeIs checks knownSize + override protected[this] def className = "TrieMap" + + override def lastOption: Option[(K, V)] = if (isEmpty) None else Try(last).toOption +} + + +@SerialVersionUID(3L) +object TrieMap extends MapFactory[TrieMap] { + + def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V] + + def from[K, V](it: IterableOnce[(K, V)]): TrieMap[K, V] = new TrieMap[K, V]() ++= it + + def newBuilder[K, V]: mutable.GrowableBuilder[(K, V), TrieMap[K, V]] = new GrowableBuilder(empty[K, V]) + + @transient + val inodeupdater: AtomicReferenceFieldUpdater[INodeBase[_, _], MainNode[_, _]] = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") + + class MangledHashing[K] extends Hashing[K] { + def hash(k: K): Int = scala.util.hashing.byteswap32(k.##) + } + + private[concurrent] object RemovalPolicy { + final val Always = 0 + final val FullEquals = 1 + final val ReferenceEq = 2 + + def shouldRemove[V](removalPolicy: Int)(a: V, b: V): Boolean = + removalPolicy match { + case Always => true + case FullEquals => a == b + case ReferenceEq => a.asInstanceOf[AnyRef] eq b.asInstanceOf[AnyRef] + } + } +} + +// non-final as an extension point for parallel collections +private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends AbstractIterator[(K, V)] { + private val stack = new Array[Array[BasicNode]](7) + private val stackpos = new Array[Int](7) + private var depth = -1 + private var subiter: Iterator[(K, V)] = null + private var current: KVNode[K, V] = null + + if (mustInit) initialize() + + def hasNext = (current ne null) || (subiter ne null) + + def next() = if (hasNext) { + var r: (K, V) = null + if (subiter ne null) { + r = subiter.next() + checkSubiter() + } else { + r = current.kvPair + advance() + } + r + } else Iterator.empty.next() + + private def readin(in: INode[K, V]) = in.gcasRead(ct) match { + case cn: CNode[K, V] => + depth += 1 + stack(depth) = cn.array + stackpos(depth) = -1 + advance() + case tn: TNode[K, V] => + current = tn + case ln: LNode[K, V] => + subiter = ln.entries.iterator + checkSubiter() + case null => + current = null + case mainNode => throw new MatchError(mainNode) + } + + private def checkSubiter() = if (!subiter.hasNext) { + subiter = null + advance() + } + + private def initialize(): Unit = { + assert(ct.isReadOnly) + + val r = ct.RDCSS_READ_ROOT() + readin(r) + } + + @tailrec + final def advance(): Unit = if (depth >= 0) { + val npos = stackpos(depth) + 1 + if (npos < stack(depth).length) { + stackpos(depth) = npos + stack(depth)(npos) match { + case sn: SNode[K, V] @uc => current = sn + case in: INode[K, V] @uc => readin(in) + case basicNode => throw new MatchError(basicNode) + } + } else { + depth -= 1 + advance() + } + } else current = null + + protected def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean): TrieMapIterator[K, V] = new 
TrieMapIterator[K, V](_lev, _ct, _mustInit) + + protected def dupTo(it: TrieMapIterator[K, V]): Unit = { + it.level = this.level + it.ct = this.ct + it.depth = this.depth + it.current = this.current + + // these need a deep copy + Array.copy(this.stack, 0, it.stack, 0, 7) + Array.copy(this.stackpos, 0, it.stackpos, 0, 7) + + // this one needs to be evaluated + if (this.subiter == null) it.subiter = null + else { + val lst = this.subiter.to(immutable.List) + this.subiter = lst.iterator + it.subiter = lst.iterator + } + } + + /** Returns a sequence of iterators over subsets of this iterator. + * It's used to ease the implementation of splitters for a parallel version of the TrieMap. + */ + protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) { + // the case where an LNode is being iterated + val it = newIterator(level + 1, ct, _mustInit = false) + it.depth = -1 + it.subiter = this.subiter + it.current = null + this.subiter = null + advance() + this.level += 1 + Seq(it, this) + } else if (depth == -1) { + this.level += 1 + Seq(this) + } else { + var d = 0 + while (d <= depth) { + val rem = stack(d).length - 1 - stackpos(d) + if (rem > 0) { + val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2) + stack(d) = arr1 + stackpos(d) = -1 + val it = newIterator(level + 1, ct, _mustInit = false) + it.stack(0) = arr2 + it.stackpos(0) = -1 + it.depth = 0 + it.advance() // <-- fix it + this.level += 1 + return Seq(this, it) + } + d += 1 + } + this.level += 1 + Seq(this) + } + +} + +/** Only used for ctrie serialization. */ +@SerialVersionUID(3L) +private[concurrent] case object TrieMapSerializationEnd diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala new file mode 100644 index 000000000000..3d155337aa93 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala @@ -0,0 +1,260 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.{unchecked => uc} + +/** Defines converter methods from Scala to Java collections. + * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. + */ +trait AsJavaConverters { + import JavaCollectionWrappers._ + + /** + * Converts a Scala `Iterator` to a Java `Iterator`. + * + * The returned Java `Iterator` is backed by the provided Scala `Iterator` and any side-effects of + * using it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Iterator` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Iterator` will be returned. + * + * @param i The Scala `Iterator` to be converted. + * @return A Java `Iterator` view of the argument. + */ + def asJava[A](i: Iterator[A]): ju.Iterator[A] = i match { + case null => null + case wrapper: JIteratorWrapper[A @uc] => wrapper.underlying + case _ => new IteratorWrapper(i) + } + + /** + * Converts a Scala `Iterator` to a Java `Enumeration`. 
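The "original ... will be returned" clauses in these converters mean that converting back and forth unwraps rather than stacking adapters. An illustrative round-trip through the `javaapi` converters this file backs (not part of the patch):

import scala.jdk.javaapi.CollectionConverters

object RoundTripSketch {
  def main(args: Array[String]): Unit = {
    val scalaIt = Iterator(1, 2, 3)
    val javaIt  = CollectionConverters.asJava(scalaIt)
    val back    = CollectionConverters.asScala(javaIt)
    println(back eq scalaIt) // true: the original iterator is unwrapped, not re-wrapped
  }
}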
+ * + * The returned Java `Enumeration` is backed by the provided Scala `Iterator` and any side-effects + * of using it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Iterator` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Enumeration` will be returned. + * + * @param i The Scala `Iterator` to be converted. + * @return A Java `Enumeration` view of the argument. + */ + def asJavaEnumeration[A](i: Iterator[A]): ju.Enumeration[A] = i match { + case null => null + case wrapper: JEnumerationWrapper[A @uc] => wrapper.underlying + case _ => new IteratorWrapper(i) + } + + /** + * Converts a Scala `Iterable` to a Java `Iterable`. + * + * The returned Java `Iterable` is backed by the provided Scala `Iterable` and any side-effects of + * using it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Iterable` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Iterable` will be returned. + * + * @param i The Scala `Iterable` to be converted. + * @return A Java `Iterable` view of the argument. + */ + def asJava[A](i: Iterable[A]): jl.Iterable[A] = i match { + case null => null + case wrapper: JIterableWrapper[A @uc] => wrapper.underlying + case _ => new IterableWrapper(i) + } + + /** + * Converts a Scala `Iterable` to an immutable Java `Collection`. + * + * If the Scala `Iterable` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Collection` will be returned. + * + * @param i The Scala `Iterable` to be converted. + * @return A Java `Collection` view of the argument. + */ + def asJavaCollection[A](i: Iterable[A]): ju.Collection[A] = i match { + case null => null + case wrapper: JCollectionWrapper[A @uc] => wrapper.underlying + case _ => new IterableWrapper(i) + } + + /** + * Converts a Scala mutable `Buffer` to a Java List. + * + * The returned Java List is backed by the provided Scala `Buffer` and any side-effects of using + * it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Buffer` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `List` will be returned. + * + * @param b The Scala `Buffer` to be converted. + * @return A Java `List` view of the argument. + */ + def asJava[A](b: mutable.Buffer[A]): ju.List[A] = b match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new MutableBufferWrapper(b) + } + + /** + * Converts a Scala mutable `Seq` to a Java `List`. + * + * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Seq` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `List` will be returned. + * + * @param s The Scala `Seq` to be converted. + * @return A Java `List` view of the argument. + */ + def asJava[A](s: mutable.Seq[A]): ju.List[A] = s match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new MutableSeqWrapper(s) + } + + /** + * Converts a Scala `Seq` to a Java `List`. + * + * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. 
+ *
+ * If the Scala `Seq` was previously obtained from an implicit or explicit call of
+ * `asScala` then the original Java `List` will be returned.
+ *
+ * @param s The Scala `Seq` to be converted.
+ * @return A Java `List` view of the argument.
+ */
+ def asJava[A](s: Seq[A]): ju.List[A] = s match {
+ case null => null
+ case wrapper: JListWrapper[A @uc] => wrapper.underlying
+ case _ => new SeqWrapper(s)
+ }
+
+ /**
+ * Converts a Scala mutable `Set` to a Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit call of
+ * `asScala` then the original Java `Set` will be returned.
+ *
+ * @param s The Scala mutable `Set` to be converted.
+ * @return A Java `Set` view of the argument.
+ */
+ def asJava[A](s: mutable.Set[A]): ju.Set[A] = s match {
+ case null => null
+ case wrapper: JSetWrapper[A @uc] => wrapper.underlying
+ case _ => new MutableSetWrapper(s)
+ }
+
+ /**
+ * Converts a Scala `Set` to a Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit call of
+ * `asScala` then the original Java `Set` will be returned.
+ *
+ * @param s The Scala `Set` to be converted.
+ * @return A Java `Set` view of the argument.
+ */
+ def asJava[A](s: Set[A]): ju.Set[A] = s match {
+ case null => null
+ case wrapper: JSetWrapper[A @uc] => wrapper.underlying
+ case _ => new SetWrapper(s)
+ }
+
+ /**
+ * Converts a Scala mutable `Map` to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit call of
+ * `asScala` then the original Java `Map` will be returned.
+ *
+ * @param m The Scala mutable `Map` to be converted.
+ * @return A Java `Map` view of the argument.
+ */
+ def asJava[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = m match {
+ case null => null
+ case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying
+ case _ => new MutableMapWrapper(m)
+ }
+
+ /**
+ * Converts a Scala mutable `Map` to a Java `Dictionary`.
+ *
+ * The returned Java `Dictionary` is backed by the provided Scala `Map` and any
+ * side-effects of using it via the Java interface will be visible via the Scala interface and
+ * vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit call of
+ * `asScala` then the original Java `Dictionary` will be returned.
+ *
+ * @param m The Scala `Map` to be converted.
+ * @return A Java `Dictionary` view of the argument.
+ */
+ def asJavaDictionary[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = m match {
+ case null => null
+ case wrapper: JDictionaryWrapper[K @uc, V @uc] => wrapper.underlying
+ case _ => new DictionaryWrapper(m)
+ }
+
+ /**
+ * Converts a Scala `Map` to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it
+ * via the Java interface will be visible via the Scala interface and vice versa.
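The map converters behave the same way; a minimal sketch with hypothetical values:

```scala
import scala.collection.mutable
import scala.jdk.javaapi.CollectionConverters

val m = mutable.Map("a" -> 1)
val jm: java.util.Map[String, Int] = CollectionConverters.asJava(m)
jm.put("b", 2)        // the Java view writes through
assert(m("b") == 2)
```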
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit call of
+ * `asScala` then the original Java `Map` will be returned.
+ *
+ * @param m The Scala `Map` to be converted.
+ * @return A Java `Map` view of the argument.
+ */
+ def asJava[K, V](m: Map[K, V]): ju.Map[K, V] = m match {
+ case null => null
+ case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying
+ case _ => new MapWrapper(m)
+ }
+
+ /**
+ * Converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala `concurrent.Map` and any
+ * side-effects of using it via the Java interface will be visible via the Scala interface and
+ * vice versa.
+ *
+ * If the Scala `concurrent.Map` was previously obtained from an implicit or explicit call of
+ * `asScala` then the original Java `ConcurrentMap` will be returned.
+ *
+ * @param m The Scala `concurrent.Map` to be converted.
+ * @return A Java `ConcurrentMap` view of the argument.
+ */
+ def asJava[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = m match {
+ case null => null
+ case wrapper: JConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlying
+ case _ => new ConcurrentMapWrapper(m)
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala
new file mode 100644
index 000000000000..16b15c513a17
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala
@@ -0,0 +1,108 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package convert
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+/** Defines `asJava` extension methods, available through [[scala.jdk.CollectionConverters]]. */
+trait AsJavaExtensions {
+ import scala.jdk.javaapi.{CollectionConverters => conv}
+
+ implicit class IteratorHasAsJava[A](i: Iterator[A]) {
+ /** Converts a Scala `Iterator` to a Java `Iterator`, see
+ * [[AsJavaConverters.asJava[A](i:Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+ */
+ def asJava: ju.Iterator[A] = conv.asJava(i)
+
+ /** Converts a Scala `Iterator` to a Java `Enumeration`, see
+ * [[AsJavaConverters.asJavaEnumeration `scala.jdk.javaapi.CollectionConverters.asJavaEnumeration`]].
+ */
+ def asJavaEnumeration: ju.Enumeration[A] = conv.asJavaEnumeration(i)
+ }
+
+ implicit class IterableHasAsJava[A](i: Iterable[A]) {
+ /** Converts a Scala `Iterable` to a Java `Iterable`, see
+ * [[AsJavaConverters.asJava[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+ */
+ def asJava: jl.Iterable[A] = conv.asJava(i)
+
+ /** Converts a Scala `Iterable` to a Java `Collection`, see
+ * [[AsJavaConverters.asJavaCollection `scala.jdk.javaapi.CollectionConverters.asJavaCollection`]].
+ */
+ def asJavaCollection: ju.Collection[A] = conv.asJavaCollection(i)
+ }
+
+ implicit class BufferHasAsJava[A](b: mutable.Buffer[A]) {
+ /** Converts a Scala `Buffer` to a Java `List`, see
+ * [[AsJavaConverters.asJava[A](b:scala\.collection\.mutable\.Buffer[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
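A hedged sketch of how these extension methods read at a call site, with hypothetical values (the wildcard import brings all `asJava` decorations into scope):

```scala
import scala.jdk.CollectionConverters._

val jIterator: java.util.Iterator[String] = Iterator("a", "b").asJava
val jEnum: java.util.Enumeration[String] = Iterator("a", "b").asJavaEnumeration
val jColl: java.util.Collection[Int] = List(1, 2, 3).asJavaCollection
```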
+ */
+ def asJava: ju.List[A] = conv.asJava(b)
+ }
+
+ implicit class MutableSeqHasAsJava[A](s: mutable.Seq[A]) {
+ /** Converts a Scala `mutable.Seq` to a Java `List`, see
+ * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+ */
+ def asJava: ju.List[A] = conv.asJava(s)
+ }
+
+ implicit class SeqHasAsJava[A](s: Seq[A]) {
+ /** Converts a Scala `Seq` to a Java `List`, see
+ * [[AsJavaConverters.asJava[A](s:scala\.collection\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+ */
+ def asJava: ju.List[A] = conv.asJava(s)
+ }
+
+ implicit class MutableSetHasAsJava[A](s: mutable.Set[A]) {
+ /** Converts a Scala `mutable.Set` to a Java `Set`, see
+ * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+ */
+ def asJava: ju.Set[A] = conv.asJava(s)
+ }
+
+ implicit class SetHasAsJava[A](s: Set[A]) {
+ /** Converts a Scala `Set` to a Java `Set`, see
+ * [[AsJavaConverters.asJava[A](s:scala\.collection\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+ */
+ def asJava: ju.Set[A] = conv.asJava(s)
+ }
+
+ implicit class MutableMapHasAsJava[K, V](m: mutable.Map[K, V]) {
+ /** Converts a Scala `mutable.Map` to a Java `Map`, see
+ * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.mutable\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+ */
+ def asJava: ju.Map[K, V] = conv.asJava(m)
+
+ /** Converts a Scala `mutable.Map` to a Java `Dictionary`, see
+ * [[AsJavaConverters.asJavaDictionary `scala.jdk.javaapi.CollectionConverters.asJavaDictionary`]].
+ */
+ def asJavaDictionary: ju.Dictionary[K, V] = conv.asJavaDictionary(m)
+ }
+
+ implicit class MapHasAsJava[K, V](m: Map[K, V]) {
+ /** Converts a Scala `Map` to a Java `Map`, see
+ * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+ */
+ def asJava: ju.Map[K, V] = conv.asJava(m)
+ }
+
+ implicit class ConcurrentMapHasAsJava[K, V](m: concurrent.Map[K, V]) {
+ /** Converts a Scala `concurrent.Map` to a Java `ConcurrentMap`, see
+ * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.concurrent\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]].
+ */
+ def asJava: juc.ConcurrentMap[K, V] = conv.asJava(m)
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala
new file mode 100644
index 000000000000..30a28ae38147
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala
@@ -0,0 +1,207 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package convert
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+import scala.{unchecked => uc}
+
+/** Defines converter methods from Java to Scala collections.
+ * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object.
+ */
+trait AsScalaConverters {
+ import JavaCollectionWrappers._
+
+ /**
+ * Converts a Java `Iterator` to a Scala `Iterator`.
+ * + * The returned Scala `Iterator` is backed by the provided Java `Iterator` and any side-effects of + * using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Iterator` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Iterator` will be returned. + * + * @param i The Java `Iterator` to be converted. + * @return A Scala `Iterator` view of the argument. + */ + def asScala[A](i: ju.Iterator[A]): Iterator[A] = i match { + case null => null + case wrapper: IteratorWrapper[A @uc] => wrapper.underlying + case _ => new JIteratorWrapper(i) + } + + /** + * Converts a Java `Enumeration` to a Scala `Iterator`. + * + * The returned Scala `Iterator` is backed by the provided Java `Enumeration` and any side-effects + * of using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Enumeration` was previously obtained from an implicit or explicit call of + * `asJavaEnumeration` then the original Scala `Iterator` will be returned. + * + * @param e The Java `Enumeration` to be converted. + * @return A Scala `Iterator` view of the argument. + */ + def asScala[A](e: ju.Enumeration[A]): Iterator[A] = e match { + case null => null + case wrapper: IteratorWrapper[A @uc] => wrapper.underlying + case _ => new JEnumerationWrapper(e) + } + + /** + * Converts a Java `Iterable` to a Scala `Iterable`. + * + * The returned Scala `Iterable` is backed by the provided Java `Iterable` and any side-effects of + * using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Iterable` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Iterable` will be returned. + * + * @param i The Java `Iterable` to be converted. + * @return A Scala `Iterable` view of the argument. + */ + def asScala[A](i: jl.Iterable[A]): Iterable[A] = i match { + case null => null + case wrapper: IterableWrapper[A @uc] => wrapper.underlying + case _ => new JIterableWrapper(i) + } + + /** + * Converts a Java `Collection` to a Scala `Iterable`. + * + * If the Java `Collection` was previously obtained from an implicit or explicit call of + * `asJavaCollection` then the original Scala `Iterable` will be returned. + * + * @param c The Java `Collection` to be converted. + * @return A Scala `Iterable` view of the argument. + */ + def asScala[A](c: ju.Collection[A]): Iterable[A] = c match { + case null => null + case wrapper: IterableWrapper[A @uc] => wrapper.underlying + case _ => new JCollectionWrapper(c) + } + + /** + * Converts a Java `List` to a Scala mutable `Buffer`. + * + * The returned Scala `Buffer` is backed by the provided Java `List` and any side-effects of using + * it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `List` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Buffer` will be returned. + * + * @param l The Java `List` to be converted. + * @return A Scala mutable `Buffer` view of the argument. + */ + def asScala[A](l: ju.List[A]): mutable.Buffer[A] = l match { + case null => null + case wrapper: MutableBufferWrapper[A @uc] => wrapper.underlying + case _ => new JListWrapper(l) + } + + /** + * Converts a Java `Set` to a Scala mutable `Set`. 
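The Java-to-Scala direction produces live views as well; a minimal sketch with hypothetical values:

```scala
import scala.jdk.javaapi.CollectionConverters

val jList = new java.util.ArrayList[Int]
jList.add(1); jList.add(2)
val buf = CollectionConverters.asScala(jList)  // a mutable.Buffer[Int]
buf += 3
assert(jList.size == 3)  // the Buffer is a live view of the ArrayList
```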
+ * + * The returned Scala `Set` is backed by the provided Java `Set` and any side-effects of using it + * via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Set` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Set` will be returned. + * + * @param s The Java `Set` to be converted. + * @return A Scala mutable `Set` view of the argument. + */ + def asScala[A](s: ju.Set[A]): mutable.Set[A] = s match { + case null => null + case wrapper: MutableSetWrapper[A @uc] => wrapper.underlying + case _ => new JSetWrapper(s) + } + + /** + * Converts a Java `Map` to a Scala mutable `Map`. + * + * The returned Scala `Map` is backed by the provided Java `Map` and any side-effects of using it + * via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Map` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Map` will be returned. + * + * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), it is + * your responsibility to wrap all non-atomic operations with `underlying.synchronized`. + * This includes `get`, as `java.util.Map`'s API does not allow for an atomic `get` when `null` + * values may be present. + * + * @param m The Java `Map` to be converted. + * @return A Scala mutable `Map` view of the argument. + */ + def asScala[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = m match { + case null => null + case wrapper: MutableMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new JMapWrapper(m) + } + + /** + * Converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`. + * + * The returned Scala `ConcurrentMap` is backed by the provided Java `ConcurrentMap` and any + * side-effects of using it via the Scala interface will be visible via the Java interface and + * vice versa. + * + * If the Java `ConcurrentMap` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `ConcurrentMap` will be returned. + * + * @param m The Java `ConcurrentMap` to be converted. + * @return A Scala mutable `ConcurrentMap` view of the argument. + */ + def asScala[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = m match { + case null => null + case wrapper: ConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlyingConcurrentMap + case _ => new JConcurrentMapWrapper(m) + } + + /** + * Converts a Java `Dictionary` to a Scala mutable `Map`. + * + * The returned Scala `Map` is backed by the provided Java `Dictionary` and any side-effects of + * using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Dictionary` was previously obtained from an implicit or explicit call of + * `asJavaDictionary` then the original Scala `Map` will be returned. + * + * @param d The Java `Dictionary` to be converted. + * @return A Scala mutable `Map` view of the argument. + */ + def asScala[K, V](d: ju.Dictionary[K, V]): mutable.Map[K, V] = d match { + case null => null + case wrapper: DictionaryWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new JDictionaryWrapper(d) + } + + /** + * Converts a Java `Properties` to a Scala mutable `Map[String, String]`. + * + * The returned Scala `Map[String, String]` is backed by the provided Java `Properties` and any + * side-effects of using it via the Scala interface will be visible via the Java interface and + * vice versa. 
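As a small usage sketch for the `Properties` converter described above (hypothetical key and value):

```scala
import scala.jdk.javaapi.CollectionConverters

val props = new java.util.Properties
props.setProperty("lang", "scala")
val m = CollectionConverters.asScala(props)  // mutable.Map[String, String]
assert(m("lang") == "scala")
```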
+ * + * @param p The Java `Properties` to be converted. + * @return A Scala mutable `Map[String, String]` view of the argument. + */ + def asScala(p: ju.Properties): mutable.Map[String, String] = p match { + case null => null + case _ => new JPropertiesWrapper(p) + } +} diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala new file mode 100644 index 000000000000..39347dde903b --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala @@ -0,0 +1,93 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +/** Defines `asScala` extension methods, available through [[scala.jdk.CollectionConverters]]. */ +trait AsScalaExtensions { + import scala.jdk.javaapi.{CollectionConverters => conv} + + implicit class IteratorHasAsScala[A](i: ju.Iterator[A]) { + /** Converts a Java `Iterator` to a Scala `Iterator`, see + * [[AsScalaConverters.asScala[A](i:java\.util\.Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterator[A] = conv.asScala(i) + } + + implicit class EnumerationHasAsScala[A](e: ju.Enumeration[A]) { + /** Converts a Java `Enumeration` to a Scala `Iterator`, see + * [[AsScalaConverters.asScala[A](e:java\.util\.Enumeration[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterator[A] = conv.asScala(e) + } + + implicit class IterableHasAsScala[A](i: jl.Iterable[A]) { + /** Converts a Java `Iterable` to a Scala `Iterable`, see + * [[AsScalaConverters.asScala[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterable[A] = conv.asScala(i) + } + + implicit class CollectionHasAsScala[A](c: ju.Collection[A]) { + /** Converts a Java `Collection` to a Scala `Iterable`, see + * [[AsScalaConverters.asScala[A](c:java\.util\.Collection[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterable[A] = conv.asScala(c) + } + + implicit class ListHasAsScala[A](l: ju.List[A]) { + /** Converts a Java `List` to a Scala `Buffer`, see + * [[AsScalaConverters.asScala[A](l:java\.util\.List[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Buffer[A] = conv.asScala(l) + } + + implicit class SetHasAsScala[A](s: ju.Set[A]) { + /** Converts a Java `Set` to a Scala `Set`, see + * [[AsScalaConverters.asScala[A](s:java\.util\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Set[A] = conv.asScala(s) + } + + implicit class MapHasAsScala[K, V](m: ju.Map[K, V]) { + /** Converts a Java `Map` to a Scala `Map`, see + * [[AsScalaConverters.asScala[A,B](m:java\.util\.Map[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Map[K, V] = conv.asScala(m) + } + + implicit class ConcurrentMapHasAsScala[K, V](m: juc.ConcurrentMap[K, V]) { + /** Converts a Java `ConcurrentMap` to a Scala `concurrent.Map`, see + * [[AsScalaConverters.asScala[A,B](m:java\.util\.concurrent\.ConcurrentMap[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. 
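A hedged sketch of the `asScala` extension-method style, including the concurrent-map case (hypothetical values):

```scala
import scala.jdk.CollectionConverters._

val jMap = new java.util.concurrent.ConcurrentHashMap[String, Int]
val cm: scala.collection.concurrent.Map[String, Int] = jMap.asScala
cm.putIfAbsent("a", 1)
assert(jMap.get("a") == 1)  // writes through to the underlying ConcurrentHashMap
```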
+ */
+ def asScala: concurrent.Map[K, V] = conv.asScala(m)
+ }
+
+ implicit class DictionaryHasAsScala[K, V](d: ju.Dictionary[K, V]) {
+ /** Converts a Java `Dictionary` to a Scala `Map`, see
+ * [[AsScalaConverters.asScala[A,B](d:java\.util\.Dictionary[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]].
+ */
+ def asScala: mutable.Map[K, V] = conv.asScala(d)
+ }
+
+ implicit class PropertiesHasAsScala(i: ju.Properties) {
+ /** Converts a Java `Properties` to a Scala `Map`, see
+ * [[AsScalaConverters.asScala(p:java\.util\.Properties)* `scala.jdk.javaapi.CollectionConverters.asScala`]].
+ */
+ def asScala: mutable.Map[String, String] = conv.asScala(i)
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala
new file mode 100644
index 000000000000..05d63f9fdeee
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala
@@ -0,0 +1,181 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package convert
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+import scala.collection.JavaConverters._
+import scala.language.implicitConversions
+
+/** Defines implicit converter methods from Java to Scala collections. */
+@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
+trait ToScalaImplicits {
+ /** Implicitly converts a Java `Iterator` to a Scala `Iterator`.
+ * @see [[JavaConverters.asScalaIterator]]
+ */
+ implicit def `iterator asScala`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it)
+
+ /** Implicitly converts a Java `Enumeration` to a Scala `Iterator`.
+ * @see [[JavaConverters.enumerationAsScalaIterator]]
+ */
+ implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i)
+
+ /** Implicitly converts a Java `Iterable` to a Scala `Iterable`.
+ * @see [[JavaConverters.iterableAsScalaIterable]]
+ */
+ implicit def `iterable AsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i)
+
+ /** Implicitly converts a Java `Collection` to a Scala `Iterable`.
+ * @see [[JavaConverters.collectionAsScalaIterable]]
+ */
+ implicit def `collection AsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i)
+
+ /** Implicitly converts a Java `List` to a Scala mutable `Buffer`.
+ * @see [[JavaConverters.asScalaBuffer]]
+ */
+ implicit def `list asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l)
+
+ /** Implicitly converts a Java `Set` to a Scala mutable `Set`.
+ * @see [[JavaConverters.asScalaSet]]
+ */
+ implicit def `set asScala`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s)
+
+ /** Implicitly converts a Java `Map` to a Scala mutable `Map`.
+ * @see [[JavaConverters.mapAsScalaMap]]
+ */
+ implicit def `map AsScala`[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = mapAsScalaMap(m)
+
+ /** Implicitly converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`.
+ * @see [[JavaConverters.mapAsScalaConcurrentMap]] + */ + implicit def `map AsScalaConcurrentMap`[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = mapAsScalaConcurrentMap(m) + + /** Implicitly converts a Java `Dictionary` to a Scala mutable `Map`. + * @see [[JavaConverters.dictionaryAsScalaMap]] + */ + implicit def `dictionary AsScalaMap`[K, V](p: ju.Dictionary[K, V]): mutable.Map[K, V] = dictionaryAsScalaMap(p) + + /** Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`. + * @see [[JavaConverters.propertiesAsScalaMap]] + */ + implicit def `properties AsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p) +} + +/** Defines implicit conversions from Scala to Java collections. */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +trait ToJavaImplicits { + /** Implicitly converts a Scala `Iterator` to a Java `Iterator`. + * @see [[JavaConverters.asJavaIterator]] + */ + implicit def `iterator asJava`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it) + + /** Implicitly converts a Scala `Iterator` to a Java `Enumeration`. + * @see [[JavaConverters.asJavaEnumeration]] + */ + implicit def `enumeration asJava`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it) + + /** Implicitly converts a Scala `Iterable` to a Java `Iterable`. + * @see [[JavaConverters.asJavaIterable]] + */ + implicit def `iterable asJava`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i) + + /** Implicitly converts a Scala `Iterable` to an immutable Java `Collection`. + * @see [[JavaConverters.asJavaCollection]] + */ + implicit def `collection asJava`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it) + + /** Implicitly converts a Scala mutable `Buffer` to a Java `List`. + * @see [[JavaConverters.bufferAsJavaList]] + */ + implicit def `buffer AsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b) + + /** Implicitly converts a Scala mutable `Seq` to a Java `List`. + * @see [[JavaConverters.mutableSeqAsJavaList]] + */ + implicit def `mutableSeq AsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq) + + /** Implicitly converts a Scala `Seq` to a Java `List`. + * @see [[JavaConverters.seqAsJavaList]] + */ + implicit def `seq AsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq) + + /** Implicitly converts a Scala mutable `Set` to a Java `Set`. + * @see [[JavaConverters.mutableSetAsJavaSet]] + */ + implicit def `mutableSet AsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s) + + /** Implicitly converts a Scala `Set` to a Java `Set`. + * @see [[JavaConverters.setAsJavaSet]] + */ + implicit def `set AsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s) + + /** Implicitly converts a Scala mutable `Map` to a Java `Map`. + * @see [[JavaConverters.mutableMapAsJavaMap]] + */ + implicit def `mutableMap AsJavaMap`[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = mutableMapAsJavaMap(m) + + /** Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. + * @see [[JavaConverters.asJavaDictionary]] + */ + implicit def `dictionary asJava`[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = asJavaDictionary(m) + + /** Implicitly converts a Scala `Map` to a Java `Map`. + * @see [[JavaConverters.mapAsJavaMap]] + */ + implicit def `map AsJavaMap`[K, V](m: Map[K, V]): ju.Map[K, V] = mapAsJavaMap(m) + + /** Implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. 
+ * @see [[JavaConverters.mapAsJavaConcurrentMap]] + */ + implicit def `map AsJavaConcurrentMap`[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = mapAsJavaConcurrentMap(m) +} + +/** + * Convenience for miscellaneous implicit conversions from Scala to Java collections API. + * + * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. + * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. + */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object ImplicitConversionsToJava extends ToJavaImplicits + +/** + * Convenience for miscellaneous implicit conversions from Java to Scala collections API. + * + * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. + * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. + */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object ImplicitConversionsToScala extends ToScalaImplicits + +/** + * Convenience for miscellaneous implicit conversions between Java and Scala collections API. + * + * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. + * Implicit conversions may cause unexpected issues. Example: + * + * {{{ + * import collection.convert.ImplicitConversions._ + * case class StringBox(s: String) + * val m = Map(StringBox("one") -> "uno") + * m.get("one") + * }}} + * + * The above example returns `null` instead of producing a type error at compile-time. The map is + * implicitly converted to a `java.util.Map` which provides a method `get(x: AnyRef)`. + */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object ImplicitConversions extends ToScalaImplicits with ToJavaImplicits diff --git a/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala new file mode 100644 index 000000000000..29c3dcbac5db --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala @@ -0,0 +1,614 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
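For contrast with the `StringBox` pitfall above: under the explicit converters the same mistake fails to compile, because no implicit conversion of the map itself is in scope. A hedged sketch:

```scala
import scala.jdk.CollectionConverters._

case class StringBox(s: String)
val m = Map(StringBox("one") -> "uno")
// m.get("one")      // does not compile: found String, required StringBox
val jm = m.asJava    // the Scala-to-Java conversion is now an explicit, visible step
```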
+ */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.util.{NavigableMap} +import java.{lang => jl, util => ju} + +import scala.jdk.CollectionConverters._ +import scala.util.Try +import scala.util.chaining._ +import scala.util.control.ControlThrowable + +/** Wrappers for exposing Scala collections as Java collections and vice-versa */ +@SerialVersionUID(3L) +// not private[convert] because `WeakHashMap` uses JMapWrapper +private[collection] object JavaCollectionWrappers extends Serializable { + @SerialVersionUID(3L) + class IteratorWrapper[A](val underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] with Serializable { + def hasNext = underlying.hasNext + def next() = underlying.next() + def hasMoreElements = underlying.hasNext + def nextElement() = underlying.next() + override def remove() = throw new UnsupportedOperationException + } + + @SerialVersionUID(3L) + class JIteratorWrapper[A](val underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] with Serializable { + def hasNext = underlying.hasNext + def next() = underlying.next + } + + @SerialVersionUID(3L) + class JEnumerationWrapper[A](val underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] with Serializable { + def hasNext = underlying.hasMoreElements + def next() = underlying.nextElement + } + + trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] { + val underlying: Iterable[A] + def size = underlying.size + override def iterator = new IteratorWrapper(underlying.iterator) + override def isEmpty = underlying.isEmpty + } + + @SerialVersionUID(3L) + class IterableWrapper[A](val underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] with Serializable { + import scala.runtime.Statics._ + override def equals(other: Any): Boolean = + other match { + case other: IterableWrapper[_] => underlying.equals(other.underlying) + case _ => false + } + override def hashCode = finalizeHash(mix(mix(0xcafebabe, "IterableWrapper".hashCode), anyHash(underlying)), 1) + } + + @SerialVersionUID(3L) + class JIterableWrapper[A](val underlying: jl.Iterable[A]) + extends AbstractIterable[A] + with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] + with Serializable { + def iterator = underlying.iterator.asScala + override def iterableFactory = mutable.ArrayBuffer + override def isEmpty: Boolean = !underlying.iterator().hasNext + } + + @SerialVersionUID(3L) + class JCollectionWrapper[A](val underlying: ju.Collection[A]) + extends AbstractIterable[A] + with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] + with Serializable { + def iterator = underlying.iterator.asScala + override def size = underlying.size + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def isEmpty = underlying.isEmpty + override def iterableFactory = mutable.ArrayBuffer + } + + @SerialVersionUID(3L) + class SeqWrapper[A](val underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + } + + @SerialVersionUID(3L) + class MutableSeqWrapper[A](val underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { + val p = underlying(i) + underlying(i) = elem + p + } + } + + @SerialVersionUID(3L) + class MutableBufferWrapper[A](val underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with 
IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } + override def add(elem: A) = { underlying += elem; true } + override def remove(i: Int) = underlying remove i + } + + @SerialVersionUID(3L) + class JListWrapper[A](val underlying: ju.List[A]) + extends mutable.AbstractBuffer[A] + with SeqOps[A, mutable.Buffer, mutable.Buffer[A]] + with StrictOptimizedSeqOps[A, mutable.Buffer, mutable.Buffer[A]] + with IterableFactoryDefaults[A, mutable.Buffer] + with Serializable { + def length = underlying.size + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def isEmpty = underlying.isEmpty + override def iterator: Iterator[A] = underlying.iterator.asScala + def apply(i: Int) = underlying.get(i) + def update(i: Int, elem: A) = underlying.set(i, elem) + def prepend(elem: A) = { underlying.subList(0, 0) add elem; this } + def addOne(elem: A): this.type = { underlying add elem; this } + def insert(idx: Int,elem: A): Unit = underlying.subList(0, idx).add(elem) + def insertAll(i: Int, elems: IterableOnce[A]) = { + val ins = underlying.subList(0, i) + elems.iterator.foreach(ins.add(_)) + } + def remove(i: Int) = underlying.remove(i) + def clear() = underlying.clear() + // Note: Clone cannot just call underlying.clone because in Java, only specific collections + // expose clone methods. Generically, they're protected. + override def clone(): JListWrapper[A] = new JListWrapper(new ju.ArrayList[A](underlying)) + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { + remove(from, replaced) + insertAll(from, patch) + this + } + def remove(from: Int, n: Int): Unit = underlying.subList(from, from+n).clear() + override def iterableFactory = mutable.ArrayBuffer + override def subtractOne(elem: A): this.type = { underlying.remove(elem.asInstanceOf[AnyRef]); this } + } + + @SerialVersionUID(3L) + class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self => + // Note various overrides to avoid performance gotchas. 
+ override def contains(o: Object): Boolean = { + try { underlying.contains(o.asInstanceOf[A]) } + catch { case cce: ClassCastException => false } + } + override def isEmpty = underlying.isEmpty + def size = underlying.size + def iterator = new ju.Iterator[A] { + val ui = underlying.iterator + var prev: Option[A] = None + def hasNext = ui.hasNext + def next = { val e = ui.next(); prev = Some(e); e } + override def remove() = prev match { + case Some(e) => + underlying match { + case ms: mutable.Set[a] => + ms remove e + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + + @SerialVersionUID(3L) + class MutableSetWrapper[A](val underlying: mutable.Set[A]) extends SetWrapper[A](underlying) with Serializable { + override def add(elem: A) = { + val sz = underlying.size + underlying += elem + sz < underlying.size + } + override def remove(elem: AnyRef) = + try underlying.remove(elem.asInstanceOf[A]) + catch { case ex: ClassCastException => false } + override def clear() = underlying.clear() + } + + @SerialVersionUID(3L) + class JSetWrapper[A](val underlying: ju.Set[A]) + extends mutable.AbstractSet[A] + with mutable.SetOps[A, mutable.Set, mutable.Set[A]] + with StrictOptimizedSetOps[A, mutable.Set, mutable.Set[A]] + with Serializable { + + override def size: Int = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + def iterator: Iterator[A] = underlying.iterator.asScala + + def contains(elem: A): Boolean = underlying.contains(elem) + + def addOne(elem: A): this.type = { underlying add elem; this } + def subtractOne(elem: A): this.type = { underlying remove elem; this } + + override def remove(elem: A): Boolean = underlying remove elem + + override def clear(): Unit = { + underlying.clear() + } + + override def empty: mutable.Set[A] = new JSetWrapper(new ju.HashSet[A]) + + // Note: Clone cannot just call underlying.clone because in Java, only specific collections + // expose clone methods. Generically, they're protected. 
+ override def clone(): mutable.Set[A] = new JSetWrapper[A](new ju.LinkedHashSet[A](underlying)) + + override def iterableFactory: IterableFactory[mutable.Set] = mutable.HashSet + + override def filterInPlace(p: A => Boolean): this.type = { + if (underlying.size() > 0) underlying.removeIf(!p(_)) + this + } + } + + @SerialVersionUID(3L) + class MapWrapper[K, V](underlying: Map[K, V]) extends ju.AbstractMap[K, V] with Serializable { self => + override def size = underlying.size + + override def get(key: AnyRef): V = try { + underlying get key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + + override def entrySet: ju.Set[ju.Map.Entry[K, V]] = new ju.AbstractSet[ju.Map.Entry[K, V]] { + def size = self.size + + def iterator = new ju.Iterator[ju.Map.Entry[K, V]] { + val ui = underlying.iterator + var prev : Option[K] = None + + def hasNext = ui.hasNext + + def next() = { + val (k, v) = ui.next() + prev = Some(k) + new ju.Map.Entry[K, V] { + def getKey = k + def getValue = v + def setValue(v1 : V) = self.put(k, v1) + + // It's important that this implementation conform to the contract + // specified in the javadocs of java.util.Map.Entry.hashCode + // + // See https://github.com/scala/bug/issues/10663 + override def hashCode = { + (if (k == null) 0 else k.hashCode()) ^ + (if (v == null) 0 else v.hashCode()) + } + + override def equals(other: Any) = other match { + case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue + case _ => false + } + } + } + + override def remove(): Unit = { + prev match { + case Some(k) => + underlying match { + case mm: mutable.Map[a, _] => + mm -= k + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + } + + override def containsKey(key: AnyRef): Boolean = try { + // Note: Subclass of collection.Map with specific key type may redirect generic + // contains to specific contains, which will throw a ClassCastException if the + // wrong type is passed. This is why we need a type cast to A inside a try/catch. 
+ underlying.contains(key.asInstanceOf[K]) + } catch { + case ex: ClassCastException => false + } + } + + @SerialVersionUID(3L) + class MutableMapWrapper[K, V](val underlying: mutable.Map[K, V]) extends MapWrapper[K, V](underlying) { + override def put(k: K, v: V) = underlying.put(k, v) match { + case Some(v1) => v1 + case None => null.asInstanceOf[V] + } + + override def remove(k: AnyRef): V = try { + underlying remove k.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + + override def clear() = underlying.clear() + } + + @SerialVersionUID(3L) + abstract class AbstractJMapWrapper[K, V] + extends mutable.AbstractMap[K, V] + with JMapWrapperLike[K, V, mutable.Map, mutable.Map[K, V]] with Serializable + + trait JMapWrapperLike[K, V, +CC[X, Y] <: mutable.MapOps[X, Y, CC, _], +C <: mutable.MapOps[K, V, CC, C]] + extends mutable.MapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), mutable.Iterable, C] { + + def underlying: ju.Map[K, V] + + override def size = underlying.size + + // support Some(null) if currently bound to null + def get(k: K) = { + val v = underlying.get(k) + if (v != null) + Some(v) + else if (underlying.containsKey(k)) + Some(null.asInstanceOf[V]) + else + None + } + + override def getOrElseUpdate(key: K, op: => V): V = + underlying.computeIfAbsent(key, _ => op) match { + case null => update(key, null.asInstanceOf[V]); null.asInstanceOf[V] + case v => v + } + + def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: K): this.type = { underlying remove key; this } + + // support Some(null) if currently bound to null + override def put(k: K, v: V): Option[V] = + if (v == null) { + val present = underlying.containsKey(k) + val result = underlying.put(k, v) + if (present) Some(result) else None + } else { + var result: Option[V] = None + def recompute(k0: K, v0: V): V = v.tap(_ => + if (v0 != null) result = Some(v0) + else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) + ) + underlying.compute(k, recompute) + result + } + + override def update(k: K, v: V): Unit = underlying.put(k, v) + + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + def remap(k: K, v: V): V = + remappingFunction(Option(v)) match { + case Some(null) => throw PutNull + case Some(x) => x + case None => null.asInstanceOf[V] + } + try Option(underlying.compute(key, remap)) + catch { + case PutNull => update(key, null.asInstanceOf[V]); Some(null.asInstanceOf[V]) + } + } + + // support Some(null) if currently bound to null + override def remove(k: K): Option[V] = { + var result: Option[V] = None + def recompute(k0: K, v0: V): V = { + if (v0 != null) result = Some(v0) + else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) + null.asInstanceOf[V] + } + underlying.compute(k, recompute) + result + } + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { val e = ui.next(); (e.getKey, e.getValue) } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val i = underlying.entrySet().iterator() + while (i.hasNext) { + val entry = i.next() + f(entry.getKey, entry.getValue) + } + } + + override def clear() = underlying.clear() + + } + + /** Wraps a Java map as a Scala one. 
If the map is to support concurrent access, + * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized + * (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility + * to wrap all non-atomic operations with `underlying.synchronized`. + * This includes `get`, as `java.util.Map`'s API does not allow for an + * atomic `get` when `null` values may be present. + */ + @SerialVersionUID(3L) + class JMapWrapper[K, V](val underlying : ju.Map[K, V]) + extends AbstractJMapWrapper[K, V] with Serializable { + + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def empty = new JMapWrapper(new ju.HashMap[K, V]) + } + + @SerialVersionUID(3L) + class ConcurrentMapWrapper[K, V](underlying: concurrent.Map[K, V]) extends MutableMapWrapper[K, V](underlying) with juc.ConcurrentMap[K, V] { + + def underlyingConcurrentMap: concurrent.Map[K, V] = underlying + + override def putIfAbsent(k: K, v: V) = underlying.putIfAbsent(k, v) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + + override def remove(k: AnyRef, v: AnyRef) = try { + underlying.remove(k.asInstanceOf[K], v.asInstanceOf[V]) + } catch { + case ex: ClassCastException => + false + } + + override def replace(k: K, v: V): V = underlying.replace(k, v) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + + override def replace(k: K, oldval: V, newval: V) = underlying.replace(k, oldval, newval) + } + + /** Wraps a concurrent Java map as a Scala one. Single-element concurrent + * access is supported; multi-element operations such as maps and filters + * are not guaranteed to be atomic. + */ + @SerialVersionUID(3L) + class JConcurrentMapWrapper[K, V](val underlying: juc.ConcurrentMap[K, V]) + extends AbstractJMapWrapper[K, V] + with concurrent.Map[K, V] { + + override def get(k: K) = Option(underlying get k) + + override def getOrElseUpdate(key: K, op: => V): V = + underlying.computeIfAbsent(key, _ => op) match { + case null => super/*[concurrent.Map]*/.getOrElseUpdate(key, op) + case v => v + } + + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[K, V]) + + def putIfAbsent(k: K, v: V): Option[V] = Option(underlying.putIfAbsent(k, v)) + + def remove(k: K, v: V): Boolean = underlying.remove(k, v) + + def replace(k: K, v: V): Option[V] = Option(underlying.replace(k, v)) + + def replace(k: K, oldvalue: V, newvalue: V): Boolean = underlying.replace(k, oldvalue, newvalue) + + override def lastOption: Option[(K, V)] = + underlying match { + case nav: NavigableMap[K @unchecked, V @unchecked] => Option(nav.lastEntry).map(e => (e.getKey, e.getValue)) + case _ if isEmpty => None + case _ => Try(last).toOption + } + + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + def remap(k: K, v: V): V = + remappingFunction(Option(v)) match { + case Some(null) => throw PutNull // see scala/scala#10129 + case Some(x) => x + case None => null.asInstanceOf[V] + } + try Option(underlying.compute(key, remap)) + catch { + case PutNull => super/*[concurrent.Map]*/.updateWith(key)(remappingFunction) + } + } + } + + @SerialVersionUID(3L) + class DictionaryWrapper[K, V](val underlying: mutable.Map[K, V]) extends ju.Dictionary[K, V] with Serializable { + def size: Int = underlying.size + def isEmpty: Boolean = underlying.isEmpty + def 
keys: ju.Enumeration[K] = underlying.keysIterator.asJavaEnumeration + def elements: ju.Enumeration[V] = underlying.valuesIterator.asJavaEnumeration + def get(key: AnyRef) = try { + underlying get key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + def put(key: K, value: V): V = underlying.put(key, value) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + override def remove(key: AnyRef) = try { + underlying remove key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + } + + @SerialVersionUID(3L) + class JDictionaryWrapper[K, V](val underlying: ju.Dictionary[K, V]) extends mutable.AbstractMap[K, V] with Serializable { + override def size: Int = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + + def get(k: K) = Option(underlying get k) + + def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: K): this.type = { underlying remove key; this } + + override def put(k: K, v: V): Option[V] = Option(underlying.put(k, v)) + + override def update(k: K, v: V): Unit = { underlying.put(k, v) } + + override def remove(k: K): Option[V] = Option(underlying remove k) + def iterator = underlying.keys.asScala map (k => (k, underlying get k)) + + override def clear() = iterator.foreach(entry => underlying.remove(entry._1)) + + override def mapFactory = mutable.HashMap + } + + @SerialVersionUID(3L) + class JPropertiesWrapper(underlying: ju.Properties) + extends mutable.AbstractMap[String, String] + with mutable.MapOps[String, String, mutable.Map, mutable.Map[String, String]] + with StrictOptimizedMapOps[String, String, mutable.Map, mutable.Map[String, String]] + with StrictOptimizedIterableOps[(String, String), mutable.Iterable, mutable.Map[String, String]] + with Serializable { + + override def size = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = size + def get(k: String) = { + val v = underlying get k + if (v != null) Some(v.asInstanceOf[String]) else None + } + + def addOne(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: String): this.type = { underlying remove key; this } + + override def put(k: String, v: String): Option[String] = { + val r = underlying.put(k, v) + if (r != null) Some(r.asInstanceOf[String]) else None + } + + override def update(k: String, v: String): Unit = { underlying.put(k, v) } + + override def remove(k: String): Option[String] = { + val r = underlying remove k + if (r != null) Some(r.asInstanceOf[String]) else None + } + + def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { + val e = ui.next() + (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) + } + } + + override def clear() = underlying.clear() + + override def empty = new JPropertiesWrapper(new ju.Properties) + + def getProperty(key: String) = underlying.getProperty(key) + + def getProperty(key: String, defaultValue: String) = + underlying.getProperty(key, defaultValue) + + def setProperty(key: String, value: String) = + underlying.setProperty(key, value) + + override def mapFactory = mutable.HashMap + } + + /** Thrown when 
certain Map operations attempt to put a null value. */ + private val PutNull = new ControlThrowable {} +} diff --git a/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala new file mode 100644 index 000000000000..cdeea62fb5ed --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala @@ -0,0 +1,480 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert + +import java.util.Spliterator +import java.util.stream._ +import java.{lang => jl} + +import scala.annotation.implicitNotFound +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +import scala.collection.convert.StreamExtensions.{AccumulatorFactoryInfo, StreamShape, StreamUnboxer} +import scala.jdk.CollectionConverters._ +import scala.jdk._ + +/** Defines extension methods to create Java Streams for Scala collections, available through + * [[scala.jdk.javaapi.StreamConverters]]. + */ +trait StreamExtensions { + // collections + + implicit class IterableHasSeqStream[A](cc: IterableOnce[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = + s.fromStepper(cc.stepper, par = false) + } + + // Not `CC[X] <: IterableOnce[X]`, but `C` with an extra constraint, to support non-parametric classes like IntAccumulator + implicit class IterableNonGenericHasParStream[A, C <: IterableOnce[_]](c: C)(implicit ev: C <:< IterableOnce[A]) { + private type IterableOnceWithEfficientStepper = IterableOnce[A] { + def stepper[S <: Stepper[_]](implicit shape : StepperShape[A, S]) : S with EfficientSplit + } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[A, S, St], + st: StepperShape[A, St], + @implicitNotFound("`parStream` can only be called on collections where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: C <:< IterableOnceWithEfficientStepper): S = + s.fromStepper(ev(c).stepper, par = true) + } + + // maps + + implicit class MapHasSeqKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[K, S, St], st: StepperShape[K, St]): S = + s.fromStepper(cc.keyStepper, par = false) + + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the values of this map. 
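A short sketch of these stream extensions as they are exposed through `scala.jdk.StreamConverters` (hypothetical values; primitive element types yield the specialized stream variants):

```scala
import scala.jdk.StreamConverters._

val ints: java.util.stream.IntStream = Vector(1, 2, 3).asJavaSeqStream
val par = Vector(1, 2, 3).asJavaParStream              // Vector steppers support EfficientSplit
val keys = Map(1 -> "a", 2 -> "b").asJavaSeqKeyStream  // IntStream over the keys
```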
If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[V, S, St], st: StepperShape[V, St]): S = + s.fromStepper(cc.valueStepper, par = false) + + // The asJavaSeqStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[(K, V), S, St], st: StepperShape[(K, V), St]): S = + s.fromStepper(cc.stepper, par = false) + } + + + implicit class MapHasParKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + private type MapOpsWithEfficientKeyStepper = collection.MapOps[K, V, collection.Map, _] { def keyStepper[S <: Stepper[_]](implicit shape : StepperShape[K, S]) : S with EfficientSplit } + private type MapOpsWithEfficientValueStepper = collection.MapOps[K, V, collection.Map, _] { def valueStepper[S <: Stepper[_]](implicit shape : StepperShape[V, S]) : S with EfficientSplit } + private type MapOpsWithEfficientStepper = collection.MapOps[K, V, collection.Map, _] { def stepper[S <: Stepper[_]](implicit shape : StepperShape[(K, V), S]) : S with EfficientSplit } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[K, S, St], + st: StepperShape[K, St], + @implicitNotFound("parKeyStream can only be called on maps where `keyStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientKeyStepper): S = + s.fromStepper(cc.keyStepper, par = true) + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[V, S, St], + st: StepperShape[V, St], + @implicitNotFound("parValueStream can only be called on maps where `valueStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientValueStepper): S = + s.fromStepper(cc.valueStepper, par = true) + + // The asJavaParStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[(K, V), S, St], + st: StepperShape[(K, V), St], + @implicitNotFound("parStream can only be called on maps where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientStepper): S = + s.fromStepper(cc.stepper, par = true) + } + + // steppers + + implicit class StepperHasSeqStream[A](stepper: Stepper[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this stepper. 
If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] => st.seqUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = false) + } + } + + implicit class StepperHasParStream[A](stepper: Stepper[A] with EfficientSplit) { + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] with EfficientSplit => st.parUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = true) + } + } + + // arrays + // uses the JDK array spliterators (`DoubleArraySpliterator`). users can also call + // `array.stepper.seqStream`, which then uses the Scala steppers (`DoubleArrayStepper`). the + // steppers are also available on byte/short/char/float arrays (`WidenedByteArrayStepper`), + // JDK spliterators only for double/int/long/reference. + + implicit class DoubleArrayHasSeqParStream(a: Array[Double]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = asJavaSeqStream.parallel + } + + implicit class IntArrayHasSeqParStream(a: Array[Int]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = asJavaSeqStream.parallel + } + + implicit class LongArrayHasSeqParStream(a: Array[Long]) { + /** Create a sequential [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaSeqStream: LongStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaParStream: LongStream = asJavaSeqStream.parallel + } + + implicit class AnyArrayHasSeqParStream[A <: AnyRef](a: Array[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this array. */ + def asJavaSeqStream: Stream[A] = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this array. */ + def asJavaParStream: Stream[A] = asJavaSeqStream.parallel + } + + implicit class ByteArrayHasSeqParStream(a: Array[Byte]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class ShortArrayHasSeqParStream(a: Array[Short]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. 
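+    * For example, `Array[Short](1, 2).asJavaSeqStream` yields an `IntStream` of widened
+    * values (editor's note: byte/short/char/float arrays go through the widening Scala
+    * steppers rather than the JDK array spliterators, as the comment above explains).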
*/ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class CharArrayHasSeqParStream(a: Array[Char]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class FloatArrayHasSeqParStream(a: Array[Float]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = a.stepper.asJavaParStream + } + + + + // strings + + implicit class StringHasSeqParStream(s: String) { + /** + * A sequential stream on the characters of a string, same as [[asJavaSeqCharStream]]. See also + * [[asJavaSeqCodePointStream]]. + */ + def asJavaSeqStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ false) + /** + * A parallel stream on the characters of a string, same as [[asJavaParCharStream]]. See also + * [[asJavaParCodePointStream]]. + */ + def asJavaParStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ true) + + /** A sequential stream on the characters of a string. See also [[asJavaSeqCodePointStream]]. */ + def asJavaSeqCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ false) + /** A parallel stream on the characters of a string. See also [[asJavaParCodePointStream]]. */ + def asJavaParCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ true) + + /** A sequential stream on the code points of a string. See also [[asJavaSeqCharStream]]. */ + def asJavaSeqCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ false) + /** A parallel stream on the code points of a string. See also [[asJavaParCharStream]]. */ + def asJavaParCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ true) + } + + // toScala for streams + + implicit class StreamHasToScala[A](stream: Stream[A]) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts streams of boxed integers, longs or + * doubles to the primitive accumulators ([[scala.jdk.IntAccumulator]], etc.). + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well.
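+   *
+   * Illustrative usage (editor's sketch; assumes the `scala.jdk.StreamConverters._`
+   * import through which these extension methods are provided):
+   * {{{
+   *   import scala.jdk.StreamConverters._
+   *   val list: List[String] =
+   *     java.util.stream.Stream.of("a", "b", "c").toScala(List)             // sequential: direct conversion
+   *   val acc: scala.jdk.AnyAccumulator[String] =
+   *     java.util.stream.Stream.of("a", "b", "c").parallel.toScala(scala.jdk.Accumulator) // built in parallel
+   * }}}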
+ */ + def toScala[C1](factory: collection.Factory[A, C1])(implicit info: AccumulatorFactoryInfo[A, C1]): C1 = { + + def anyAcc = stream.collect(AnyAccumulator.supplier[A], AnyAccumulator.adder[A], AnyAccumulator.merger[A]) + if (info.companion == AnyAccumulator) anyAcc.asInstanceOf[C1] + else if (info.companion == IntAccumulator) stream.asInstanceOf[Stream[Int]].collect(IntAccumulator.supplier, IntAccumulator.boxedAdder, IntAccumulator.merger).asInstanceOf[C1] + else if (info.companion == LongAccumulator) stream.asInstanceOf[Stream[Long]].collect(LongAccumulator.supplier, LongAccumulator.boxedAdder, LongAccumulator.merger).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) stream.asInstanceOf[Stream[Double]].collect(DoubleAccumulator.supplier, DoubleAccumulator.boxedAdder, DoubleAccumulator.merger).asInstanceOf[C1] + else if (stream.isParallel) anyAcc.to(factory) + else factory.fromSpecific(stream.iterator.asScala) + } + + /** Convert a generic Java Stream wrapping a primitive type to a corresponding primitive + * Stream. + */ + def asJavaPrimitiveStream[S](implicit unboxer: StreamUnboxer[A, S]): S = unboxer(stream) + } + + implicit class IntStreamHasToScala(stream: IntStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `IntStream` to a primitive + * [[scala.jdk.IntAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[C1](factory: collection.Factory[Int, C1])(implicit info: AccumulatorFactoryInfo[Int, C1]): C1 = { + def intAcc = stream.collect(IntAccumulator.supplier, IntAccumulator.adder, IntAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Int], AnyAccumulator.unboxedIntAdder, AnyAccumulator.merger[Int]).asInstanceOf[C1] + else if (info.companion == IntAccumulator) intAcc.asInstanceOf[C1] + else if (stream.isParallel) intAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Int]].asScala) + } + } + + implicit class LongStreamHasToScala(stream: LongStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `LongStream` to a primitive + * [[scala.jdk.LongAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well.
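+   *
+   * For example (editor's sketch, assuming `scala.jdk.StreamConverters._` is in scope):
+   * {{{
+   *   import scala.jdk.StreamConverters._
+   *   val v: Vector[Long] = java.util.stream.LongStream.range(0L, 10L).toScala(Vector)
+   *   val acc: scala.jdk.LongAccumulator =
+   *     java.util.stream.LongStream.range(0L, 10L).parallel.toScala(scala.jdk.Accumulator)
+   * }}}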
+ */ + def toScala[C1](factory: collection.Factory[Long, C1])(implicit info: AccumulatorFactoryInfo[Long, C1]): C1 = { + def longAcc = stream.collect(LongAccumulator.supplier, LongAccumulator.adder, LongAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Long], AnyAccumulator.unboxedLongAdder, AnyAccumulator.merger[Long]).asInstanceOf[C1] + else if (info.companion == LongAccumulator) longAcc.asInstanceOf[C1] + else if (stream.isParallel) longAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Long]].asScala) + } + } + + implicit class DoubleStreamHasToScala(stream: DoubleStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `DoubleStream` to a primitive + * [[scala.jdk.DoubleAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[C1](factory: collection.Factory[Double, C1])(implicit info: AccumulatorFactoryInfo[Double, C1]): C1 = { + def doubleAcc = stream.collect(DoubleAccumulator.supplier, DoubleAccumulator.adder, DoubleAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Double], AnyAccumulator.unboxedDoubleAdder, AnyAccumulator.merger[Double]).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) doubleAcc.asInstanceOf[C1] + else if (stream.isParallel) doubleAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Double]].asScala) + } + } +} + +object StreamExtensions { + /** An implicit StreamShape instance connects element types with the corresponding specialized + * Stream and Stepper types. This is used in `asJavaStream` extension methods to create + * generic or primitive streams according to the element type.
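+   *
+   * Sketch of the connection (an editor's illustration, assuming the
+   * `scala.jdk.StreamConverters._` import):
+   * {{{
+   *   import scala.jdk.StreamConverters._
+   *   val is: java.util.stream.IntStream      = Vector(1, 2, 3).asJavaSeqStream   // via intStreamShape
+   *   val ss: java.util.stream.Stream[String] = Vector("a", "b").asJavaSeqStream  // via anyStreamShape
+   * }}}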
+ */ + sealed trait StreamShape[T, S <: BaseStream[_, _], St <: Stepper[_]] { + final def fromStepper(st: St, par: Boolean): S = mkStream(st, par) + protected def mkStream(st: St, par: Boolean): S + } + + object StreamShape extends StreamShapeLowPriority1 { + // primitive + implicit val intStreamShape : StreamShape[Int , IntStream , IntStepper] = mkIntStreamShape[Int] + implicit val longStreamShape : StreamShape[Long , LongStream , LongStepper] = mkLongStreamShape[Long] + implicit val doubleStreamShape: StreamShape[Double, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Double] + + // widening + implicit val byteStreamShape : StreamShape[Byte , IntStream , IntStepper] = mkIntStreamShape[Byte] + implicit val shortStreamShape: StreamShape[Short, IntStream , IntStepper] = mkIntStreamShape[Short] + implicit val charStreamShape : StreamShape[Char , IntStream , IntStepper] = mkIntStreamShape[Char] + implicit val floatStreamShape: StreamShape[Float, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Float] + + // boxed java primitives + + implicit val jIntegerStreamShape : StreamShape[jl.Integer , IntStream , IntStepper ] = mkIntStreamShape[jl.Integer] + implicit val jLongStreamShape : StreamShape[jl.Long , LongStream , LongStepper ] = mkLongStreamShape[jl.Long] + implicit val jDoubleStreamShape : StreamShape[jl.Double , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Double] + implicit val jByteStreamShape : StreamShape[jl.Byte , IntStream , IntStepper ] = mkIntStreamShape[jl.Byte] + implicit val jShortStreamShape : StreamShape[jl.Short , IntStream , IntStepper ] = mkIntStreamShape[jl.Short] + implicit val jCharacterStreamShape : StreamShape[jl.Character, IntStream , IntStepper ] = mkIntStreamShape[jl.Character] + implicit val jFloatStreamShape : StreamShape[jl.Float , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Float] + + private def mkIntStreamShape[T]: StreamShape[T, IntStream, IntStepper] = new StreamShape[T, IntStream, IntStepper] { + protected def mkStream(st: IntStepper, par: Boolean): IntStream = StreamSupport.intStream(st.spliterator, par) + } + + private def mkLongStreamShape[T]: StreamShape[T, LongStream, LongStepper] = new StreamShape[T, LongStream, LongStepper] { + protected def mkStream(st: LongStepper, par: Boolean): LongStream = StreamSupport.longStream(st.spliterator, par) + } + + private def mkDoubleStreamShape[T]: StreamShape[T, DoubleStream, DoubleStepper] = new StreamShape[T, DoubleStream, DoubleStepper] { + protected def mkStream(st: DoubleStepper, par: Boolean): DoubleStream = StreamSupport.doubleStream(st.spliterator, par) + } + } + + trait StreamShapeLowPriority1 { + // reference + implicit def anyStreamShape[T]: StreamShape[T, Stream[T], Stepper[T]] = anyStreamShapePrototype.asInstanceOf[StreamShape[T, Stream[T], Stepper[T]]] + + private[this] val anyStreamShapePrototype: StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] = new StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] { + def mkStream(s: Stepper[AnyRef], par: Boolean): Stream[AnyRef] = StreamSupport.stream(s.spliterator.asInstanceOf[Spliterator[AnyRef]], par) + } + } + + /** Connects a stream element type `A` to the corresponding, potentially specialized, Stream type. + * Used in the `stream.asJavaPrimitiveStream` extension method. 
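+   *
+   * For instance (editor's sketch):
+   * {{{
+   *   import scala.jdk.StreamConverters._
+   *   val boxed: java.util.stream.Stream[Int] = java.util.stream.Stream.of(1, 2, 3)
+   *   val prim: java.util.stream.IntStream    = boxed.asJavaPrimitiveStream       // uses intStreamUnboxer
+   * }}}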
+ */ + sealed trait StreamUnboxer[A, S] { + def apply(s: Stream[A]): S + } + object StreamUnboxer { + implicit val intStreamUnboxer: StreamUnboxer[Int, IntStream] = new StreamUnboxer[Int, IntStream] { + def apply(s: Stream[Int]): IntStream = s.mapToInt(x => x) + } + implicit val javaIntegerStreamUnboxer: StreamUnboxer[jl.Integer, IntStream] = intStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Integer, IntStream]] + + implicit val longStreamUnboxer: StreamUnboxer[Long, LongStream] = new StreamUnboxer[Long, LongStream] { + def apply(s: Stream[Long]): LongStream = s.mapToLong(x => x) + } + implicit val javaLongStreamUnboxer: StreamUnboxer[jl.Long, LongStream] = longStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Long, LongStream]] + + implicit val doubleStreamUnboxer: StreamUnboxer[Double, DoubleStream] = new StreamUnboxer[Double, DoubleStream] { + def apply(s: Stream[Double]): DoubleStream = s.mapToDouble(x => x) + } + implicit val javaDoubleStreamUnboxer: StreamUnboxer[jl.Double, DoubleStream] = doubleStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Double, DoubleStream]] + } + + + + /** An implicit `AccumulatorFactoryInfo` connects primitive element types to the corresponding + * specialized [[scala.jdk.Accumulator]] factory. This is used in the `stream.toScala` extension methods + * to ensure collecting a primitive stream into a primitive accumulator does not box. + * + * When converting to a collection other than `Accumulator`, the generic + * `noAccumulatorFactoryInfo` is passed. + */ + trait AccumulatorFactoryInfo[A, C] { + val companion: AnyRef + } + trait LowPriorityAccumulatorFactoryInfo { + implicit def noAccumulatorFactoryInfo[A, C]: AccumulatorFactoryInfo[A, C] = noAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, C]] + private val noAccumulatorFactoryInfoPrototype: AccumulatorFactoryInfo[AnyRef, AnyRef] = new AccumulatorFactoryInfo[AnyRef, AnyRef] { + val companion: AnyRef = null + } + } + object AccumulatorFactoryInfo extends LowPriorityAccumulatorFactoryInfo { + implicit def anyAccumulatorFactoryInfo[A]: AccumulatorFactoryInfo[A, AnyAccumulator[A]] = anyAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, AnyAccumulator[A]]] + + private object anyAccumulatorFactoryInfoPrototype extends AccumulatorFactoryInfo[AnyRef, AnyAccumulator[AnyRef]] { + val companion: AnyRef = AnyAccumulator + } + + implicit val intAccumulatorFactoryInfo: AccumulatorFactoryInfo[Int, IntAccumulator] = new AccumulatorFactoryInfo[Int, IntAccumulator] { + val companion: AnyRef = IntAccumulator + } + + implicit val longAccumulatorFactoryInfo: AccumulatorFactoryInfo[Long, LongAccumulator] = new AccumulatorFactoryInfo[Long, LongAccumulator] { + val companion: AnyRef = LongAccumulator + } + + implicit val doubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[Double, DoubleAccumulator] = new AccumulatorFactoryInfo[Double, DoubleAccumulator] { + val companion: AnyRef = DoubleAccumulator + } + + implicit val jIntegerAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Integer, IntAccumulator] = intAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Integer, IntAccumulator]] + implicit val jLongAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Long, LongAccumulator] = longAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Long, LongAccumulator]] + implicit val jDoubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Double, DoubleAccumulator] = doubleAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Double, DoubleAccumulator]] + } +} diff --git
a/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala new file mode 100644 index 000000000000..845ecb4a606d --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala @@ -0,0 +1,79 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ + +private[collection] class ObjectArrayStepper[A <: Object](underlying: Array[A], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[A], ObjectArrayStepper[A]](_i0, _iN) + with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): ObjectArrayStepper[A] = new ObjectArrayStepper[A](underlying, i0, half) +} + +private[collection] class BoxedBooleanArrayStepper(underlying: Array[Boolean], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[Boolean], BoxedBooleanArrayStepper](_i0, _iN) + with AnyStepper[Boolean] { + def nextStep(): Boolean = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): BoxedBooleanArrayStepper = new BoxedBooleanArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedByteArrayStepper(underlying: Array[Byte], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedByteArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedByteArrayStepper = new WidenedByteArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedCharArrayStepper(underlying: Array[Char], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedCharArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedCharArrayStepper = new WidenedCharArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedShortArrayStepper(underlying: Array[Short], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedShortArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedShortArrayStepper = new WidenedShortArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedFloatArrayStepper(underlying: Array[Float], _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, WidenedFloatArrayStepper](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedFloatArrayStepper = new WidenedFloatArrayStepper(underlying, i0, half) +} + +private[collection] class DoubleArrayStepper(underlying: Array[Double], _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, DoubleArrayStepper](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): DoubleArrayStepper 
= new DoubleArrayStepper(underlying, i0, half) +} + +private[collection] class IntArrayStepper(underlying: Array[Int], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, IntArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): IntArrayStepper = new IntArrayStepper(underlying, i0, half) +} + +private[collection] class LongArrayStepper(underlying: Array[Long], _i0: Int, _iN: Int) + extends IndexedStepperBase[LongStepper, LongArrayStepper](_i0, _iN) + with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): LongArrayStepper = new LongArrayStepper(underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala new file mode 100644 index 000000000000..7c795aea5391 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala @@ -0,0 +1,248 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection._ + + +private[collection] object BinaryTreeStepper { + val emptyStack = new Array[AnyRef](0) +} + + +/** A generic stepper that can traverse ordered binary trees. + * The tree is assumed to have all the stuff on the left first, then the root, then everything on the right. + * + * Splits occur at the root of whatever has not yet been traversed (the substepper steps up to but + * does not include the root). + * + * The stepper maintains an internal stack, not relying on the tree traversal to be reversible. Trees with + * nodes that maintain a parent pointer may be traversed slightly faster without a stack, but splitting is + * more awkward. + * + * Algorithmically, this class implements a simple state machine that unrolls the left-leaning links in + * a binary tree onto a stack. At all times, the machine should be in one of these states: + * 1. Empty: `myCurrent` is `null` and `index` is `-1`. `stack` should also be `Array.empty` then. + * 2. Ready: `myCurrent` is not `null` and contains the next `A` to be extracted + * 3. Pending: `myCurrent` is `null` and `stack(index)` contains the next node to visit + * + * Subclasses should allow this class to do all the work of maintaining state; `nextStep` should simply + * reduce `maxLength` by one, and consume `myCurrent` and set it to `null` if `hasStep` is true. + */ +private[collection] abstract class BinaryTreeStepperBase[A, T >: Null <: AnyRef, Sub >: Null, Semi <: Sub with BinaryTreeStepperBase[A, T, _, _]]( + protected var maxLength: Int, protected var myCurrent: T, protected var stack: Array[AnyRef], protected var index: Int, + protected val left: T => T, protected val right: T => T +) +extends EfficientSplit { + /** Unrolls a subtree onto the stack starting from a particular node, returning + * the last node found. This final node is _not_ placed on the stack, and + * may have things to its right.
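+   *
+   * Worked example (editor's note): for the tree `((a) b (c))`, `unroll(b)` pushes `b`
+   * onto the stack and returns the leftmost node `a`; the right-hand side of `a`
+   * (empty here) is handled later by `detach`.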
+ */ + @tailrec protected final def unroll(from: T): T = { + val l = left(from) + if (l eq null) from + else { + if (index+1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length*2) + index += 1 + stack(index) = from + unroll(l) + } + } + + /** Takes a subtree whose left side, if any, has already been visited, and unrolls + * the right side of the tree onto the stack, thereby detaching that node of + * the subtree from the stack entirely (so it is ready to use). It returns + * the node that is being detached. Note that the node must _not_ already be + * on the stack. + */ + protected final def detach(node: T): node.type = { + val r = right(node) + if (r ne null) { + val last = unroll(r) + if (index+1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length*2) + index += 1 + stack(index) = last + } + node + } + + /** Given an empty state and the root of a new tree, initialize the tree properly + * to be in an (appropriate) ready state. Will do all sorts of wrong stuff if the + * tree is not already empty. + * + * Right now overwrites everything so could allow reuse, but isn't used for it. + */ + private[impl] final def initialize(root: T, size: Int): Unit = + if (root eq null) { + maxLength = 0 + myCurrent = null + stack = BinaryTreeStepper.emptyStack + index = -1 + } + else { + maxLength = size + index = -1 + myCurrent = detach(unroll(root)) + } + + protected def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): Semi + + def characteristics: Int = Spliterator.ORDERED + + def estimateSize: Long = if (hasStep) maxLength else 0 + + def hasStep: Boolean = (myCurrent ne null) || (maxLength > 0 && { + if (index < 0) { maxLength = 0; stack = BinaryTreeStepper.emptyStack; false } + else { + val ans = stack(index).asInstanceOf[T] + index -= 1 + myCurrent = detach(ans) + true + } + }) + + /** Splits the tree at the root by giving everything unrolled on the stack to a new stepper, + * detaching the root, and leaving the right-hand side of the root unrolled. + * + * If the tree is empty or only has one element left, it returns `null` instead of splitting. 
+ */ + def trySplit(): Sub = + if (!hasStep || index < 0) null + else { + val root = stack(0).asInstanceOf[T] + val leftStack = + if (index > 0) java.util.Arrays.copyOfRange(stack, 1, index+1) + else BinaryTreeStepper.emptyStack + val leftIndex = index - 1 + val leftCurrent = myCurrent + var leftMax = maxLength + index = -1 + detach(root) + myCurrent = root + leftMax -= 2+index + maxLength -= 2+leftIndex + semiclone(leftMax, leftCurrent, leftStack, leftIndex) + } +} + + +private[collection] final class AnyBinaryTreeStepper[A, T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => A +) +extends BinaryTreeStepperBase[A, T, AnyStepper[A], AnyBinaryTreeStepper[A, T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): AnyBinaryTreeStepper[A, T] = + new AnyBinaryTreeStepper[A, T](maxL, myC, stk, ix, left, right, extract) +} +private[collection] object AnyBinaryTreeStepper { + def from[A, T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => A): AnyBinaryTreeStepper[A, T] = { + val ans = new AnyBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + +private[collection] final class DoubleBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Double +) +extends BinaryTreeStepperBase[Double, T, DoubleStepper, DoubleBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): DoubleBinaryTreeStepper[T] = + new DoubleBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object DoubleBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Double): DoubleBinaryTreeStepper[T] = { + val ans = new DoubleBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + +private[collection] final class IntBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Int +) +extends BinaryTreeStepperBase[Int, T, IntStepper, IntBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): IntBinaryTreeStepper[T] = + new IntBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object IntBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Int): IntBinaryTreeStepper[T] = { + val ans = new IntBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) 
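+    // (Editor's note) `initialize` unrolls the left spine of `root` so that the
+    // leftmost element is already detached as the stepper's current element.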
+ ans + } +} + + + +private[collection] final class LongBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Long +) +extends BinaryTreeStepperBase[Long, T, LongStepper, LongBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): LongBinaryTreeStepper[T] = + new LongBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object LongBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Long): LongBinaryTreeStepper[T] = { + val ans = new LongBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + diff --git a/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala new file mode 100644 index 000000000000..574e7fd50f1c --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala @@ -0,0 +1,118 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{BitSetOps, IntStepper, Stepper} + + +private[collection] final class BitSetStepper( + private var underlying: BitSetOps[_], + private var cache0: Long, private var cache1: Long, + _i0: Int, _iN: Int, + private var cacheIndex: Int +) +extends InOrderStepperBase[IntStepper, BitSetStepper](_i0, _iN) +with IntStepper { + import BitSetOps.{WordLength, LogWL} + + // When `found` is set, `i0` is an element that exists + protected var found: Boolean = false + + @annotation.tailrec + protected def findNext(): Boolean = + if (i0 >= iN) false + else { + val ix = i0 >> LogWL + if (ix == cacheIndex || ix == cacheIndex+1) { + val i = scanLong(if (ix == cacheIndex) cache0 else cache1, i0 & (WordLength - 1)) + if (i >= 0) { + i0 = (i0 & ~(WordLength - 1)) | i + found = (i0 < iN) + found + } + else { + i0 = (i0 & ~(WordLength - 1)) + WordLength + findNext() + } + } + else if (underlying eq null) { + i0 = iN + found = false + found + } + else { + cacheIndex = ix + cache0 = underlying.word(cacheIndex) + cache1 = if ((iN - 1) >> LogWL == ix) -1L else underlying.word(cacheIndex+1) + findNext() + } + } + + def semiclone(half: Int): BitSetStepper = + if (underlying == null) { + val ans = new BitSetStepper(null, cache0, cache1, i0, half, cacheIndex) + ans.found = found + i0 = half + found = false + ans + } + else { + // Set up new stepper + val ixNewN = (half - 1) >> LogWL + val ans = + new BitSetStepper(if (ixNewN <= cacheIndex + 1) null else underlying, cache0, cache1, i0, half, cacheIndex) + if (found) ans.found = true + + // Advance old stepper to breakpoint + val ixOld0 = half >> LogWL + if (ixOld0 > cacheIndex + 1) { + cache0 = underlying.word(ixOld0) + cache1 = if (((iN - 1) >> LogWL) == ixOld0) -1L else underlying.word(ixOld0+1) + cacheIndex = ixOld0 + 
i0 = half + found = false + } + + // Return new stepper + ans + } + + @annotation.tailrec + private[this] def scanLong(bits: Long, from: Int): Int = + if (from >= WordLength) -1 + else if ((bits & (1L << from)) != 0) from + else scanLong(bits, from + 1) + + def nextStep(): Int = + if (found || findNext()) { + found = false + val ans = i0 + i0 += 1 + ans + } + else Stepper.throwNSEE() +} + +private[collection] object BitSetStepper { + def from(bs: scala.collection.BitSetOps[_]): IntStepper with EfficientSplit = + new BitSetStepper( + if (bs.nwords <= 2) null else bs, + if (bs.nwords <= 0) -1L else bs.word(0), + if (bs.nwords <= 1) -1L else bs.word(1), + 0, + bs.nwords * BitSetOps.WordLength, + 0 + ) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala new file mode 100644 index 000000000000..466e6c440f45 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala @@ -0,0 +1,245 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +import scala.collection.immutable.Node + +/** A stepper that is a slightly elaborated version of the ChampBaseIterator; + * the main difference is that it knows when it should stop instead of running + * to the end of all trees. + */ +private[collection] abstract class ChampStepperBase[ + A, T <: Node[T], Sub >: Null, Semi <: Sub with ChampStepperBase[A, T, _, _] +](protected var maxSize: Int) +extends EfficientSplit { + import Node.MaxDepth + + // Much of this code is identical to ChampBaseIterator. If you change that, look here too! + + protected var currentValueCursor: Int = 0 + protected var currentValueLength: Int = 0 + protected var currentValueNode: T = _ + + private var currentStackLevel: Int = -1 + private var nodeCursorsAndLengths: Array[Int] = _ + private var nodes: Array[T] = _ + + private def initNodes(): Unit = { + if (nodeCursorsAndLengths eq null) { + nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) + nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] + } + } + def initRoot(rootNode: T): Unit = { + if (rootNode.hasNodes) pushNode(rootNode) + if (rootNode.hasPayload) setupPayloadNode(rootNode) + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = 0 + currentValueLength = node.payloadArity + } + + private final def pushNode(node: T): Unit = { + initNodes() + currentStackLevel = currentStackLevel + 1 + + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + nodes(currentStackLevel) = node + nodeCursorsAndLengths(cursorIndex) = 0 + nodeCursorsAndLengths(lengthIndex) = node.nodeArity + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for next node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. 
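+   * (Editor's note: `nodeCursorsAndLengths` keeps a cursor/length pair per stack level,
+   * at indices `2*level` and `2*level + 1`, so descending into a sub-node is O(1)
+   * bookkeeping.)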
+ */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + val nodeCursor = nodeCursorsAndLengths(cursorIndex) + val nodeLength = nodeCursorsAndLengths(lengthIndex) + + if (nodeCursor < nodeLength) { + nodeCursorsAndLengths(cursorIndex) += 1 + + val nextNode = nodes(currentStackLevel).getNode(nodeCursor) + + if (nextNode.hasNodes) { pushNode(nextNode) } + if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } + } else { + popNode() + } + } + false + } + + def characteristics: Int = 0 + + def estimateSize: Long = if (hasStep) maxSize else 0L + + def semiclone(): Semi + + final def hasStep: Boolean = maxSize > 0 && { + val ans = (currentValueCursor < currentValueLength) || searchNextValueNode() + if (!ans) maxSize = 0 + ans + } + + final def trySplit(): Sub = + if (!hasStep) null + else { + var fork = 0 + while (fork <= currentStackLevel && nodeCursorsAndLengths(2*fork) >= nodeCursorsAndLengths(2*fork + 1)) fork += 1 + if (fork > currentStackLevel && currentValueCursor > currentValueLength -2) null + else { + val semi = semiclone() + semi.maxSize = maxSize + semi.currentValueCursor = currentValueCursor + semi.currentValueNode = currentValueNode + if (fork > currentStackLevel) { + // Just need to finish the current node + semi.currentStackLevel = -1 + val i = (currentValueCursor + currentValueLength) >>> 1 + semi.currentValueLength = i + currentValueCursor = i + } + else { + // Need (at least some of) the full stack, so make an identical copy + semi.nodeCursorsAndLengths = java.util.Arrays.copyOf(nodeCursorsAndLengths, nodeCursorsAndLengths.length) + semi.nodes = java.util.Arrays.copyOf(nodes.asInstanceOf[Array[Node[T]]], nodes.length).asInstanceOf[Array[T]] + semi.currentStackLevel = currentStackLevel + semi.currentValueLength = currentValueLength + + // Split the top level of the stack where there's still something to split + // Could make this more efficient by duplicating code from searchNextValueNode + // instead of setting up for it to run normally. But splits tend to be rare, + // so it's not critically important. + // + // Note that this split can be kind of uneven; if we knew how many child nodes there + // were we could do better. 
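+          // (Editor's note) The midpoint split below hands the clone the first half of
+          // the remaining cursor range at level `fork` and keeps the second half here.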
+ val i = (nodeCursorsAndLengths(2*fork) + nodeCursorsAndLengths(2*fork + 1)) >>> 1 + semi.nodeCursorsAndLengths(2*fork + 1) = i + var j = currentStackLevel + while (j > fork) { + nodeCursorsAndLengths(2*j) = nodeCursorsAndLengths(2*j + 1) + j -= 1 + } + nodeCursorsAndLengths(2*fork) = i + searchNextValueNode() + } + semi + } + } +} + + +private[collection] final class AnyChampStepper[A, T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => A) +extends ChampStepperBase[A, T, AnyStepper[A], AnyChampStepper[A, T]](_maxSize) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): AnyChampStepper[A, T] = new AnyChampStepper[A, T](0, extract) +} +private[collection] object AnyChampStepper { + def from[A, T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => A): AnyChampStepper[A, T] = { + val ans = new AnyChampStepper[A, T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class DoubleChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Double) +extends ChampStepperBase[Double, T, DoubleStepper, DoubleChampStepper[T]](_maxSize) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): DoubleChampStepper[T] = new DoubleChampStepper[T](0, extract) +} +private[collection] object DoubleChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Double): DoubleChampStepper[T] = { + val ans = new DoubleChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class IntChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Int) +extends ChampStepperBase[Int, T, IntStepper, IntChampStepper[T]](_maxSize) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): IntChampStepper[T] = new IntChampStepper[T](0, extract) +} +private[collection] object IntChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Int): IntChampStepper[T] = { + val ans = new IntChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class LongChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Long) +extends ChampStepperBase[Long, T, LongStepper, LongChampStepper[T]](_maxSize) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): LongChampStepper[T] = new LongChampStepper[T](0, extract) +} +private[collection] object LongChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Long): LongChampStepper[T] = { + val ans = new LongChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala new file mode 100644 index 000000000000..2d1f88d02930 --- /dev/null +++ 
b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala @@ -0,0 +1,53 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit + +/** Abstracts all the generic operations of stepping over a collection + * that has an indexable ordering but may have gaps. + * + * For collections that are guaranteed to not have gaps, use `IndexedStepperBase` instead. + */ +private[convert] abstract class InOrderStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) +extends EfficientSplit { + /** Set `true` if the element at `i0` is known to be there. `false` if either not known or is a gap. + */ + protected def found: Boolean + + /** Advance `i0` over any gaps, updating internal state so `found` is correct at the new position. + * Returns the new value of `found`. + */ + protected def findNext(): Boolean + + protected def semiclone(half: Int): Semi + + final def hasStep: Boolean = found || findNext() + + def characteristics: Int = Spliterator.ORDERED + + def estimateSize: Long = iN - i0 + + def trySplit(): Sub = { + if (iN-1 > i0) { + val half = (i0 + iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala new file mode 100644 index 000000000000..136ac8d2dcc3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala @@ -0,0 +1,44 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection._ + +private[collection] class AnyIndexedSeqStepper[A](underlying: collection.IndexedSeqOps[A, AnyConstr, _], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[A], AnyIndexedSeqStepper[A]](_i0, _iN) + with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): AnyIndexedSeqStepper[A] = new AnyIndexedSeqStepper[A](underlying, i0, half) +} + +private[collection] class DoubleIndexedSeqStepper[CC <: collection.IndexedSeqOps[Double, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, DoubleIndexedSeqStepper[CC]](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): DoubleIndexedSeqStepper[CC] = new DoubleIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class IntIndexedSeqStepper[CC <: collection.IndexedSeqOps[Int, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, IntIndexedSeqStepper[CC]](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): IntIndexedSeqStepper[CC] = new IntIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class LongIndexedSeqStepper[CC <: collection.IndexedSeqOps[Long, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[LongStepper, LongIndexedSeqStepper[CC]](_i0, _iN) + with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): LongIndexedSeqStepper[CC] = new LongIndexedSeqStepper[CC](underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala new file mode 100644 index 000000000000..4670ccc56bfc --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit + +/** Abstracts all the generic operations of stepping over an indexable collection */ +private[convert] abstract class IndexedStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) + extends EfficientSplit { + protected def semiclone(half: Int): Semi + + def hasStep: Boolean = i0 < iN + + def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED + + def estimateSize: Long = iN - i0 + + def trySplit(): Sub = { + if (iN-1 > i0) { + val half = (i0+iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala new file mode 100644 index 000000000000..68b318c04c9c --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala @@ -0,0 +1,129 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.{AnyStepper, DoubleStepper, IntStepper, LongStepper, Stepper} +import scala.jdk.{AnyAccumulator, DoubleAccumulator, IntAccumulator, LongAccumulator} + +private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A]) + extends IteratorStepperBase[A, AnyStepper[A], AnyIteratorStepper[A]](_underlying) + with AnyStepper[A] { + protected def semiclone(): AnyIteratorStepper[A] = new AnyIteratorStepper(null) + + def nextStep(): A = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): AnyStepper[A] = if (proxied ne null) proxied.trySplit() else { + val acc = new AnyAccumulator[A] + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class DoubleIteratorStepper(_underlying: Iterator[Double]) + extends IteratorStepperBase[Double, DoubleStepper, DoubleIteratorStepper](_underlying) + with DoubleStepper { + protected def semiclone(): DoubleIteratorStepper = new DoubleIteratorStepper(null) + + def nextStep(): Double = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): DoubleStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new DoubleAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class IntIteratorStepper(_underlying: Iterator[Int]) + extends IteratorStepperBase[Int, IntStepper, IntIteratorStepper](_underlying) + with IntStepper { + protected def semiclone(): 
IntIteratorStepper = new IntIteratorStepper(null) + + def nextStep(): Int = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): IntStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new IntAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class LongIteratorStepper(_underlying: Iterator[Long]) + extends IteratorStepperBase[Long, LongStepper, LongIteratorStepper](_underlying) + with LongStepper { + protected def semiclone(): LongIteratorStepper = new LongIteratorStepper(null) + + def nextStep(): Long = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): LongStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new LongAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +/** Common functionality for Steppers that step through an Iterator, caching the results as needed when a split is requested. */ +private[convert] abstract class IteratorStepperBase[A, SP >: Null <: Stepper[A], Semi <: SP](final protected val underlying: Iterator[A]) { + final protected var nextChunkSize = 16 + final protected var proxied: SP = null + protected def semiclone(): Semi // Must initialize with null iterator! + def characteristics: Int = if (proxied ne null) Spliterator.ORDERED | Spliterator.SIZED | Spliterator.SUBSIZED else Spliterator.ORDERED + def estimateSize: Long = if (proxied ne null) proxied.estimateSize else Long.MaxValue + def hasStep: Boolean = if (proxied ne null) proxied.hasStep else underlying.hasNext +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala new file mode 100644 index 000000000000..89e17bbf467c --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala @@ -0,0 +1,38 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection.{AnyStepper, IntStepper, LongStepper, Stepper} +import scala.collection.immutable.NumericRange + +private[collection] class AnyNumericRangeStepper[A](underlying: NumericRange[A], _i0: Int, _iN: Int) +extends IndexedStepperBase[AnyStepper[A], AnyNumericRangeStepper[A]](_i0, _iN) +with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new AnyNumericRangeStepper[A](underlying, i0, half) +} + +private[collection] class IntNumericRangeStepper(underlying: NumericRange[Int], _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, IntNumericRangeStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new IntNumericRangeStepper(underlying, i0, half) +} + +private[collection] class LongNumericRangeStepper(underlying: NumericRange[Long], _i0: Int, _iN: Int) +extends IndexedStepperBase[LongStepper, LongNumericRangeStepper](_i0, _iN) +with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new LongNumericRangeStepper(underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala new file mode 100644 index 000000000000..282ddb4aa2ad --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection.{IntStepper, Stepper} + +/** Implements Stepper on an integer Range. You don't actually need the Range to do this, + * so only the relevant parts are included. Because the arguments are protected, they are + * not error-checked; `Range` is required to provide valid arguments. + */ +private[collection] final class RangeStepper(protected var myNext: Int, myStep: Int, _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, RangeStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = myNext + myNext += myStep + i0 += 1 + ans + } + else Stepper.throwNSEE() + protected def semiclone(half: Int): RangeStepper = new RangeStepper(myNext, myStep, i0, half) + override def trySplit(): IntStepper = { + val old_i0 = i0 + val ans = super.trySplit() + myNext += (i0 - old_i0) * myStep + ans + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala new file mode 100644 index 000000000000..8990f462b4fd --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala @@ -0,0 +1,58 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import java.lang.Character.{charCount, isLowSurrogate} +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{IntStepper, Stepper} + +/** Implements `Stepper` on a `String` where you step through chars packed into `Int`. + */ +private[collection] final class CharStringStepper(underlying: String, _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, CharStringStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { val j = i0; i0 += 1; underlying.charAt(j) } + else Stepper.throwNSEE() + + def semiclone(half: Int): CharStringStepper = new CharStringStepper(underlying, i0, half) +} + +/** Implements `Stepper` on a `String` where you step through code points. + */ +private[collection] final class CodePointStringStepper(underlying: String, private var i0: Int, private var iN: Int) +extends IntStepper with EfficientSplit { + def characteristics: Int = Spliterator.IMMUTABLE | Spliterator.NONNULL | Spliterator.ORDERED + def estimateSize: Long = iN - i0 + def hasStep: Boolean = i0 < iN + def nextStep(): Int = { + if (hasStep) { + val cp = underlying.codePointAt(i0) + i0 += charCount(cp) + cp + } + else Stepper.throwNSEE() + } + def trySplit(): CodePointStringStepper = + if (iN - 3 > i0) { + var half = (i0 + iN) >>> 1 + if (isLowSurrogate(underlying.charAt(half))) half -= 1 + val ans = new CodePointStringStepper(underlying, i0, half) + i0 = half + ans + } + else null +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala new file mode 100644 index 000000000000..cac041a5237b --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala @@ -0,0 +1,138 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection._ + +private[collection] abstract class TableStepperBase[A, I >: Null <: AnyRef, Sub >: Null, Semi <: Sub with TableStepperBase[A, I, _, _]]( + protected var maxLength: Int, protected val table: Array[I], protected var i0: Int, protected val iN: Int +) +extends EfficientSplit { + // Always holds table(i0); if `null` it is time to switch to the next element + protected var myCurrent: I = if (i0 < iN) table(i0) else null + + // Only call this when `myCurrent` is null (meaning we need to advance) + @annotation.tailrec + protected final def findNextCurrent(): Boolean = + if (i0 < iN) { + i0 += 1 + if (i0 >= iN) false + else { + myCurrent = table(i0) + if (myCurrent eq null) findNextCurrent() + else true + } + } + else false + + protected def semiclone(half: Int): Semi + + def characteristics: Int = 0 + + def estimateSize: Long = if (!hasStep) { maxLength = 0; 0 } else maxLength + + def hasStep: Boolean = (myCurrent ne null) || findNextCurrent() + + def trySplit(): Sub = { + if (iN-1 > i0 && maxLength > 0) { + val half = (i0 + iN) >>> 1 + val ans = semiclone(half) + ans.myCurrent = myCurrent + myCurrent = table(half) + var inLeft = if (ans.myCurrent ne null) 1 else 0 + var inRight = if (myCurrent ne null) 1 else 0 + if (iN - i0 < 32) { + var i = i0+1 + while (i < half && (table(i) ne null)) { i += 1; inLeft += 1 } + i = half+1 + while (i < iN && (table(i) ne null)) { i += 1; inRight += 1 } + } + maxLength -= inLeft + ans.maxLength -= inRight + i0 = half + ans + } + else null + } +} + + +private[collection] final class AnyTableStepper[A, I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => A, _i0: Int, _iN: Int +) +extends TableStepperBase[A, I, AnyStepper[A], AnyTableStepper[A, I]](_maxLength, _table, _i0, _iN) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): AnyTableStepper[A, I] = new AnyTableStepper[A, I](maxLength, table, iterate, extract, i0, half) +} + + +private[collection] final class DoubleTableStepper[I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Double, _i0: Int, _iN: Int +) +extends TableStepperBase[Double, I, DoubleStepper, DoubleTableStepper[I]](_maxLength, _table, _i0, _iN) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): DoubleTableStepper[I] = new DoubleTableStepper[I](maxLength, table, iterate, extract, i0, half) +} + + +private[collection] final class IntTableStepper[I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Int, _i0: Int, _iN: Int +) +extends TableStepperBase[Int, I, IntStepper, IntTableStepper[I]](_maxLength, _table, _i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): IntTableStepper[I] = new IntTableStepper[I](maxLength, table, iterate, extract, i0, half) +} + + +private[collection] final class LongTableStepper[I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Long, _i0: Int, _iN: Int +) +extends TableStepperBase[Long, I, LongStepper, 
LongTableStepper[I]](_maxLength, _table, _i0, _iN) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): LongTableStepper[I] = new LongTableStepper[I](maxLength, table, iterate, extract, i0, half) +} + diff --git a/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala new file mode 100644 index 000000000000..332ec65d85fd --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala @@ -0,0 +1,131 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ + +private[convert] abstract class VectorStepperBase[Sub >: Null, Semi <: Sub]( + _i0: Int, + _iN: Int, + protected val displayN: Int, + protected val trunk: Array[AnyRef] +) +extends IndexedStepperBase[Sub, Semi](_i0, _iN) { + protected var index: Int = 32 // Force an advanceData on the first element + protected var leaves: Array[AnyRef] = null + protected var index1: Int = 32 // Force advanceData to defer to initTo on the first element + protected var twigs: Array[AnyRef] = null + + protected final def advanceData(iX: Int): Unit = { + index1 += 1 + if (index1 >= 32) initTo(iX) + else { + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = 0 + } + } + protected final def initTo(iX: Int): Unit = displayN match { + case 0 => + leaves = trunk + index = iX + case 1 => + twigs = trunk + index1 = iX >>> 5 + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + case _ => + var n = displayN + var dataN = trunk + while (n > 2) { + dataN = dataN((iX >> (5*n)) & 0x1F).asInstanceOf[Array[AnyRef]] + n -= 1 + } + twigs = dataN((iX >>> 10) & 0x1F).asInstanceOf[Array[AnyRef]] + index1 = (iX >> 5) & 0x1F + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + } +} + +private[collection] class AnyVectorStepper[A](_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[AnyStepper[A], AnyVectorStepper[A]](_i0, _iN, _displayN, _trunk) +with AnyStepper[A] { + def nextStep(): A = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[A] + } else Stepper.throwNSEE() + def semiclone(half: Int): AnyVectorStepper[A] = { + val ans = new AnyVectorStepper[A](i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class DoubleVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[DoubleStepper, DoubleVectorStepper](_i0, _iN, _displayN, _trunk) +with DoubleStepper { + def nextStep(): Double = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Double] + } else Stepper.throwNSEE() + def semiclone(half: Int): DoubleVectorStepper = { + val ans = new DoubleVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class IntVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[IntStepper, IntVectorStepper](_i0, _iN, _displayN, _trunk) +with 
IntStepper { + def nextStep(): Int = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Int] + } else Stepper.throwNSEE() + def semiclone(half: Int): IntVectorStepper = { + val ans = new IntVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class LongVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[LongStepper, LongVectorStepper](_i0, _iN, _displayN, _trunk) +with LongStepper { + def nextStep(): Long = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Long] + } else Stepper.throwNSEE() + def semiclone(half: Int): LongVectorStepper = { + val ans = new LongVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} diff --git a/tests/pos-special/stdlib/collection/generic/BitOperations.scala b/tests/pos-special/stdlib/collection/generic/BitOperations.scala new file mode 100644 index 000000000000..4c64dec9dc1f --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/BitOperations.scala @@ -0,0 +1,50 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + + +/** Some bit operations. + * + * See [[https://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for + * an explanation of unsignedCompare. + */ +private[collection] object BitOperations { + trait Int { + type Int = scala.Int + def zero(i: Int, mask: Int) = (i & mask) == 0 + def mask(i: Int, mask: Int) = i & (complement(mask - 1) ^ mask) + def hasMatch(key: Int, prefix: Int, m: Int) = mask(key, m) == prefix + def unsignedCompare(i: Int, j: Int) = (i < j) ^ (i < 0) ^ (j < 0) + def shorter(m1: Int, m2: Int) = unsignedCompare(m2, m1) + def complement(i: Int) = (-1) ^ i + def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0) + def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep + def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j) + } + object Int extends Int + + trait Long { + type Long = scala.Long + def zero(i: Long, mask: Long) = (i & mask) == 0L + def mask(i: Long, mask: Long) = i & (complement(mask - 1) ^ mask) + def hasMatch(key: Long, prefix: Long, m: Long) = mask(key, m) == prefix + def unsignedCompare(i: Long, j: Long) = (i < j) ^ (i < 0L) ^ (j < 0L) + def shorter(m1: Long, m2: Long) = unsignedCompare(m2, m1) + def complement(i: Long) = (-1L) ^ i + def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L) + def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep + def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j) + } + object Long extends Long +} diff --git a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala new file mode 100644 index 000000000000..69b4b3d96e61 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala @@ -0,0 +1,87 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.generic + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.collection.{Factory, Iterable} +import scala.collection.mutable.Builder + +/** The default serialization proxy for collection implementations. + * + * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` + * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed + * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any + * additional state required to create the proper `Builder` needs to be captured by the `factory`. + */ +@SerialVersionUID(3L) +final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { + + @transient protected var builder: Builder[A, Any] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val k = coll.knownSize + out.writeInt(k) + var count = 0 + coll.foreach { x => + out.writeObject(x) + count += 1 + } + if(k >= 0) { + if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") + } else out.writeObject(SerializeEnd) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + builder = factory.newBuilder + val k = in.readInt() + if(k >= 0) { + builder.sizeHint(k) + var count = 0 + while(count < k) { + builder += in.readObject().asInstanceOf[A] + count += 1 + } + } else { + while (true) in.readObject match { + case SerializeEnd => return + case a => builder += a.asInstanceOf[A] + } + } + } + + protected[this] def readResolve(): Any = builder.result() +} + +@SerialVersionUID(3L) +private[collection] case object SerializeEnd + +/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type + * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or + * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement + * it directly without using this trait if you need a non-standard factory or if you want to use a different + * serialization scheme. 
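+ *
+ * As a minimal sketch (the `MyColl` class below is illustrative only, it is not
+ * part of this file), a custom immutable collection would typically opt in like this:
+ *
+ * {{{
+ * final class MyColl[A](elems: List[A])
+ *   extends scala.collection.immutable.Iterable[A]
+ *   with DefaultSerializable {
+ *   def iterator: Iterator[A] = elems.iterator
+ * }
+ * }}}
+ *
+ * Serializing a `MyColl` then writes a `DefaultSerializationProxy` built from its
+ * `iterableFactory`, and deserialization rebuilds the elements through that factory.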
+ */
+trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] =>
+  protected[this] def writeReplace(): AnyRef = {
+    val f: Factory[Any, Any] = this match {
+      case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]]
+      case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]]
+      case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]])
+      case it => it.iterableFactory.iterableFactory
+    }
+    new DefaultSerializationProxy(f, this)
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/generic/IsIterable.scala b/tests/pos-special/stdlib/collection/generic/IsIterable.scala
new file mode 100644
index 000000000000..bf2eab6bb2a6
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/IsIterable.scala
@@ -0,0 +1,164 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package generic
+
+/** A trait which can be used to avoid code duplication when defining extension
+ * methods that should be applicable both to existing Scala collections (i.e.,
+ * types extending `Iterable`) as well as other (potentially user-defined)
+ * types that could be converted to a Scala collection type. This trait
+ * makes it possible to treat Scala collections and types that can be implicitly
+ * converted to a collection type uniformly. For example, one can provide
+ * extension methods that work both on collection types and on `String`s (`String`s
+ * do not extend `Iterable`, but can be converted to `Iterable`).
+ *
+ * `IsIterable` provides three members:
+ *
+ * 1. type member `A`, which represents the element type of the target `Iterable[A]`
+ * 1. type member `C`, which represents the type returned by transformation operations that preserve the collection’s element type
+ * 1. method `apply`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `IterableOps[A, Iterable, C]`.
+ *
+ * ===Usage===
+ *
+ * One must provide `IsIterable` as an implicit parameter type of an implicit
+ * conversion. Its usage is shown below. Our objective in the following example
+ * is to provide a generic extension method `mapReduce` to any type that extends
+ * or can be converted to `Iterable`. In our example, this includes
+ * `String`.
+ *
+ * {{{
+ *    import scala.collection.{Iterable, IterableOps}
+ *    import scala.collection.generic.IsIterable
+ *
+ *    class ExtensionMethods[Repr, I <: IsIterable[Repr]](coll: Repr, it: I) {
+ *      def mapReduce[B](mapper: it.A => B)(reducer: (B, B) => B): B = {
+ *        val iter = it(coll).iterator
+ *        var res = mapper(iter.next())
+ *        while (iter.hasNext)
+ *          res = reducer(res, mapper(iter.next()))
+ *        res
+ *      }
+ *    }
+ *
+ *    implicit def withExtensions[Repr](coll: Repr)(implicit it: IsIterable[Repr]): ExtensionMethods[Repr, it.type] =
+ *      new ExtensionMethods(coll, it)
+ *
+ *  // See it in action!
+ * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12
+ * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59
+ *}}}
+ *
+ * Here, we begin by creating a class `ExtensionMethods` which contains our
+ * `mapReduce` extension method.
+ *
+ * Note that `ExtensionMethods` takes a constructor argument `coll` of type `Repr`, where
+ * `Repr` represents (typically) the collection type, and an argument `it` of a subtype of `IsIterable[Repr]`.
+ * The body of the method starts by converting the `coll` argument to an `IterableOps` in order to
+ * call the `iterator` method on it.
+ * The remainder of the implementation is straightforward.
+ *
+ * The `withExtensions` implicit conversion makes the `mapReduce` operation available
+ * on any type `Repr` for which an implicit `IsIterable[Repr]` instance exists.
+ * Note how we keep track of the precise type of the implicit `it` argument by using the
+ * `it.type` singleton type, rather than the wider `IsIterable[Repr]` type. We do that
+ * so that the information carried by the type members `A` and `C` of the `it` argument
+ * is not lost.
+ *
+ * When the `mapReduce` method is called on some type of which it is not
+ * a member, implicit search is triggered. Because the implicit conversion
+ * `withExtensions` is generic, it will be applied as long as an implicit
+ * value of type `IsIterable[Repr]` can be found. Given that the
+ * `IsIterable` companion object contains implicit members that return values of type
+ * `IsIterable`, this requirement is typically satisfied, and the chain
+ * of interactions described in the previous paragraph is set in motion.
+ * (See the `IsIterable` companion object, which contains a precise
+ * specification of the available implicits.)
+ *
+ * ''Note'': Currently, it's not possible to combine the implicit conversion and
+ * the class with the extension methods into an implicit class due to
+ * limitations of type inference.
+ *
+ * ===Implementing `IsIterable` for New Types===
+ *
+ * One must simply provide an implicit value of type `IsIterable`
+ * specific to the new type, or an implicit conversion which returns an
+ * instance of `IsIterable` specific to the new type.
+ *
+ * Below is an example of an implementation of the `IsIterable` trait
+ * where the `Repr` type is `Range`.
+ *
+ *{{{
+ * implicit val rangeRepr: IsIterable[Range] { type A = Int; type C = IndexedSeq[Int] } =
+ *   new IsIterable[Range] {
+ *     type A = Int
+ *     type C = IndexedSeq[Int]
+ *     def apply(coll: Range): IterableOps[Int, IndexedSeq, IndexedSeq[Int]] = coll
+ *   }
+ *}}}
+ *
+ * (Note that in practice the `IsIterable[Range]` instance is already provided by
+ * the standard library, and it is defined as an `IsSeq[Range]` instance.)
+ */
+trait IsIterable[Repr] extends IsIterableOnce[Repr] {
+
+  /** The type returned by transformation operations that preserve the collection's
+    * element type (e.g. `filter`, `take`).
+    *
+    * In practice, this type is often `Repr` itself, except in the case
+    * of `SeqView[A]` (and other `View[A]` subclasses), where it is “only” `View[A]`.
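+    *
+    * For instance, the implicit instance for `List[Int]` fixes `C = List[Int]`,
+    * whereas the one for `SeqView[Int]` fixes `C = View[Int]`.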
+ */ + type C + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => IterableOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `IterableOps[A, Iterable, C]` */ + def apply(coll: Repr): IterableOps[A, Iterable, C] + +} + +object IsIterable extends IsIterableLowPriority { + + // Straightforward case: IterableOps subclasses + implicit def iterableOpsIsIterable[A0, CC0[X] <: IterableOps[X, Iterable, CC0[X]]]: IsIterable[CC0[A0]] { type A = A0; type C = CC0[A0] } = + new IsIterable[CC0[A0]] { + type A = A0 + type C = CC0[A0] + def apply(coll: CC0[A]): IterableOps[A, Iterable, C] = coll + } + + // The `BitSet` type can not be unified with the `CC0` parameter of + // the above definition because it does not take a type parameter. + // Hence the need for a separate case: + implicit def bitSetOpsIsIterable[C0 <: BitSet with BitSetOps[C0]]: IsIterable[C0] { type A = Int; type C = C0 } = + new IsIterable[C0] { + type A = Int + type C = C0 + def apply(coll: C0): IterableOps[Int, Iterable, C0] = coll + } + +} + +trait IsIterableLowPriority { + + // Makes `IsSeq` instances visible in `IsIterable` companion + implicit def isSeqLikeIsIterable[Repr](implicit + isSeqLike: IsSeq[Repr] + ): IsIterable[Repr] { type A = isSeqLike.A; type C = isSeqLike.C } = isSeqLike + + // Makes `IsMap` instances visible in `IsIterable` companion + implicit def isMapLikeIsIterable[Repr](implicit + isMapLike: IsMap[Repr] + ): IsIterable[Repr] { type A = isMapLike.A; type C = isMapLike.C } = isMapLike + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala new file mode 100644 index 000000000000..7d7293037bd4 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala @@ -0,0 +1,71 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package generic + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `IterableOnce[A]`. + * + * This type enables simple enrichment of `IterableOnce`s with extension + * methods which can make full use of the mechanics of the Scala collections + * framework in their implementation. + * + * Example usage, + * {{{ + * class FilterMapImpl[Repr, I <: IsIterableOnce[Repr]](coll: Repr, it: I) { + * final def filterMap[B, That](f: it.A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = { + * val b = bf.newBuilder(coll) + * for(e <- it(coll).iterator) f(e) foreach (b +=) + * b.result() + * } + * } + * implicit def filterMap[Repr](coll: Repr)(implicit it: IsIterableOnce[Repr]): FilterMapImpl[Repr, it.type] = + * new FilterMapImpl(coll, it) + * + * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) + * // == List(2, 4) + * }}} + */ +trait IsIterableOnce[Repr] { + + /** The type of elements we can traverse over (e.g. `Int`). */ + type A + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + val conversion: Repr => IterableOnce[A] = apply(_) + + /** A conversion from the representation type `Repr` to a `IterableOnce[A]`. 
*/ + def apply(coll: Repr): IterableOnce[A] + +} + +object IsIterableOnce extends IsIterableOnceLowPriority { + + // Straightforward case: IterableOnce subclasses + implicit def iterableOnceIsIterableOnce[CC0[A] <: IterableOnce[A], A0]: IsIterableOnce[CC0[A0]] { type A = A0 } = + new IsIterableOnce[CC0[A0]] { + type A = A0 + def apply(coll: CC0[A0]): IterableOnce[A0] = coll + } + +} + +trait IsIterableOnceLowPriority { + + // Makes `IsIterable` instance visible in `IsIterableOnce` companion + implicit def isIterableLikeIsIterableOnce[Repr](implicit + isIterableLike: IsIterable[Repr] + ): IsIterableOnce[Repr] { type A = isIterableLike.A } = isIterableLike + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsMap.scala b/tests/pos-special/stdlib/collection/generic/IsMap.scala new file mode 100644 index 000000000000..19f75cf7bced --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsMap.scala @@ -0,0 +1,114 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +import IsMap.Tupled +import scala.collection.immutable.{IntMap, LongMap} + +/** + * Type class witnessing that a collection type `Repr` + * has keys of type `K`, values of type `V` and has a conversion to + * `MapOps[K, V, Iterable, C]`, for some types `K`, `V` and `C`. + * + * This type enables simple enrichment of `Map`s with extension methods. + * + * @see [[scala.collection.generic.IsIterable]] + * @tparam Repr Collection type (e.g. `Map[Int, String]`) + */ +trait IsMap[Repr] extends IsIterable[Repr] { + + /** The type of keys */ + type K + + /** The type of values */ + type V + + type A = (K, V) + + /** A conversion from the type `Repr` to `MapOps[K, V, Iterable, C]` + * + * @note The third type parameter of the returned `MapOps` value is + * still `Iterable` (and not `Map`) because `MapView[K, V]` only + * extends `MapOps[K, V, View, View[A]]`. + */ + override def apply(c: Repr): MapOps[K, V, Tupled[Iterable]#Ap, C] + +} + +object IsMap { + + /** Convenient type level function that takes a unary type constructor `F[_]` + * and returns a binary type constructor that tuples its parameters and passes + * them to `F`. + * + * `Tupled[F]#Ap` is equivalent to `({ type Ap[X, +Y] = F[(X, Y)] })#Ap`. 
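+ *
+ * For example, `Tupled[Iterable]#Ap[Int, String]` is `Iterable[(Int, String)]`; this
+ * is how `MapOps` above is given a binary type constructor whose application to
+ * `(K, V)` yields `Iterable[(K, V)]`.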
+ */ + type Tupled[F[+_]] = { type Ap[X, Y] = F[(X, Y)] } + + // Map collections + implicit def mapOpsIsMap[CC0[X, Y] <: MapOps[X, Y, Tupled[Iterable]#Ap, CC0[X, Y]], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = CC0[K, V] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = CC0[K0, V0] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, C] = c + } + + // MapView + implicit def mapViewIsMap[CC0[X, Y] <: MapView[X, Y], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = View[(K0, V0)] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = View[(K, V)] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, View[(K0, V0)]] = c + } + + // AnyRefMap has stricter bounds than the ones used by the mapOpsIsMap definition + implicit def anyRefMapIsMap[K0 <: AnyRef, V0]: IsMap[mutable.AnyRefMap[K0, V0]] { type K = K0; type V = V0; type C = mutable.AnyRefMap[K0, V0] } = + new IsMap[mutable.AnyRefMap[K0, V0]] { + type K = K0 + type V = V0 + type C = mutable.AnyRefMap[K0, V0] + def apply(c: mutable.AnyRefMap[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, mutable.AnyRefMap[K0, V0]] = c + } + + // IntMap takes one type parameter only whereas mapOpsIsMap uses a parameter CC0 with two type parameters + implicit def intMapIsMap[V0]: IsMap[IntMap[V0]] { type K = Int; type V = V0; type C = IntMap[V0] } = + new IsMap[IntMap[V0]] { + type K = Int + type V = V0 + type C = IntMap[V0] + def apply(c: IntMap[V0]): MapOps[Int, V0, Tupled[Iterable]#Ap, IntMap[V0]] = c + } + + // LongMap is in a similar situation as IntMap + implicit def longMapIsMap[V0]: IsMap[LongMap[V0]] { type K = Long; type V = V0; type C = LongMap[V0] } = + new IsMap[LongMap[V0]] { + type K = Long + type V = V0 + type C = LongMap[V0] + def apply(c: LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, LongMap[V0]] = c + } + + // mutable.LongMap is in a similar situation as LongMap and IntMap + implicit def mutableLongMapIsMap[V0]: IsMap[mutable.LongMap[V0]] { type K = Long; type V = V0; type C = mutable.LongMap[V0] } = + new IsMap[mutable.LongMap[V0]] { + type K = Long + type V = V0 + type C = mutable.LongMap[V0] + def apply(c: mutable.LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, mutable.LongMap[V0]] = c + } + + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala new file mode 100644 index 000000000000..69ea27d087d1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsSeq.scala @@ -0,0 +1,114 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +import scala.reflect.ClassTag + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for + * some types `A` and `C`. + * + * This type enables simple enrichment of `Seq`s with extension methods which + * can make full use of the mechanics of the Scala collections framework in + * their implementation. 
+ * + * @see [[scala.collection.generic.IsIterable]] + */ +trait IsSeq[Repr] extends IsIterable[Repr] { + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => SeqOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `SeqOps[A, Iterable, C]` + * + * @note The second type parameter of the returned `SeqOps` value is + * still `Iterable` (and not `Seq`) because `SeqView[A]` only + * extends `SeqOps[A, View, View[A]]`. + */ + def apply(coll: Repr): SeqOps[A, Iterable, C] +} + +object IsSeq { + + private val seqOpsIsSeqVal: IsSeq[Seq[Any]] = + new IsSeq[Seq[Any]] { + type A = Any + type C = Any + def apply(coll: Seq[Any]): SeqOps[Any, Iterable, Any] = coll + } + + implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = + seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] + + implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsSeq[CC0[A0]] { type A = A0; type C = View[A0] } = + new IsSeq[CC0[A0]] { + type A = A0 + type C = View[A] + def apply(coll: CC0[A0]): SeqOps[A0, View, View[A0]] = coll + } + + implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = + new IsSeq[String] { + type A = Char + type C = String + def apply(s: String): SeqOps[Char, immutable.IndexedSeq, String] = + new SeqOps[Char, immutable.ArraySeq, String] { + def length: Int = s.length + def apply(i: Int): Char = s.charAt(i) + def toIterable: Iterable[Char] = new immutable.WrappedString(s) + protected[this] def coll: String = s + protected[this] def fromSpecific(coll: IterableOnce[Char]): String = coll.iterator.mkString + def iterableFactory: IterableFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged + override def empty: String = "" + protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder + def iterator: Iterator[Char] = s.iterator + } + } + + implicit val stringViewIsSeq: IsSeq[StringView] { type A = Char; type C = View[Char] } = + new IsSeq[StringView] { + type A = Char + type C = View[Char] + def apply(coll: StringView): SeqOps[Char, View, View[Char]] = coll + } + + implicit def arrayIsSeq[A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = + new IsSeq[Array[A0]] { + type A = A0 + type C = Array[A0] + def apply(a: Array[A0]): SeqOps[A0, Seq, Array[A0]] = + new SeqOps[A, mutable.ArraySeq, Array[A]] { + def apply(i: Int): A = a(i) + def length: Int = a.length + def toIterable: Iterable[A] = mutable.ArraySeq.make(a) + protected def coll: Array[A] = a + protected def fromSpecific(coll: IterableOnce[A]): Array[A] = Array.from(coll) + def iterableFactory: IterableFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged + override def empty: Array[A] = Array.empty[A] + protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder + def iterator: Iterator[A] = a.iterator + } + } + + // `Range` can not be unified with the `CC0` parameter of the + // `seqOpsIsSeq` definition because it does not take a type parameter. 
+  // Hence the need for a separate case:
+  implicit def rangeIsSeq[C0 <: Range]: IsSeq[C0] { type A = Int; type C = immutable.IndexedSeq[Int] } =
+    new IsSeq[C0] {
+      type A = Int
+      type C = immutable.IndexedSeq[Int]
+      def apply(coll: C0): SeqOps[Int, Seq, immutable.IndexedSeq[Int]] = coll
+    }
+
+}
diff --git a/tests/pos-special/stdlib/collection/generic/Subtractable.scala b/tests/pos-special/stdlib/collection/generic/Subtractable.scala
new file mode 100644
index 000000000000..223997f4e972
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/Subtractable.scala
@@ -0,0 +1,62 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package generic
+
+/** This trait represents collection-like objects that can be reduced
+ * using a '-' operator. It defines variants of `-` and `--`
+ * as convenience methods in terms of single-element removal `-`.
+ *
+ * @tparam A the type of the elements of the $coll.
+ * @tparam Repr the type of the $coll itself
+ * @define coll collection
+ * @define Coll Subtractable
+ */
+@deprecated("Subtractable is deprecated. This is now implemented as part of SetOps, MapOps, etc.", "2.13.0")
+trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self =>
+
+  /** The representation object of type `Repr` which contains the collection's elements
+   */
+  protected def repr: Repr
+
+  /** Creates a new $coll from this $coll with an element removed.
+   * @param elem the element to remove
+   * @return a new collection that contains all elements of the current $coll
+   * except one less occurrence of `elem`.
+   */
+  def -(elem: A): Repr
+
+  /** Creates a new $coll from this $coll with some elements removed.
+   *
+   * This method takes two or more elements to be removed. Another overloaded
+   * variant of this method handles the case where a single element is
+   * removed.
+   * @param elem1 the first element to remove.
+   * @param elem2 the second element to remove.
+   * @param elems the remaining elements to remove.
+   * @return a new $coll that contains all elements of the current $coll
+   * except one less occurrence of each of the given elements.
+   */
+  def -(elem1: A, elem2: A, elems: A*): Repr =
+    this - elem1 - elem2 -- elems
+
+  /** Creates a new $coll from this $coll by removing all elements of another
+   * collection.
+   *
+   * @param xs the collection containing the removed elements.
+   * @return a new $coll that contains all elements of the current $coll
+   * except one less occurrence of each of the elements of `xs`.
+   */
+  def --(xs: IterableOnce[A]): Repr = (repr /: xs.iterator) (_ - _)
+}
diff --git a/tests/pos-special/stdlib/collection/generic/package.scala b/tests/pos-special/stdlib/collection/generic/package.scala
new file mode 100644
index 000000000000..0c16aa04dc98
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/package.scala
@@ -0,0 +1,34 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */ + +package scala.collection + + +package object generic { + @deprecated("Clearable was moved from collection.generic to collection.mutable", "2.13.0") + type Clearable = scala.collection.mutable.Clearable + + @deprecated("Use scala.collection.BuildFrom instead", "2.13.0") + type CanBuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C] + + @deprecated("Growable was moved from collection.generic to collection.mutable", "2.13.0") + type Growable[-A] = scala.collection.mutable.Growable[A] + + @deprecated("Shrinkable was moved from collection.generic to collection.mutable", "2.13.0") + type Shrinkable[-A] = scala.collection.mutable.Shrinkable[A] + + @deprecated("Use IsIterable instead", "2.13.0") + type IsTraversableLike[Repr] = IsIterable[Repr] + + @deprecated("Use IsIterableOnce instead", "2.13.0") + type IsTraversableOnce[Repr] = IsIterableOnce[Repr] +} diff --git a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala new file mode 100644 index 000000000000..978c63034f4a --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala @@ -0,0 +1,685 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import java.util.Arrays + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ArraySeq => MutableArraySeq} +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.runtime.ScalaRunTime +import scala.util.Sorting +import scala.util.hashing.MurmurHash3 + +/** + * An immutable array. + * + * Supports efficient indexed access and has a small memory footprint. + * + * @define coll immutable array + * @define Coll `ArraySeq` + */ +sealed abstract class ArraySeq[+A] + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, ArraySeq, ArraySeq[A]] + with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]] + with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag] + with Serializable { + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + protected def elemTag: ClassTag[_] + + override def iterableFactory: SeqFactory[ArraySeq] = ArraySeq.untagged + + /** The wrapped mutable `Array` that backs this `ArraySeq`. Any changes to this array will break + * the expected immutability. Its element type does not have to be equal to the element type of this ArraySeq. + * A primitive ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an + * array of a supertype or subtype of the element type. 
+   */
+  def unsafeArray: Array[_]
+
+  protected def evidenceIterableFactory: ArraySeq.type = ArraySeq
+  protected def iterableEvidence: ClassTag[A @uncheckedVariance] = elemTag.asInstanceOf[ClassTag[A]]
+
+  def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit
+
+  @throws[ArrayIndexOutOfBoundsException]
+  def apply(i: Int): A
+
+  override def updated[B >: A](index: Int, elem: B): ArraySeq[B] = {
+    val dest = new Array[Any](length)
+    Array.copy(unsafeArray, 0, dest, 0, length)
+    dest(index) = elem
+    ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]]
+  }
+
+  override def map[B](f: A => B): ArraySeq[B] = {
+    val a = new Array[Any](size)
+    var i = 0
+    while (i < a.length){
+      a(i) = f(apply(i))
+      i += 1
+    }
+    ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
+  }
+
+  override def prepended[B >: A](elem: B): ArraySeq[B] =
+    ArraySeq.unsafeWrapArray(unsafeArray.prepended[Any](elem)).asInstanceOf[ArraySeq[B]]
+
+  override def appended[B >: A](elem: B): ArraySeq[B] =
+    ArraySeq.unsafeWrapArray(unsafeArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]]
+
+  /** Fast concatenation of two [[ArraySeq]]s.
+    *
+    * @return null if the optimisation is not possible.
+    */
+  private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] = {
+    // Optimise concatenation of two ArraySeqs
+    // For ArraySeqs with sizes of [100, 1000, 10000] this is [3.5, 4.1, 5.2]x as fast
+    if (isEmpty)
+      that
+    else if (that.isEmpty)
+      this
+    else {
+      val thisIsObj = this.unsafeArray.isInstanceOf[Array[AnyRef]]
+      val thatIsObj = that.unsafeArray.isInstanceOf[Array[AnyRef]]
+      val mismatch = thisIsObj != thatIsObj
+      if (mismatch)
+        // Combining primitives and objects: abort
+        null
+      else if (thisIsObj) {
+        // A and B are objects
+        val ax = this.unsafeArray.asInstanceOf[Array[A]]
+        val ay = that.unsafeArray.asInstanceOf[Array[B]]
+        val len = ax.length + ay.length
+        val a = new Array[AnyRef](len)
+        System.arraycopy(ax, 0, a, 0, ax.length)
+        System.arraycopy(ay, 0, a, ax.length, ay.length)
+        ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
+      } else {
+        // A is a primitive and B = A. Use this instance's protected ClassTag.
+ val ax = this.unsafeArray.asInstanceOf[Array[A]] + val ay = that.unsafeArray.asInstanceOf[Array[A]] + val len = ax.length + ay.length + val a = iterableEvidence.newArray(len) + System.arraycopy(ax, 0, a, 0, ax.length) + System.arraycopy(ay, 0, a, ax.length, ay.length) + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } + } + } + + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): ArraySeq[B] = { + def genericResult = { + val k = suffix.knownSize + if (k == 0) this + else { + val b = ArrayBuilder.make[Any] + if(k >= 0) b.sizeHint(k + unsafeArray.length) + b.addAll(unsafeArray) + b.addAll(suffix) + ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] + } + } + + suffix match { + case that: ArraySeq[_] => + val result = appendedAllArraySeq(that.asInstanceOf[ArraySeq[B]]) + if (result == null) genericResult + else result + case _ => + genericResult + } + } + + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): ArraySeq[B] = { + def genericResult = { + val k = prefix.knownSize + if (k == 0) this + else { + val b = ArrayBuilder.make[Any] + if(k >= 0) b.sizeHint(k + unsafeArray.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + unsafeArray.length) + b.addAll(unsafeArray) + ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] + } + } + + prefix match { + case that: ArraySeq[_] => + val result = that.asInstanceOf[ArraySeq[B]].appendedAllArraySeq(this) + if (result == null) genericResult + else result + case _ => + genericResult + } + } + + override def zip[B](that: collection.IterableOnce[B]): ArraySeq[(A, B)] = + that match { + case bs: ArraySeq[B] => + ArraySeq.tabulate(length min bs.length) { i => + (apply(i), bs(i)) + } + case _ => + strictOptimizedZip[B, ArraySeq[(A, B)]](that, iterableFactory.newBuilder) + } + + override def take(n: Int): ArraySeq[A] = + if (unsafeArray.length <= n) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).take(n)).asInstanceOf[ArraySeq[A]] + + override def takeRight(n: Int): ArraySeq[A] = + if (unsafeArray.length <= n) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).takeRight(n)).asInstanceOf[ArraySeq[A]] + + override def drop(n: Int): ArraySeq[A] = + if (n <= 0) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).drop(n)).asInstanceOf[ArraySeq[A]] + + override def dropRight(n: Int): ArraySeq[A] = + if (n <= 0) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).dropRight(n)).asInstanceOf[ArraySeq[A]] + + override def slice(from: Int, until: Int): ArraySeq[A] = + if (from <= 0 && unsafeArray.length <= until) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).slice(from, until)).asInstanceOf[ArraySeq[A]] + + override def foldLeft[B](z: B)(f: (B, A) => B): B = { + // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast + // as the same while-loop over this instead of unsafeArray. + val array = unsafeArray + var b = z + var i = 0 + while (i < array.length) { + val a = array(i).asInstanceOf[A] + b = f(b, a) + i += 1 + } + b + } + + override def foldRight[B](z: B)(f: (A, B) => B): B = { + // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.6, 1.8, 2.7]x as fast + // as the same while-loop over this instead of unsafeArray. 
+ val array = unsafeArray + var b = z + var i = array.length + while (i > 0) { + i -= 1 + val a = array(i).asInstanceOf[A] + b = f(a, b) + } + b + } + + override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).tail).asInstanceOf[ArraySeq[A]] + + override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).reverse).asInstanceOf[ArraySeq[A]] + + override protected[this] def className = "ArraySeq" + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(unsafeArray, 0, xs, start, copied) + } + copied + } + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def sorted[B >: A](implicit ord: Ordering[B]): ArraySeq[A] = + if(unsafeArray.length <= 1) this + else { + val a = Array.copyAs[AnyRef](unsafeArray, length)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + new ArraySeq.ofRef[AnyRef](a).asInstanceOf[ArraySeq[A]] + } +} + +/** + * $factoryInfo + * @define coll immutable array + * @define Coll `ArraySeq` + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + private[this] lazy val emptyImpl = new ArraySeq.ofRef[Nothing](new Array[Nothing](0)) + + def empty[A : ClassTag]: ArraySeq[A] = emptyImpl + + def from[A](it: scala.collection.IterableOnce[A])(implicit tag: ClassTag[A]): ArraySeq[A] = it match { + case as: ArraySeq[A] => as + case _ => unsafeWrapArray(Array.from[A](it)) + } + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = + ArrayBuffer.newBuilder[A].mapResult(b => unsafeWrapArray[A](b.toArray)) + + override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem) + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = { + val elements = Array.ofDim[A](scala.math.max(n, 0)) + var i = 0 + while (i < n) { + ScalaRunTime.array_update(elements, i, f(i)) + i = i + 1 + } + ArraySeq.unsafeWrapArray(elements) + } + + /** + * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type + * without copying. Any changes to wrapped array will break the expected immutability. + * + * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a + * `ClassCastException` at runtime. 
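+ *
+ * A short sketch of the intended use and of the hazard described above (the values
+ * in the comments follow from the fact that no copy is made):
+ * {{{
+ * val xs  = Array(1, 2, 3)
+ * val seq = ArraySeq.unsafeWrapArray(xs) // an ArraySeq.ofInt sharing xs
+ * xs(0) = 42                             // mutating xs is visible through seq!
+ * seq(0)                                 // 42
+ * }}}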
+ */ + def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = ((x: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] { + def elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): T = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any): Boolean = that match { + case that: ofRef[_] => + Array.equals( + this.unsafeArray.asInstanceOf[Array[AnyRef]], + that.unsafeArray.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq.ofRef[T] = { + if(unsafeArray.length <= 1) this + else { + val a = unsafeArray.clone() + Arrays.sort(a, ord.asInstanceOf[Ordering[T]]) + new ArraySeq.ofRef(a) + } + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length) + else shape.parUnbox(new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] { + protected def elemTag = ClassTag.Byte + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Byte = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Byte](implicit ord: Ordering[B]): ArraySeq[Byte] = + if(length <= 1) this + else if(ord eq Ordering.Byte) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofByte(a) + } else super.sorted[B] + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Byte](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + 
@SerialVersionUID(3L) + final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] { + protected def elemTag = ClassTag.Short + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Short = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Short](implicit ord: Ordering[B]): ArraySeq[Short] = + if(length <= 1) this + else if(ord eq Ordering.Short) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofShort(a) + } else super.sorted[B] + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Short](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] { + protected def elemTag = ClassTag.Char + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Char = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Char](implicit ord: Ordering[B]): ArraySeq[Char] = + if(length <= 1) this + else if(ord eq Ordering.Char) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofChar(a) + } else super.sorted[B] + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Char](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = + (new 
MutableArraySeq.ofChar(unsafeArray)).addString(sb, start, sep, end) + } + + @SerialVersionUID(3L) + final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] { + protected def elemTag = ClassTag.Int + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Int = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Int](implicit ord: Ordering[B]): ArraySeq[Int] = + if(length <= 1) this + else if(ord eq Ordering.Int) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofInt(a) + } else super.sorted[B] + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new IntArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Int](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] { + protected def elemTag = ClassTag.Long + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Long = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Long](implicit ord: Ordering[B]): ArraySeq[Long] = + if(length <= 1) this + else if(ord eq Ordering.Long) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofLong(a) + } else super.sorted[B] + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new LongArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Long](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] { + 
protected def elemTag = ClassTag.Float + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Float = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Float](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] { + protected def elemTag = ClassTag.Double + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Double = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Double](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofBoolean(val unsafeArray: Array[Boolean]) extends ArraySeq[Boolean] { + protected def elemTag = ClassTag.Boolean + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Boolean = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Boolean](implicit ord: Ordering[B]): ArraySeq[Boolean] = + if(length <= 1) this + else if(ord eq Ordering.Boolean) { + 
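// java.util.Arrays has no sort overload for Array[Boolean], so this falls back to Scala's + // stable sort; under Ordering.Boolean, false sorts before true. +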
val a = unsafeArray.clone() + Sorting.stableSort(a) + new ArraySeq.ofBoolean(a) + } else super.sorted[B] + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] + override def updated[B >: Boolean](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] { + protected def elemTag = ClassTag.Unit + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Unit = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofUnit => unsafeArray.length == that.unsafeArray.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](unsafeArray.asInstanceOf[Array[AnyRef]], 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala new file mode 100644 index 000000000000..9461264850a9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/BitSet.scala @@ -0,0 +1,375 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import BitSetOps.{LogWL, updateArray} +import mutable.Builder +import scala.annotation.{implicitNotFound, nowarn} + +/** A class for immutable bitsets. + * $bitsetinfo + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-bitsets "Scala's Collection Library overview"]] + * section on `Immutable BitSets` for more information. 
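+ * + * @example A small illustration (element values chosen arbitrarily): + * {{{ + * val bs = BitSet(1, 3, 64) // bits 1 and 3 live in word 0, bit 64 in word 1 + * (bs + 2).contains(2) // true: incl returns an updated copy + * (bs excl 3).contains(3) // false: excl likewise returns a copy + * }}}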
+ * + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +sealed abstract class BitSet + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { + + override def unsorted: Set[Int] = this + + override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + + def bitSetFactory = BitSet + + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) + + def incl(elem: Int): BitSet = { + require(elem >= 0, "bitset element must be >= 0") + if (contains(elem)) this + else { + val idx = elem >> LogWL + updateWord(idx, word(idx) | (1L << elem)) + } + } + + def excl(elem: Int): BitSet = { + require(elem >= 0, "bitset element must be >= 0") + if (contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) & ~(1L << elem)) + } else this + } + + /** Update word at index `idx`; enlarge set if `idx` outside range of set. + */ + protected def updateWord(idx: Int, w: Long): BitSet + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: scala.IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) +} + +/** + * $factoryInfo + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +@nowarn("cat=deprecation&msg=Implementation classes of BitSet should not be accessed directly") +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + + def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = + it match { + case bs: BitSet => bs + case _ => (newBuilder ++= it).result() + } + + final val empty: BitSet = new BitSet1(0L) + + def newBuilder: Builder[Int, BitSet] = + mutable.BitSet.newBuilder.mapResult(bs => fromBitMaskNoCopy(bs.elems)) + + private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) + + /** A bitset containing all the bits in an array */ + def fromBitMask(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else if (len == 1) new BitSet1(elems(0)) + else if (len == 2) createSmall(elems(0), elems(1)) + else { + val a = java.util.Arrays.copyOf(elems, len) + new BitSetN(a) + } + } + + /** A bitset containing all the bits in an 
array, wrapping the existing + * array without copying. + */ + def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else if (len == 1) new BitSet1(elems(0)) + else if (len == 2) createSmall(elems(0), elems(1)) + else new BitSetN(elems) + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSet1(val elems: Long) extends BitSet { + protected[collection] def nwords = 1 + protected[collection] def word(idx: Int) = if (idx == 0) elems else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = + if (idx == 0) new BitSet1(w) + else if (idx == 1) createSmall(elems, w) + else this.fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case _ => + val newElems = elems & ~bs.word(0) + if (newElems == 0L) this.empty else new BitSet1(newElems) + } + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems = BitSetOps.computeWordForFilter(pred, isFlipped, elems, 0) + if (_elems == 0L) this.empty else new BitSet1(_elems) + } + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSet2(val elems0: Long, val elems1: Long) extends BitSet { + protected[collection] def nwords = 2 + protected[collection] def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = + if (idx == 0) new BitSet2(w, elems1) + else if (idx == 1) createSmall(elems0, w) + else this.fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case 1 => + new BitSet2(elems0 & ~bs.word(0), elems1) + case _ => + val _elems0 = elems0 & ~bs.word(0) + val _elems1 = elems1 & ~bs.word(1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } else { + new BitSet1(_elems0) + } + } else { + new BitSet2(_elems0, _elems1) + } + } + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems0 = BitSetOps.computeWordForFilter(pred, isFlipped, elems0, 0) + val _elems1 = BitSetOps.computeWordForFilter(pred, isFlipped, elems1, 1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } + else new BitSet1(_elems0) + } + else new BitSet2(_elems0, _elems1) + } + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSetN(val elems: Array[Long]) extends BitSet { + protected[collection] def nwords = elems.length + + protected[collection] def word(idx: Int) = if (idx < nwords) elems(idx) else 0L + + protected[collection] def updateWord(idx: Int, w: Long): BitSet = this.fromBitMaskNoCopy(updateArray(elems, idx, w)) + + override def diff(that: collection.Set[Int]): BitSet = that match { + case bs: collection.BitSet => + /* + * Algorithm: + * + * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with + * the fewer words. Two extra concerns for optimization are described below. 
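+ * + * Per word, the result is just this.word(i) & ~bs.word(i); for instance 0b1011 & ~0b0110 == 0b1001.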
+ * + * Array Shrinking: + * If `this` is not longer than `bs`, then since we must iterate through the full array of words, + * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new + * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` + * + * Tracking Changes: + * If the two sets are disjoint, then we can return `this`. Therefore, until at least one change is detected, + * we check each word to see if it has changed from its corresponding word in `this`. Once a single change is + * detected, we stop checking because the cost of the new Array must be paid anyway. + */ + + val bsnwords = bs.nwords + val thisnwords = nwords + if (bsnwords >= thisnwords) { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length + var i = thisnwords - 1 + var currentWord = 0L + // if there are never any changes, we can return `this` at the end + var anyChanges = false + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + i match { + case -1 => + if (anyChanges) { + if (currentWord == 0) { + this.empty + } else { + new BitSet1(currentWord) + } + } else { + this + } + case 0 => + val oldFirstWord = word(0) + val firstWord = oldFirstWord & ~bs.word(0) + anyChanges ||= firstWord != oldFirstWord + if (anyChanges) { + new BitSet2(firstWord, currentWord) + } else { + this + } + case _ => + val minimumNonZeroIndex: Int = i + 1 + while (!anyChanges && i >= 0) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = word(i) & ~bs.word(i) + i -= 1 + } + new BitSetN(newArray) + } else { + this + } + } + } else { + var i = bsnwords - 1 + var anyChanges = false + var currentWord = 0L + while (i >= 0 && !anyChanges) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newElems = elems.clone() + newElems(i + 1) = currentWord + while (i >= 0) { + newElems(i) = word(i) & ~bs.word(i) + i -= 1 + } + this.fromBitMaskNoCopy(newElems) + } else { + this + } + } + case _ => super.diff(that) + } + + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero.
That ( + 1 ) will be our new array length + var i = nwords - 1 + var currentWord = 0L + // if there are never any changes, we can return `this` at the end + var anyChanges = false + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + i match { + case -1 => + if (anyChanges) { + if (currentWord == 0) { + this.empty + } else { + new BitSet1(currentWord) + } + } else { + this + } + case 0 => + val oldFirstWord = word(0) + val firstWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldFirstWord, 0) + anyChanges ||= firstWord != oldFirstWord + if (anyChanges) { + new BitSet2(firstWord, currentWord) + } else { + this + } + case _ => + val minimumNonZeroIndex: Int = i + 1 + while (!anyChanges && i >= 0) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) + i -= 1 + } + new BitSetN(newArray) + } else { + this + } + } + } + + override def toBitMask: Array[Long] = elems.clone() + } + + @SerialVersionUID(3L) + private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { + protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala new file mode 100644 index 000000000000..711332567b0f --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala @@ -0,0 +1,252 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + + package scala.collection.immutable + + + import java.lang.Integer.bitCount + import java.lang.Math.ceil + import java.lang.System.arraycopy + + private[collection] object Node { + final val HashCodeLength = 32 + + final val BitPartitionSize = 5 + + final val BitPartitionMask = (1 << BitPartitionSize) - 1 + + final val MaxDepth = ceil(HashCodeLength.toDouble / BitPartitionSize).toInt + + final val BranchingFactor = 1 << BitPartitionSize + + final def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask + + final def bitposFrom(mask: Int): Int = 1 << mask + + final def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1)) + + final def indexFrom(bitmap: Int, mask: Int, bitpos: Int): Int = if (bitmap == -1) mask else indexFrom(bitmap, bitpos) + + } + + private[collection] abstract class Node[T <: Node[T]] { + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): T + + def hasPayload: Boolean + + def payloadArity: Int + + def getPayload(index: Int): Any + + def getHash(index: Int): Int + + def cachedJavaKeySetHashCode: Int + + private final def arrayIndexOutOfBounds(as: Array[_], ix:Int): ArrayIndexOutOfBoundsException = + new ArrayIndexOutOfBoundsException(s"$ix is out of bounds (min 0, max ${as.length-1})") + + protected final def removeElement(as: Array[Int], ix: Int): Array[Int] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Int](as.length - 1) + arraycopy(as, 0, result, 0, ix) + arraycopy(as, ix + 1, result, ix, as.length - ix - 1) + result + } + + protected final def removeAnyElement(as: Array[Any], ix: Int): Array[Any] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Any](as.length - 1) + arraycopy(as, 0, result, 0, ix) + arraycopy(as, ix + 1, result, ix, as.length - ix - 1) + result + } + + protected final def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + protected final def insertAnyElement(as: Array[Any], ix: Int, elem: Int): Array[Any] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Any](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + } + + /** + * Base class for fixed-stack iterators that traverse a hash-trie. The iterator performs a + * depth-first pre-order traversal, which yields first all payload elements of the current + * node before traversing sub-nodes (left to right). + * + * @tparam T the trie node type we are iterating over + */ + private[immutable] abstract class ChampBaseIterator[T <: Node[T]] { + + import Node.MaxDepth + + // Note--this code is duplicated to a large extent both in + // ChampBaseReverseIterator and in convert.impl.ChampStepperBase. + // If you change this code, check those also in case they also + // need to be modified.
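+ // + // Layout note: for stack level L, nodeCursorsAndLengths(2 * L) holds the cursor into that node's + // sub-node array and nodeCursorsAndLengths(2 * L + 1) its nodeArity, so a single flat Int array + // serves as the whole fixed-depth (MaxDepth) stack.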
+ + protected var currentValueCursor: Int = 0 + protected var currentValueLength: Int = 0 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] var nodeCursorsAndLengths: Array[Int] = _ + private[this] var nodes: Array[T] = _ + private def initNodes(): Unit = { + if (nodeCursorsAndLengths eq null) { + nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) + nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] + } + } + + def this(rootNode: T) = { + this() + if (rootNode.hasNodes) pushNode(rootNode) + if (rootNode.hasPayload) setupPayloadNode(rootNode) + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = 0 + currentValueLength = node.payloadArity + } + + private final def pushNode(node: T): Unit = { + initNodes() + currentStackLevel = currentStackLevel + 1 + + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + nodes(currentStackLevel) = node + nodeCursorsAndLengths(cursorIndex) = 0 + nodeCursorsAndLengths(lengthIndex) = node.nodeArity + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for next node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. + */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + val nodeCursor = nodeCursorsAndLengths(cursorIndex) + val nodeLength = nodeCursorsAndLengths(lengthIndex) + + if (nodeCursor < nodeLength) { + nodeCursorsAndLengths(cursorIndex) += 1 + + val nextNode = nodes(currentStackLevel).getNode(nodeCursor) + + if (nextNode.hasNodes) { pushNode(nextNode) } + if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } + } else { + popNode() + } + } + + return false + } + + final def hasNext = (currentValueCursor < currentValueLength) || searchNextValueNode() + +} + +/** + * Base class for fixed-stack iterators that traverse a hash-trie in reverse order. The base + * iterator performs a depth-first post-order traversal, traversing sub-nodes (right to left). + * + * @tparam T the trie node type we are iterating over + */ +private[immutable] abstract class ChampBaseReverseIterator[T <: Node[T]] { + + import Node.MaxDepth + + protected var currentValueCursor: Int = -1 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] val nodeIndex: Array[Int] = new Array[Int](MaxDepth + 1) + private[this] val nodeStack: Array[T] = new Array[Node[T]](MaxDepth + 1).asInstanceOf[Array[T]] + + def this(rootNode: T) = { + this() + pushNode(rootNode) + searchNextValueNode() + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = node.payloadArity - 1 + } + + private final def pushNode(node: T): Unit = { + currentStackLevel = currentStackLevel + 1 + + nodeStack(currentStackLevel) = node + nodeIndex(currentStackLevel) = node.nodeArity - 1 + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for rightmost node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. 
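+ * Returns true iff such a node was found; the payload cursor is then positioned on its last element.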
+ */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val nodeCursor = nodeIndex(currentStackLevel) ; nodeIndex(currentStackLevel) = nodeCursor - 1 + + if (nodeCursor >= 0) { + val nextNode = nodeStack(currentStackLevel).getNode(nodeCursor) + pushNode(nextNode) + } else { + val currNode = nodeStack(currentStackLevel) + popNode() + + if (currNode.hasPayload) { setupPayloadNode(currNode) ; return true } + } + } + + return false + } + + final def hasNext = (currentValueCursor >= 0) || searchNextValueNode() + + } diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala new file mode 100644 index 000000000000..2e8378c4d810 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/HashMap.scala @@ -0,0 +1,2423 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + + package scala + package collection.immutable + + import java.lang.Integer.bitCount + import java.lang.System.arraycopy + + import scala.annotation.unchecked.{uncheckedVariance => uV} + import scala.collection.Hashing.improve + import scala.collection.Stepper.EfficientSplit + import scala.collection.generic.DefaultSerializable + import scala.collection.mutable, mutable.ReusableBuilder + import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable} + import scala.runtime.AbstractFunction2 + import scala.runtime.Statics.releaseFence + import scala.util.hashing.MurmurHash3 + + /** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree. + * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values associated with the keys in this hash map. + * + * @define Coll `immutable.HashMap` + * @define coll immutable champ hash map + */ + + final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: BitmapIndexedMapNode[K, V]) + extends AbstractMap[K, V] + with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] + with MapFactoryDefaults[K, V, HashMap, Iterable] + with DefaultSerializable { + + def this() = this(MapNode.empty) + + // This release fence is present because rootNode may have previously been mutated during construction.
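+ // The fence makes those writes visible to any thread that later reads this map through a + // safely published reference.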
+ releaseFence() + + override def mapFactory: MapFactory[HashMap] = HashMap + + override def knownSize: Int = rootNode.size + + override def size: Int = rootNode.size + + override def isEmpty: Boolean = rootNode.size == 0 + + override def keySet: Set[K] = if (size == 0) Set.empty else new HashKeySet + + private final class HashKeySet extends ImmutableKeySet { + + private[this] def newKeySetOrThis(newHashMap: HashMap[K, _]): Set[K] = + if (newHashMap eq HashMap.this) this else newHashMap.keySet + private[this] def newKeySetOrThis(newRootNode: BitmapIndexedMapNode[K, _]): Set[K] = + if (newRootNode eq rootNode) this else new HashMap(newRootNode).keySet + + override def incl(elem: K): Set[K] = { + val originalHash = elem.## + val improvedHash = improve(originalHash) + val newNode = rootNode.updated(elem, null.asInstanceOf[V], originalHash, improvedHash, 0, replaceValue = false) + newKeySetOrThis(newNode) + } + override def excl(elem: K): Set[K] = newKeySetOrThis(HashMap.this - elem) + override def filter(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filter(kv => pred(kv._1))) + override def filterNot(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filterNot(kv => pred(kv._1))) + } + + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleIterator[K, V](rootNode) + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapKeyIterator[K, V](rootNode) + } + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else new MapValueIterator[K, V](rootNode) + } + + protected[immutable] def reverseIterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleReverseIterator[K, V](rootNode) + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. 
parUnbox(collection.convert.impl.AnyChampStepper.from[(K, V), MapNode[K, V]](size, rootNode, (node, i) => node.getPayload(i))) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import collection.convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[K, MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i))) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import collection.convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[V, MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i))) + } + s.asInstanceOf[S with EfficientSplit] + } + + override final def contains(key: K): Boolean = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.containsKey(key, keyUnimprovedHash, keyHash, 0) + } + + override def apply(key: K): V = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.apply(key, keyUnimprovedHash, keyHash, 0) + } + + def get(key: K): Option[V] = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.get(key, keyUnimprovedHash, keyHash, 0) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.getOrElse(key, keyUnimprovedHash, keyHash, 0, default) + } + + @`inline` private[this] def newHashMapOrThis[V1 >: V](newRootNode: BitmapIndexedMapNode[K, V1]): HashMap[K, V1] = + if (newRootNode eq rootNode) this else new HashMap(newRootNode) + + def updated[V1 >: V](key: K, value: V1): HashMap[K, V1] = { + val keyUnimprovedHash = key.## + newHashMapOrThis(rootNode.updated(key, value, keyUnimprovedHash, improve(keyUnimprovedHash), 0, replaceValue = true)) + } + + // preemptively overridden in anticipation of performance optimizations + override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): HashMap[K, V1] = + super.updatedWith[V1](key)(remappingFunction) + + def removed(key: K): HashMap[K, V] = { + val keyUnimprovedHash = key.## + newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0)) + } + + override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]): HashMap[K, V1] = that match { + case hm: HashMap[K, V1] => + if (isEmpty) hm + else { + val newNode = rootNode.concat(hm.rootNode, 0) + if (newNode eq hm.rootNode) hm + else newHashMapOrThis(newNode) + } + case hm: mutable.HashMap[K @unchecked, V @unchecked] => + val iter = hm.nodeIterator +
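// Replay the mutable map's entries on top of rootNode; once an update swaps in a fresh root we + // know the new nodes are unshared, so the remaining entries can take the cheaper + // updateWithShallowMutations path below. +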
var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case lhm: mutable.LinkedHashMap[K @unchecked, V @unchecked] => + val iter = lhm.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case _ => + class accum extends AbstractFunction2[K, V1, Unit] with Function1[(K, V1), Unit] { + var changed = false + var shallowlyMutableNodeMap: Int = 0 + var current: BitmapIndexedMapNode[K, V1] = rootNode + def apply(kv: (K, V1)) = apply(kv._1, kv._2) + def apply(key: K, value: V1): Unit = { + val originalHash = key.## + val improved = improve(originalHash) + if (!changed) { + current = current.updated(key, value, originalHash, improved, 0, replaceValue = true) + if (current ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since key->value has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. 
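+ // (bitposFrom(maskFrom(improved, 0)) below is exactly the root-level bit position for this key's hash.)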
+ changed = true + shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + } + } else { + shallowlyMutableNodeMap = current.updateWithShallowMutations(key, value, originalHash, improved, 0, shallowlyMutableNodeMap) + } + } + } + that match { + case thatMap: Map[K, V1] => + if (thatMap.isEmpty) this + else { + val accum = new accum + thatMap.foreachEntry(accum) + newHashMapOrThis(accum.current) + } + case _ => + val it = that.iterator + if (it.isEmpty) this + else { + val accum = new accum + it.foreach(accum) + newHashMapOrThis(accum.current) + } + } + } + + override def tail: HashMap[K, V] = this - head._1 + + override def init: HashMap[K, V] = this - last._1 + + override def head: (K, V) = iterator.next() + + override def last: (K, V) = reverseIterator.next() + + override def foreach[U](f: ((K, V)) => U): Unit = rootNode.foreach(f) + + override def foreachEntry[U](f: (K, V) => U): Unit = rootNode.foreachEntry(f) + + /** Applies a function to each key, value, and **original** hash value in this Map */ + @`inline` private[collection] def foreachWithHash(f: (K, V, Int) => Unit): Unit = rootNode.foreachWithHash(f) + + override def equals(that: Any): Boolean = + that match { + case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode) + case _ => super.equals(that) + } + + override def hashCode(): Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + // Optimized to avoid recomputation of key hashcodes as these are cached in the nodes and can be assumed to be + // immutable. + val hashIterator = new MapKeyValueTupleHashIterator(rootNode) + val hash = MurmurHash3.unorderedHash(hashIterator, MurmurHash3.mapSeed) + // assert(hash == super.hashCode()) + hash + } + } + + override protected[this] def className = "HashMap" + + /** Merges this HashMap with another HashMap by combining all key-value pairs of both maps, and delegating to a merge + * function to resolve any key collisions between the two HashMaps. + * + * @example {{{ + * val left = HashMap(1 -> 1, 2 -> 1) + * val right = HashMap(2 -> 2, 3 -> 2) + * + * val merged = left.merged(right){ case ((k0, v0), (k1, v1)) => (k0 + k1) -> (v0 + v1) } + * // HashMap(1 -> 1, 3 -> 2, 4 -> 3) + * + * }}} + * + * @param that the HashMap to merge this HashMap with + * @param mergef the merge function which resolves collisions between the two HashMaps. If `mergef` is null, then + * keys from `this` will overwrite keys from `that`, making the behaviour equivalent to + * `that.concat(this)` + * + * @note In cases where `mergef` returns keys which themselves collide with other keys returned by `mergef`, or + * found in `this` or `that`, it is not defined which value will be chosen.
For example: + * + * Colliding multiple results of merging: + * {{{ + * // key `3` collides between a result of merging keys `1` and `2` + * val left = HashMap(1 -> 1, 2 -> 2) + * val right = HashMap(1 -> 1, 2 -> 2) + * + * val merged = left.merged(right){ case (_, (_, v1)) => 3 -> v1 } + * // HashMap(3 -> 2) is returned, but it could also have returned HashMap(3 -> 1) + * }}} + * Colliding results of merging with other keys: + * {{{ + * // key `2` collides between a result of merging `1`, and existing key `2` + * val left = HashMap(1 -> 1, 2 -> 1) + * val right = HashMap(1 -> 2) + * + * val merged = left.merged(right)((_,_) => 2 -> 3) + * // HashMap(2 -> 1) is returned, but it could also have returned HashMap(2 -> 3) + * }}} + * + */ + def merged[V1 >: V](that: HashMap[K, V1])(mergef: ((K, V), (K, V1)) => (K, V1)): HashMap[K, V1] = + if (mergef == null) { + that ++ this + } else { + if (isEmpty) that + else if (that.isEmpty) this + else if (size == 1) { + val payload@(k, v) = rootNode.getPayload(0) + val originalHash = rootNode.getHash(0) + val improved = improve(originalHash) + + if (that.rootNode.containsKey(k, originalHash, improved, 0)) { + val thatPayload = that.rootNode.getTuple(k, originalHash, improved, 0) + val (mergedK, mergedV) = mergef(payload, thatPayload) + val mergedOriginalHash = mergedK.## + val mergedImprovedHash = improve(mergedOriginalHash) + new HashMap(that.rootNode.removed(thatPayload._1, originalHash, improved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) + } else { + new HashMap(that.rootNode.updated(k, v, originalHash, improved, 0, replaceValue = true)) + } + } else if (that.size == 1) { + val thatPayload@(k, v) = that.rootNode.getPayload(0) + val thatOriginalHash = that.rootNode.getHash(0) + val thatImproved = improve(thatOriginalHash) + + if (rootNode.containsKey(k, thatOriginalHash, thatImproved, 0)) { + val payload = rootNode.getTuple(k, thatOriginalHash, thatImproved, 0) + val (mergedK, mergedV) = mergef(payload, thatPayload) + val mergedOriginalHash = mergedK.## + val mergedImprovedHash = improve(mergedOriginalHash) + new HashMap(rootNode.updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) + } else { + new HashMap(rootNode.updated(k, v, thatOriginalHash, thatImproved, 0, replaceValue = true)) + } + } else { + val builder = new HashMapBuilder[K, V1] + rootNode.mergeInto(that.rootNode, builder, 0)(mergef) + builder.result() + } + } + + override def transform[W](f: (K, V) => W): HashMap[K, W] = + newHashMapOrThis(rootNode.transform[Any](f)).asInstanceOf[HashMap[K, W]] + + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): HashMap[K, V] = { + val newRootNode = rootNode.filterImpl(pred, isFlipped) + if (newRootNode eq rootNode) this + else if (newRootNode.size == 0) HashMap.empty + else new HashMap(newRootNode) + } + + override def removedAll(keys: IterableOnce[K]): HashMap[K, V] = { + if (isEmpty) { + this + } else { + keys match { + case hashSet: HashSet[K] => + if (hashSet.isEmpty) { + this + } else { + // TODO: Remove all keys from the hashSet in a sub-linear fashion by only visiting the nodes in the tree + // This can be a direct port of the implementation of `SetNode[A]#diff(SetNode[A])` + val newRootNode = new MapNodeRemoveAllSetNodeIterator(hashSet.rootNode).removeAll(rootNode) + if (newRootNode eq rootNode) this + else if (newRootNode.size <= 0) HashMap.empty + else new HashMap(newRootNode) + } + case hashSet:
collection.mutable.HashSet[K] => + if (hashSet.isEmpty) { + this + } else { + val iter = hashSet.nodeIterator + var curr = rootNode + + while (iter.hasNext) { + val next = iter.next() + val originalHash = hashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + curr = curr.removed(next.key, originalHash, improved, 0) + if (curr.size == 0) { + return HashMap.empty + } + } + newHashMapOrThis(curr) + } + case lhashSet: collection.mutable.LinkedHashSet[K] => + if (lhashSet.isEmpty) { + this + } else { + val iter = lhashSet.entryIterator + var curr = rootNode + + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + curr = curr.removed(next.key, originalHash, improved, 0) + if (curr.size == 0) { + return HashMap.empty + } + } + newHashMapOrThis(curr) + } + case _ => + val iter = keys.iterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + curr = curr.removed(next, originalHash, improved, 0) + if (curr.size == 0) { + return HashMap.empty + } + } + newHashMapOrThis(curr) + } + } + } + + override def partition(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `partition` could be optimized to traverse the trie node-by-node, splitting each node into two, + // based on the result of applying `p` to its elements and subnodes. + super.partition(p) + } + + override def take(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `take` could be optimized to construct a new trie structure by visiting each node, and including + // those nodes in the resulting trie, until `n` total elements have been included. + super.take(n) + } + + override def takeRight(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `takeRight` could be optimized to construct a new trie structure by visiting each node in reverse, + // and including those nodes in the resulting trie, until `n` total elements have been included. + super.takeRight(n) + } + + override def takeWhile(p: ((K, V)) => Boolean): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `takeWhile` could be optimized to construct a new trie structure by visiting each node, and + // including those nodes in the resulting trie, until `p` returns `false` + super.takeWhile(p) + } + + override def dropWhile(p: ((K, V)) => Boolean): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility.
+ // + // In particular, `dropWhile` could be optimized to construct a new trie structure by visiting each node, and + // dropping those nodes in the resulting trie, until `p` returns `false` + super.dropWhile(p) + } + + override def dropRight(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, in reverse + // order, and dropping all nodes until `n` elements have been dropped + super.dropRight(n) + } + + override def drop(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `drop` could be optimized to construct a new trie structure by visiting each node, and + // dropping all nodes until `n` elements have been dropped + super.drop(n) + } + + override def span(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `span` could be optimized to construct a new trie structure by visiting each node, and + // keeping each node and element until `p` returns false, then including the remaining nodes in the second result. + // This would avoid having to rebuild most of the trie, and would eliminate the need to perform hashing and equality + // checks. + super.span(p) + } + + } + + private[immutable] object MapNode { + + private final val EmptyMapNode = new BitmapIndexedMapNode(0, 0, Array.empty, Array.empty, 0, 0) + + def empty[K, V]: BitmapIndexedMapNode[K, V] = EmptyMapNode.asInstanceOf[BitmapIndexedMapNode[K, V]] + + final val TupleLength = 2 + + } + + + private[immutable] sealed abstract class MapNode[K, +V] extends Node[MapNode[K, V @uV]] { + def apply(key: K, originalHash: Int, hash: Int, shift: Int): V + + def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] + + def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 + + def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean + + /** Returns a MapNode with the passed key-value assignment added + * + * @param key the key to add to the MapNode + * @param value the value to associate with `key` + * @param originalHash the original hash of `key` + * @param hash the improved hash of `key` + * @param shift the shift of the node (distanceFromRoot * BitPartitionSize) + * @param replaceValue if true, then the value currently associated to `key` will be replaced with the passed value + * argument. + * if false, then the key will be inserted if not already present, however if the key is present + * then the passed value will not replace the current value. That is, if `false`, then this + * method has `update if not exists` semantics.
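+ * For example, `HashMap`'s `keySet.incl` above passes `replaceValue = false`, so re-adding an + * existing key leaves its current value (and the map) untouched.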
+ */ + def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] + + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): MapNode[K, V] + + def hasPayload: Boolean + + def payloadArity: Int + + def getKey(index: Int): K + + def getValue(index: Int): V + + def getPayload(index: Int): (K, V) + + def size: Int + + def foreach[U](f: ((K, V)) => U): Unit + + def foreachEntry[U](f: (K, V) => U): Unit + + def foreachWithHash(f: (K, V, Int) => Unit): Unit + + def transform[W](f: (K, V) => W): MapNode[K, W] + + def copy(): MapNode[K, V] + + def concat[V1 >: V](that: MapNode[K, V1], shift: Int): MapNode[K, V1] + + def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): MapNode[K, V] + + /** Merges this node with that node, adding each resulting tuple to `builder` + * + * `this` should be a node from `left` hashmap in `left.merged(right)(mergef)` + * + * @param that node from the "right" HashMap. Must also be at the same "path" or "position" within the right tree, + * as `this` is, within the left tree + */ + def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit + + /** Returns the exact (equal by reference) key, and value, associated to a given key. + * If the key is not bound to a value, then an exception is thrown + */ + def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) + + /** Adds all key-value pairs to a builder */ + def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit +} + +private final class BitmapIndexedMapNode[K, +V]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends MapNode[K, V] { + + releaseFence() + + import MapNode._ + import Node._ + + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) + + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new MapKeyValueTupleIterator[K, V](this).size - payloadArity >= 2 * nodeArity + + private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length + + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == false) + + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == true) + + predicate1 && predicate2 && predicate3 + } + */ + + def getKey(index: Int): K = content(TupleLength * index).asInstanceOf[K] + def getValue(index: Int): V = content(TupleLength * index + 1).asInstanceOf[V] + + def getPayload(index: Int) = Tuple2( + content(TupleLength * index).asInstanceOf[K], + content(TupleLength * index + 1).asInstanceOf[V]) + + override def getHash(index: Int): Int = originalHashes(index) + + def getNode(index: Int): MapNode[K, V] = + content(content.length - 1 - index).asInstanceOf[MapNode[K, V]] + + def apply(key: K, originalHash: Int, keyHash: Int, shift: Int): V = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + if (key == getKey(index)) getValue(index) else throw new NoSuchElementException(s"key not found: $key") + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, 
bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + throw new NoSuchElementException(s"key not found: $key") + } + } + + def get(key: K, originalHash: Int, keyHash: Int, shift: Int): Option[V] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) Some(this.getValue(index)) else None + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).get(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + None + } + } + + override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { + val mask = maskFrom(hash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val payload = getPayload(index) + if (key == payload._1) payload else throw new NoSuchElementException + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + getNode(index).getTuple(key, originalHash, hash, shift + BitPartitionSize) + } else { + throw new NoSuchElementException + } + } + + def getOrElse[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int, f: => V1): V1 = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) getValue(index) else f + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).getOrElse(key, originalHash, keyHash, shift + BitPartitionSize, f) + } else { + f + } + } + + override def containsKey(key: K, originalHash: Int, keyHash: Int, shift: Int): Boolean = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + // assert(hashes(index) == computeHash(this.getKey(index)), (hashes.toSeq, content.toSeq, index, key, keyHash, shift)) + (originalHashes(index) == originalHash) && key == getKey(index) + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, bitpos)).containsKey(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + false + } + } + + + def updated[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, replaceValue: Boolean): BitmapIndexedMapNode[K, V1] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + if (replaceValue) { + val value0 = this.getValue(index) + if ((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef])) + this + else copyAndSetValue(bitpos, key, value) + } else this + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + + copyAndMigrateFromInlineToNode(bitpos, key0Hash, subNodeNew) + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeNew = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, 
replaceValue) + + if (subNodeNew eq subNode) this else copyAndSetNode(bitpos, subNode, subNodeNew) + } else copyAndInsertValue(bitpos, key, originalHash, keyHash, value) + } + + /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately + * descendant child nodes (only one level beneath `this`) + * + * The caller should pass a bitmap of child nodes of this node, which this method may mutate. + * If this method may mutate a child node, then if the updated key-value belongs in that child node, it will + * be shallowly mutated (its children will not be mutated). + * + * If instead this method may not mutate the child node in which the to-be-updated key-value pair belongs, then + * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. + * + * @param key the key to update + * @param value the value to set `key` to + * @param originalHash key.## + * @param keyHash the improved hash + * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated + * during the call to this method + * + * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be + * available for mutations in subsequent calls. + */ + def updateWithShallowMutations[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + val value0 = this.getValue(index) + if (!((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]))) { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + content(idx + 1) = value + } + shallowlyMutableNodeMap + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) + shallowlyMutableNodeMap | bitpos + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeSize = subNode.size + val subNodeHashCode = subNode.cachedJavaKeySetHashCode + + var returnMutableNodeMap = shallowlyMutableNodeMap + + val subNodeNew: MapNode[K, V1] = subNode match { + case subNodeBm: BitmapIndexedMapNode[K, V] if (bitpos & shallowlyMutableNodeMap) != 0 => + subNodeBm.updateWithShallowMutations(key, value, originalHash, keyHash, shift + BitPartitionSize, 0) + subNodeBm + case _ => + val result = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue = true) + if (result ne subNode) { + returnMutableNodeMap |= bitpos + } + result + } + + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size = this.size - subNodeSize + subNodeNew.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeHashCode + subNodeNew.cachedJavaKeySetHashCode + returnMutableNodeMap + } else { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 
element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + this.dataMap |= bitpos + this.content = dst + this.originalHashes = insertElement(originalHashes, dataIx, originalHash) + this.size += 1 + this.cachedJavaKeySetHashCode += keyHash + shallowlyMutableNodeMap + } + } + + def removed[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int): BitmapIndexedMapNode[K, V1] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + + if (key0 == key) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + /* + * Create new node with remaining pair. The new node will either a) become the new root + * returned, or b) be unwrapped and inlined during returning. + */ + val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(keyHash, 0)) + if (index == 0) + new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(1), getValue(1)), Array(originalHashes(1)), 1, improve(getHash(1))) + else + new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(0), getValue(0)), Array(originalHashes(0)), 1, improve(getHash(0))) + } else copyAndRemoveValue(bitpos, keyHash) + } else this + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.removed(key, originalHash, keyHash, shift + BitPartitionSize) + // assert(subNodeNew.size != 0, "Sub-node must have at least one element.") + + if (subNodeNew eq subNode) return this + + // cache just in case subNodeNew is a hash-collision node, in which case a little arithmetic is avoided + // in Vector#length + val subNodeNewSize = subNodeNew.size + + if (subNodeNewSize == 1) { + if (this.size == subNode.size) { + // subNode is the only child (no other data or node children of `this` exist) + // escalate (singleton or empty) result + subNodeNew.asInstanceOf[BitmapIndexedMapNode[K, V]] + } else { + // inline value (move to front) + copyAndMigrateFromNodeToInline(bitpos, subNode, subNodeNew) + } + } else if (subNodeNewSize > 1) { + // modify current node (set replacement node) + copyAndSetNode(bitpos, subNode, subNodeNew) + } else this + } else this + } + + def mergeTwoKeyValPairs[V1 >: V](key0: K, value0: V1, originalHash0: Int, keyHash0: Int, key1: K, value1: V1, originalHash1: Int, keyHash1: Int, shift: Int): MapNode[K, V1] = { + // assert(key0 != key1) + + if (shift >= HashCodeLength) { + new HashCollisionMapNode[K, V1](originalHash0, keyHash0, Vector((key0, value0), (key1, value1))) + } else { + val mask0 = maskFrom(keyHash0, shift) + val mask1 = maskFrom(keyHash1, shift) + val newCachedHash = keyHash0 + keyHash1 + + if (mask0 != mask1) { + // unique prefixes, payload fits on same level + val dataMap = bitposFrom(mask0) | bitposFrom(mask1) + + if (mask0 < mask1) { + new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key0, value0, key1, value1), Array(originalHash0, originalHash1), 2, newCachedHash) + } else { + new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key1, value1, key0, value0), Array(originalHash1, originalHash0), 2, newCachedHash) + } + } else { + // identical prefixes, payload must be disambiguated deeper in the trie + val nodeMap = bitposFrom(mask0) + val node = mergeTwoKeyValPairs(key0, value0, originalHash0, keyHash0, key1, value1, originalHash1, keyHash1, shift + BitPartitionSize) + new
BitmapIndexedMapNode[K, V1](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) + } + } + } + + def hasNodes: Boolean = nodeMap != 0 + + def nodeArity: Int = bitCount(nodeMap) + + def hasPayload: Boolean = dataMap != 0 + + def payloadArity: Int = bitCount(dataMap) + + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) + + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) + + def copyAndSetValue[V1 >: V](bitpos: Int, newKey: K, newValue: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + //dst(idx) = newKey + dst(idx + 1) = newValue + new BitmapIndexedMapNode[K, V1](dataMap, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) + } + + def copyAndSetNode[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], newNode: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedMapNode[K, V1]( + dataMap, + nodeMap, + dst, + originalHashes, + size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } + + def copyAndInsertValue[V1 >: V](bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + new BitmapIndexedMapNode[K, V1](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + keyHash) + } + + def copyAndRemoveValue(bitpos: Int, keyHash: Int): BitmapIndexedMapNode[K, V] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - TupleLength) + + // copy 'src' and remove 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + new BitmapIndexedMapNode[K, V](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - keyHash) + } + + /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. 
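+ * + * The key-value pair currently stored inline at `bitpos` is removed from the data area, `node` is installed in the + * node area, and `dataMap`, `nodeMap`, `size` and `cachedJavaKeySetHashCode` are updated in place to match.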
+ * + * @param bitpos the bit position of the data to migrate to node + * @param keyHash the improved hash of the key currently at `bitpos` + * @param node the node to place at `bitpos` beneath `this` + */ + def migrateFromInlineToNodeInPlace[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): this.type = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + this.dataMap = dataMap ^ bitpos + this.nodeMap = nodeMap | bitpos + this.content = dst + this.originalHashes = dstHashes + this.size = size - 1 + node.size + this.cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + this + } + + def copyAndMigrateFromInlineToNode[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap ^ bitpos, + nodeMap = nodeMap | bitpos, + content = dst, + originalHashes = dstHashes, + size = size - 1 + node.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + ) + } + + def copyAndMigrateFromNodeToInline[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val idxNew = TupleLength * dataIxNew + + val key = node.getKey(0) + val value = node.getValue(0) + val src = this.content + val dst = new Array[Any](src.length - 1 + TupleLength) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 2 element(s) at position 'idxNew' + // assert(idxOld >= idxNew) + arraycopy(src, 0, dst, 0, idxNew) + dst(idxNew) = key + dst(idxNew + 1) = value + arraycopy(src, idxNew, dst, idxNew + TupleLength, idxOld - idxNew) + arraycopy(src, idxOld + 1, dst, idxOld + TupleLength, src.length - idxOld - 1) + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap | bitpos, + nodeMap = nodeMap ^ bitpos, + content = dst, + originalHashes = dstHashes, + size = size - oldNode.size + 1, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + ) + } + + override def foreach[U](f: ((K, V)) => U): Unit = { + val iN = payloadArity // 
arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreach(f) + j += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getKey(i), getValue(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachEntry(f) + j += 1 + } + } + + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + var i = 0 + val iN = payloadArity // arity doesn't change during this operation + while (i < iN) { + f(getKey(i), getValue(i), getHash(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 + } + } + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + var i = 0 + val iN = payloadArity + val jN = nodeArity + while (i < iN) { + builder.addOne(getKey(i), getValue(i), getHash(i)) + i += 1 + } + + var j = 0 + while (j < jN) { + getNode(j).buildTo(builder) + j += 1 + } + } + + override def transform[W](f: (K, V) => W): BitmapIndexedMapNode[K, W] = { + var newContent: Array[Any] = null + val iN = payloadArity // arity doesn't change during this operation + val jN = nodeArity // arity doesn't change during this operation + val newContentLength = content.length + var i = 0 + while (i < iN) { + val key = getKey(i) + val value = getValue(i) + val newValue = f(key, value) + if (newContent eq null) { + if (newValue.asInstanceOf[AnyRef] ne value.asInstanceOf[AnyRef]) { + newContent = content.clone() + newContent(TupleLength * i + 1) = newValue + } + } else { + newContent(TupleLength * i + 1) = newValue + } + i += 1 + } + + var j = 0 + while (j < jN) { + val node = getNode(j) + val newNode = node.transform(f) + if (newContent eq null) { + if (newNode ne node) { + newContent = content.clone() + newContent(newContentLength - j - 1) = newNode + } + } else + newContent(newContentLength - j - 1) = newNode + j += 1 + } + if (newContent eq null) this.asInstanceOf[BitmapIndexedMapNode[K, W]] + else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent, originalHashes, size, cachedJavaKeySetHashCode) + } + + override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) { + that.buildTo(builder) + return + } else if (bm.size == 0) { + buildTo(builder) + return + } + + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + val minIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maxIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + { + var index = minIndex + var leftIdx = 0 + var rightIdx = 0 + + while (index < maxIndex) { + val bitpos = bitposFrom(index) + + if ((bitpos & dataMap) != 0) { + val leftKey = getKey(leftIdx) + val leftValue = getValue(leftIdx) + val leftOriginalHash = getHash(leftIdx) + if ((bitpos & bm.dataMap) != 0) { + // left data and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + if (leftOriginalHash == rightOriginalHash && leftKey == rightKey) { + builder.addOne(mergef((leftKey, leftValue), (rightKey, rightValue))) + } else { + 
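+ // two distinct keys that share this bit position: nothing to merge, keep both entries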
builder.addOne(leftKey, leftValue, leftOriginalHash) + builder.addOne(rightKey, rightValue, rightOriginalHash) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + // left data and right node + val subNode = bm.getNode(bm.nodeIndex(bitpos)) + val leftImprovedHash = improve(leftOriginalHash) + val removed = subNode.removed(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap between leftData and rightNode, just build both children into the builder + subNode.buildTo(builder) + builder.addOne(leftKey, leftValue, leftOriginalHash, leftImprovedHash) + } else { + // there is a collision, so that key needs special treatment + removed.buildTo(builder) + builder.addOne(mergef((leftKey, leftValue), subNode.getTuple(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize))) + } + } else { + // left data and nothing on right + builder.addOne(leftKey, leftValue, leftOriginalHash) + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + // left node and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + val rightImprovedHash = improve(rightOriginalHash) + + val subNode = getNode(nodeIndex(bitpos)) + val removed = subNode.removed(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap between leftNode and rightData, just build both children into the builder + subNode.buildTo(builder) + builder.addOne(rightKey, rightValue, rightOriginalHash, rightImprovedHash) + } else { + // there is a collision, so that key needs special treatment + removed.buildTo(builder) + builder.addOne(mergef(subNode.getTuple(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize), (rightKey, rightValue))) + } + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // left node and right node + getNode(nodeIndex(bitpos)).mergeInto(bm.getNode(bm.nodeIndex(bitpos)), builder, shift + BitPartitionSize)(mergef) + } else { + // left node and nothing on right + getNode(nodeIndex(bitpos)).buildTo(builder) + } + } else if ((bitpos & bm.dataMap) != 0) { + // nothing on left, right data + val dataIndex = bm.dataIndex(bitpos) + builder.addOne(bm.getKey(dataIndex), bm.getValue(dataIndex), bm.getHash(dataIndex)) + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // nothing on left, right node + bm.getNode(bm.nodeIndex(bitpos)).buildTo(builder) + } + + index += 1 + } + } + case _: HashCollisionMapNode[_, _] => + throw new Exception("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode") + } + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedMapNode[_, _] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false + } + + @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true + var i = 0 + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) + i += 1 + } + + isEqual + } + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def concat[V1 >: V](that: MapNode[K,
V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getKey(0), bm.getValue(0), originalHash, improve(originalHash), shift, replaceValue = true) + } + // if we go through the merge and the result does not differ from `bm`, we can just return `bm` to improve sharing. + // So `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result and `bm`. + var anyChangesMadeSoFar = false + + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + // minimumBitPos is inclusive -- it is the bit position of the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumBitPos is inclusive -- it is the bit position of the last index for which there is data or nodes + // it cannot be exclusive, because then the upper bound in the worst case (Node.BranchingFactor) would be out of + // bounds for an Int bit-position representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataRightOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + val leftOriginalHash = getHash(leftIdx) + if (leftOriginalHash == bm.getHash(rightIdx) && getKey(leftIdx) == bm.getKey(rightIdx)) { + leftDataRightDataRightOverwrites |= bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(leftOriginalHash), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataRightOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets + + + if ((newDataMap == (rightDataOnly | leftDataRightDataRightOverwrites)) && (newNodeMap == rightNodeOnly)) { + // nothing from `this` will make it into the result -- return early + return bm + } + + val newDataSize = bitCount(newDataMap) + val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) + + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 + + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 + + val nextShift =
shift + Node.BitPartitionSize + + var compressedDataIdx = 0 + var compressedNodeIdx = 0 + + var bitpos = minimumBitPos + var finished = false + + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val rightNode = bm.getNode(rightNodeIdx) + val newNode = getNode(leftNodeIdx).concat(rightNode, nextShift) + if (rightNode ne newNode) { + anyChangesMadeSoFar = true + } + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftKey = getKey(leftDataIdx) + val leftValue = getValue(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + + val updated = n.updated(leftKey, leftValue, leftOriginalHash, leftImproved, nextShift, replaceValue = false) + + if (updated ne n) { + anyChangesMadeSoFar = true + } + + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } + else if ((bitpos & leftNodeRightData) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + getNode(leftNodeIdx).updated( + key = bm.getKey(rightDataIdx), + value = bm.getValue(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift, + replaceValue = true + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = originalHashes(leftDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = getKey(leftDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = getValue(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = getNode(leftNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val 
rightOriginalHash = bm.getHash(rightDataIdx) + + bm.mergeTwoKeyValPairs( + getKey(leftDataIdx), getValue(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), + bm.getKey(rightDataIdx), bm.getValue(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), + nextShift + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftDataIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataRightOverwrites) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + leftDataIdx += 1 + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + if (anyChangesMadeSoFar) + new BitmapIndexedMapNode( + dataMap = newDataMap, + nodeMap = newNodeMap, + content = newContent, + originalHashes = newOriginalHashes, + size = newSize, + cachedJavaKeySetHashCode = newCachedHashCode + ) + else bm + + case _ => + // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") + } + + override def copy(): BitmapIndexedMapNode[K, V] = { + val contentClone = content.clone() + val contentLength = contentClone.length + var i = bitCount(dataMap) * TupleLength + while (i < contentLength) { + contentClone(i) = contentClone(i).asInstanceOf[MapNode[K, V]].copy() + i += 1 + } + new BitmapIndexedMapNode[K, V](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) + } + + override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): BitmapIndexedMapNode[K, V] = { + if (size == 0) this + else if (size == 1) { + if (pred(getPayload(0)) != flipped) this else MapNode.empty + } else if (nodeMap == 0) { + // Performance optimization for nodes of depth 1: + // + // this node has no "node" children, all children are inlined data elems, therefore the logic is significantly simpler + // approach: + // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter + // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations + // * traverse the content array once more, placing each passing element (according to `newDataMap`) in the new content and originalHashes arrays + // + // note: + // * this optimization significantly improves performance of not only small trees, but also larger trees, since + // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as + // descendants + // + val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap) + + var newDataMap = 0 + var newCachedHashCode = 0 + var dataIndex = 0 + + var i = minimumIndex + + while(i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos +
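// cachedJavaKeySetHashCode is the sum of the improved hashes of all keys, so fold in the surviving entry's improved hash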
newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } + + i += 1 + } + + if (newDataMap == 0) { + MapNode.empty + } else if (newDataMap == dataMap) { + this + } else { + val newSize = Integer.bitCount(newDataMap) + val newContent = new Array[Any](newSize * TupleLength) + val newOriginalHashCodes = new Array[Int](newSize) + val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) + + var j = Integer.numberOfTrailingZeros(newDataMap) + + var newDataIndex = 0 + + + while (j < newMaximumIndex) { + val bitpos = bitposFrom(j) + if ((bitpos & newDataMap) != 0) { + val oldIndex = indexFrom(dataMap, bitpos) + newContent(newDataIndex * TupleLength) = content(oldIndex * TupleLength) + newContent(newDataIndex * TupleLength + 1) = content(oldIndex * TupleLength + 1) + newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) + newDataIndex += 1 + } + j += 1 + } + + new BitmapIndexedMapNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) + } + + + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] = null + + // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[MapNode[K, V]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue() + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + + if (newSize == 0) { + MapNode.empty + } else if (newSize == size) { + this + } else { + val newDataSize = bitCount(newDataMap) + val newContentSize = (MapNode.TupleLength * 
newDataSize) + bitCount(newNodeMap) + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + + val newAllMap = newDataMap | newNodeMap + val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) + + // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will + // not be incremented properly. Were it not for that, we could have started at Integer.numberOfTrailingZeros(newAllMap) + var i = minimumIndex + + var oldDataIndex = 0 + var oldNodeIndex = 0 + + var newDataIndex = 0 + var newNodeIndex = 0 + + while (i < maxIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & oldDataPassThrough) != 0) { + newContent(newDataIndex * TupleLength) = getKey(oldDataIndex) + newContent(newDataIndex * TupleLength + 1) = getValue(oldDataIndex) + newOriginalHashes(newDataIndex) = getHash(oldDataIndex) + newDataIndex += 1 + oldDataIndex += 1 + } else if ((bitpos & nodesToPassThroughMap) != 0) { + newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { + // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesToMigrateToData must not be null + val node = nodesToMigrateToData.dequeue() + newContent(TupleLength * newDataIndex) = node.getKey(0) + newContent(TupleLength * newDataIndex + 1) = node.getValue(0) + newOriginalHashes(newDataIndex) = node.getHash(0) + newDataIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & mapOfNewNodes) != 0) { + newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & dataMap) != 0) { + oldDataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + oldNodeIndex += 1 + } + + i += 1 + } + + new BitmapIndexedMapNode[K, V](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) + } + } + } +} + +private final class HashCollisionMapNode[K, +V]( + val originalHash: Int, + val hash: Int, + var content: Vector[(K, V @uV)] + ) extends MapNode[K, V] { + + import Node._ + + require(content.length >= 2) + + releaseFence() + + private[immutable] def indexOf(key: Any): Int = { + val iter = content.iterator + var i = 0 + while (iter.hasNext) { + if (iter.next()._1 == key) return i + i += 1 + } + -1 + } + + def size: Int = content.length + + def apply(key: K, originalHash: Int, hash: Int, shift: Int): V = get(key, originalHash, hash, shift).getOrElse(throw new NoSuchElementException) + + def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] = + if (this.hash == hash) { + val index = indexOf(key) + if (index >= 0) Some(content(index)._2) else None + } else None + + override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { + val index = indexOf(key) + if (index >= 0) content(index) else throw new NoSuchElementException + } + + def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 = { + if (this.hash == hash) { + indexOf(key) match { + case -1 => f + case other => content(other)._2 + } + } else f + } + + override def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean = + this.hash == hash && indexOf(key) >= 0 + + def contains[V1 >: V](key: K, value: V1, hash: Int, shift: Int): Boolean = + this.hash == hash && { + val index = indexOf(key) + index >= 0 && (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) + } + + def updated[V1 >: V](key: K, value: V1,
originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] = { + val index = indexOf(key) + if (index >= 0) { + if (replaceValue) { + if (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) { + this + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.updated[(K, V1)](index, (key, value))) + } + } else { + this + } + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.appended[(K, V1)]((key, value))) + } + } + + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] = { + if (!this.containsKey(key, originalHash, hash, shift)) { + this + } else { + val updatedContent = content.filterNot(keyValuePair => keyValuePair._1 == key) + // assert(updatedContent.size == content.size - 1) + + updatedContent.size match { + case 1 => + val (k, v) = updatedContent(0) + new BitmapIndexedMapNode[K, V1](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + case _ => new HashCollisionMapNode[K, V1](originalHash, hash, updatedContent) + } + } + } + + def hasNodes: Boolean = false + + def nodeArity: Int = 0 + + def getNode(index: Int): MapNode[K, V] = + throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") + + def hasPayload: Boolean = true + + def payloadArity: Int = content.length + + def getKey(index: Int): K = getPayload(index)._1 + def getValue(index: Int): V = getPayload(index)._2 + + def getPayload(index: Int): (K, V) = content(index) + + override def getHash(index: Int): Int = originalHash + + def foreach[U](f: ((K, V)) => U): Unit = content.foreach(f) + + def foreachEntry[U](f: (K, V) => U): Unit = content.foreach { case (k, v) => f(k, v)} + + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next._1, next._2, originalHash) + } + } + + override def transform[W](f: (K, V) => W): HashCollisionMapNode[K, W] = { + val newContent = Vector.newBuilder[(K, W)] + val contentIter = content.iterator + // true if any values have been transformed to a different value via `f` + var anyChanges = false + while(contentIter.hasNext) { + val (k, v) = contentIter.next() + val newValue = f(k, v) + newContent.addOne((k, newValue)) + anyChanges ||= (v.asInstanceOf[AnyRef] ne newValue.asInstanceOf[AnyRef]) + } + if (anyChanges) new HashCollisionMapNode(originalHash, hash, newContent.result()) + else this.asInstanceOf[HashCollisionMapNode[K, W]] + } + + override def equals(that: Any): Boolean = + that match { + case node: HashCollisionMapNode[_, _] => + (this eq node) || + (this.hash == node.hash) && + (this.content.length == node.content.length) && { + val iter = content.iterator + while (iter.hasNext) { + val (key, value) = iter.next() + val index = node.indexOf(key) + if (index < 0 || value != node.content(index)._2) { + return false + } + } + true + } + case _ => false + } + + override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): HashCollisionMapNode[K, V1] = that match { + case hc: HashCollisionMapNode[K, V1] => + if (hc eq this) { + this + } else { + var newContent: VectorBuilder[(K, V1)] = null + val iter = content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (hc.indexOf(nextPayload._1) < 0) { + if (newContent eq null) { + newContent = new VectorBuilder[(K, V1)]() + newContent.addAll(hc.content) + } + newContent.addOne(nextPayload) + } + } + if (newContent eq null) hc else new HashCollisionMapNode(originalHash, hash, 
newContent.result()) + } + case _: BitmapIndexedMapNode[K, V1] => + // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") + } + + + override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case hc: HashCollisionMapNode[K, V1] => + val iter = content.iterator + val rightArray = hc.content.toArray[AnyRef] // really Array[(K, V1)] + + def rightIndexOf(key: K): Int = { + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if ((elem ne null) && (elem.asInstanceOf[(K, V1)])._1 == key) return i + i += 1 + } + -1 + } + + while (iter.hasNext) { + val nextPayload = iter.next() + val index = rightIndexOf(nextPayload._1) + + if (index == -1) { + builder.addOne(nextPayload) + } else { + val rightPayload = rightArray(index).asInstanceOf[(K, V1)] + rightArray(index) = null + + builder.addOne(mergef(nextPayload, rightPayload)) + } + } + + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if (elem ne null) builder.addOne(elem.asInstanceOf[(K, V1)]) + i += 1 + } + case _: BitmapIndexedMapNode[K, V1] => + throw new Exception("Cannot merge HashCollisionMapNode with BitmapIndexedMapNode") + + } + + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + builder.addOne(k, v, originalHash, hash) + } + } + + override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): MapNode[K, V] = { + val newContent = content.filterImpl(pred, flipped) + val newContentLength = newContent.length + if (newContentLength == 0) { + MapNode.empty + } else if (newContentLength == 1) { + val (k, v) = newContent.head + new BitmapIndexedMapNode[K, V](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + } else if (newContentLength == content.length) this + else new HashCollisionMapNode(originalHash, hash, newContent) + } + + override def copy(): HashCollisionMapNode[K, V] = new HashCollisionMapNode[K, V](originalHash, hash, content) + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def cachedJavaKeySetHashCode: Int = size * hash + +} + +private final class MapKeyIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[K] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val key = currentValueNode.getKey(currentValueCursor) + currentValueCursor += 1 + + key + } + +} + +private final class MapValueIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[V] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val value = currentValueNode.getValue(currentValueCursor) + currentValueCursor += 1 + + value + } +} + +private final class MapKeyValueTupleIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class MapKeyValueTupleReverseIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with 
Iterator[(K, V)] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } +} + +private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[Any] { + private[this] var hash = 0 + private[this] var value: V = _ + override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed) + def next() = { + if (!hasNext) + throw new NoSuchElementException + + hash = currentValueNode.getHash(currentValueCursor) + value = currentValueNode.getValue(currentValueCursor) + currentValueCursor -= 1 + this + } +} + +/** Used in HashMap[K, V]#removeAll(HashSet[K]) */ +private final class MapNodeRemoveAllSetNodeIterator[K](rootSetNode: SetNode[K]) extends ChampBaseIterator(rootSetNode) { + /** Returns the result of immutably removing all keys in `rootSetNode` from `rootMapNode` */ + def removeAll[V](rootMapNode: BitmapIndexedMapNode[K, V]): BitmapIndexedMapNode[K, V] = { + var curr = rootMapNode + while (curr.size > 0 && hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + curr = curr.removed( + key = currentValueNode.getPayload(currentValueCursor), + keyHash = improve(originalHash), + originalHash = originalHash, + shift = 0 + ) + currentValueCursor += 1 + } + curr + } +} + +/** + * $factoryInfo + * + * @define Coll `immutable.HashMap` + * @define coll immutable champ hash map + */ +@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + @transient + private final val EmptyMap = new HashMap(MapNode.empty) + + def empty[K, V]: HashMap[K, V] = + EmptyMap.asInstanceOf[HashMap[K, V]] + + def from[K, V](source: collection.IterableOnce[(K, V)]): HashMap[K, V] = + source match { + case hs: HashMap[K, V] => hs + case _ => (newBuilder[K, V] ++= source).result() + } + + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[K, V]: ReusableBuilder[(K, V), HashMap[K, V]] = new HashMapBuilder[K, V] +} + + +/** A Builder for a HashMap. + * $multipleResults + */ +private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, V), HashMap[K, V]] { + import MapNode._ + import Node._ + + private def newEmptyRootNode = new BitmapIndexedMapNode[K, V](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The HashMap last given out as a return value of `result()`, if any; otherwise null. + * Indicates that, on the next add, the elements must be copied to an identical structure before mutation + * continues.
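+ * + * For example (illustrative): + * {{{ + * val b = HashMap.newBuilder[Int, String] + * b.addOne(1 -> "one") + * val m1 = b.result() // `b` is now aliased to `m1` + * b.addOne(2 -> "two") // copies the trie before mutating, so `m1` is unaffected + * val m2 = b.result() + * }}}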
*/ + private var aliased: HashMap[K, V] = _ + + private def isAliased: Boolean = aliased != null + + /** The root node of the partially built hashmap */ + private var rootNode: BitmapIndexedMapNode[K, V] = newEmptyRootNode + + private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = + if (rootNode.size == 0) value + else { + val originalHash = key.## + rootNode.getOrElse(key, originalHash, improve(originalHash), 0, value) + } + + /** Inserts element `elem` into array `as` at index `ix`, shifting the trailing elements to the right */ + private[this] def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw new ArrayIndexOutOfBoundsException + if (ix > as.length) throw new ArrayIndexOutOfBoundsException + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + + /** Inserts a key-value pair into the BitmapIndexedMapNode. Requires that this is a new key-value pair */ + private[this] def insertValue[V1 >: V](bm: BitmapIndexedMapNode[K, V], bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = bm.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash) + + bm.dataMap |= bitpos + bm.content = dst + bm.originalHashes = dstHashes + bm.size += 1 + bm.cachedJavaKeySetHashCode += keyHash + } + + /** Upserts a key/value pair into mapNode, mutably */ + private[immutable] def update(mapNode: MapNode[K, V], key: K, value: V, originalHash: Int, keyHash: Int, shift: Int): Unit = { + mapNode match { + case bm: BitmapIndexedMapNode[K, V] => + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + if ((bm.dataMap & bitpos) != 0) { + val index = indexFrom(bm.dataMap, mask, bitpos) + val key0 = bm.getKey(index) + val key0UnimprovedHash = bm.getHash(index) + + if (key0UnimprovedHash == originalHash && key0 == key) { + bm.content(TupleLength * index + 1) = value + } else { + val value0 = bm.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + + val subNodeNew: MapNode[K, V] = + bm.mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + + bm.migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) + } + + } else if ((bm.nodeMap & bitpos) != 0) { + val index = indexFrom(bm.nodeMap, mask, bitpos) + val subNode = bm.getNode(index) + val beforeSize = subNode.size + val beforeHash = subNode.cachedJavaKeySetHashCode + update(subNode, key, value, originalHash, keyHash, shift + BitPartitionSize) + bm.size += subNode.size - beforeSize + bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHash + } else { + insertValue(bm, bitpos, key, originalHash, keyHash, value) + } + case hc: HashCollisionMapNode[K, V] => + val index = hc.indexOf(key) + if (index < 0) { + hc.content = hc.content.appended((key, value)) + } else { + hc.content = hc.content.updated(index, (key, value)) + } + } + } + + /** If currently referencing an aliased structure, copy elements to a new mutable structure */ + private[this] def ensureUnaliased() = { + if (isAliased) copyElems() + aliased = null + } + + /** Copy elements to a new mutable
structure */ + private[this] def copyElems(): Unit = { + rootNode = rootNode.copy() + } + + override def result(): HashMap[K, V] = + if (rootNode.size == 0) { + HashMap.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashMap(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: (K, V)): this.type = { + ensureUnaliased() + val h = elem._1.## + val im = improve(h) + update(rootNode, elem._1, elem._2, h, im, 0) + this + } + + def addOne(key: K, value: V): this.type = { + ensureUnaliased() + val originalHash = key.## + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int, hash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, hash, 0) + this + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + ensureUnaliased() + xs match { + case hm: HashMap[K, V] => + new ChampBaseIterator[MapNode[K, V]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + mapNode = rootNode, + key = currentValueNode.getKey(currentValueCursor), + value = currentValueNode.getValue(currentValueCursor), + originalHash = originalHash, + keyHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 + } + } + case hm: collection.mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case lhm: collection.mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case thatMap: Map[K, V] => + thatMap.foreachEntry((key, value) => addOne(key, value)) + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) + } + + this + } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int = rootNode.size + + override def knownSize: Int = rootNode.size +} diff --git a/tests/pos-special/stdlib/collection/immutable/HashSet.scala b/tests/pos-special/stdlib/collection/immutable/HashSet.scala new file mode 100644 index 000000000000..459fcf1682aa --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/HashSet.scala @@ -0,0 +1,2123 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import java.lang.Integer.{bitCount, numberOfTrailingZeros} +import java.lang.System.arraycopy + +import scala.collection.Hashing.improve +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 + +/** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree. + * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. + * + * @tparam A the type of the elements contained in this hash set. + * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +final class HashSet[A] private[immutable](private[immutable] val rootNode: BitmapIndexedSetNode[A]) + extends AbstractSet[A] + with StrictOptimizedSetOps[A, HashSet, HashSet[A]] + with IterableFactoryDefaults[A, HashSet] + with DefaultSerializable { + + def this() = this(SetNode.empty) + + // This release fence is present because rootNode may have previously been mutated during construction. + releaseFence() + + private[this] def newHashSetOrThis(newRootNode: BitmapIndexedSetNode[A]): HashSet[A] = + if (rootNode eq newRootNode) this else new HashSet(newRootNode) + + override def iterableFactory: IterableFactory[HashSet] = HashSet + + override def knownSize: Int = rootNode.size + + override def size: Int = rootNode.size + + override def isEmpty: Boolean = rootNode.size == 0 + + def iterator: Iterator[A] = { + if (isEmpty) Iterator.empty + else new SetIterator[A](rootNode) + } + + protected[immutable] def reverseIterator: Iterator[A] = new SetReverseIterator[A](rootNode) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[A, SetNode[A]](size, rootNode, (node, i) => node.getPayload(i))) + } + s.asInstanceOf[S with EfficientSplit] + } + + def contains(element: A): Boolean = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + rootNode.contains(element, elementUnimprovedHash, elementHash, 0) + } + + def incl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + val newRootNode = rootNode.updated(element, elementUnimprovedHash, elementHash, 0) + newHashSetOrThis(newRootNode) + } + + def excl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + val newRootNode = rootNode.removed(element, elementUnimprovedHash, elementHash, 0) + newHashSetOrThis(newRootNode) + } + + override def concat(that: IterableOnce[A]): HashSet[A] = + that match { + case hs: HashSet[A] => + if (isEmpty) hs + else { + val newNode = rootNode.concat(hs.rootNode, 0) + if (newNode eq hs.rootNode) hs + else newHashSetOrThis(newNode) + } + case hs: collection.mutable.HashSet[A] => + val iter = hs.nodeIterator + var current = rootNode + while 
(iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + case lhs: collection.mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + case _ => + val iter = that.iterator + var current = rootNode + while (iter.hasNext) { + val element = iter.next() + val originalHash = element.## + val improved = improve(originalHash) + current = current.updated(element, originalHash, improved, 0) + + if (current ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since `element` has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. 
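+ // + // From this point on, `updateWithShallowMutations` may mutate `current` in place, as well as any child nodes + // recorded in `shallowlyMutableNodeMap` (including nodes it freshly creates), avoiding further copying.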
+            var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0))
+            while (iter.hasNext) {
+              val element = iter.next()
+              val originalHash = element.##
+              val improved = improve(originalHash)
+              shallowlyMutableNodeMap = current.updateWithShallowMutations(element, originalHash, improved, 0, shallowlyMutableNodeMap)
+            }
+            return new HashSet(current)
+          }
+        }
+        this
+    }
+
+  override def tail: HashSet[A] = this - head
+
+  override def init: HashSet[A] = this - last
+
+  override def head: A = iterator.next()
+
+  override def last: A = reverseIterator.next()
+
+  override def foreach[U](f: A => U): Unit = rootNode.foreach(f)
+
+  /** Applies a function f to each element, and its corresponding **original** hash, in this Set */
+  @`inline` private[collection] def foreachWithHash(f: (A, Int) => Unit): Unit = rootNode.foreachWithHash(f)
+
+  /** Applies a function f to each element, and its corresponding **original** hash, in this Set.
+   *  Stops iterating the first time that f returns `false`. */
+  @`inline` private[collection] def foreachWithHashWhile(f: (A, Int) => Boolean): Unit = rootNode.foreachWithHashWhile(f)
+
+  def subsetOf(that: Set[A]): Boolean = if (that.isEmpty) true else that match {
+    case set: HashSet[A] => rootNode.subsetOf(set.rootNode, 0)
+    case _ => super.subsetOf(that)
+  }
+
+  override def equals(that: Any): Boolean =
+    that match {
+      case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode)
+      case _ => super.equals(that)
+    }
+
+  override protected[this] def className = "HashSet"
+
+  override def hashCode(): Int = {
+    val it = new SetHashIterator(rootNode)
+    val hash = MurmurHash3.unorderedHash(it, MurmurHash3.setSeed)
+    //assert(hash == super.hashCode())
+    hash
+  }
+
+  override def diff(that: collection.Set[A]): HashSet[A] = {
+    if (isEmpty) {
+      this
+    } else {
+      that match {
+        case hashSet: HashSet[A] =>
+          if (hashSet.isEmpty) this else {
+            val newRootNode = rootNode.diff(hashSet.rootNode, 0)
+            if (newRootNode.size == 0) HashSet.empty else newHashSetOrThis(newRootNode)
+          }
+        case hashSet: collection.mutable.HashSet[A] =>
+          val iter = hashSet.nodeIterator
+          var curr = rootNode
+          while (iter.hasNext) {
+            val next = iter.next()
+            val originalHash = hashSet.unimproveHash(next.hash)
+            val improved = improve(originalHash)
+            curr = curr.removed(next.key, originalHash, improved, 0)
+            if (curr ne rootNode) {
+              if (curr.size == 0) {
+                return HashSet.empty
+              }
+              while (iter.hasNext) {
+                val next = iter.next()
+                val originalHash = hashSet.unimproveHash(next.hash)
+                val improved = improve(originalHash)
+
+                curr.removeWithShallowMutations(next.key, originalHash, improved)
+
+                if (curr.size == 0) {
+                  return HashSet.empty
+                }
+              }
+              return new HashSet(curr)
+            }
+          }
+          this
+
+        case other =>
+          val thatKnownSize = other.knownSize
+
+          if (thatKnownSize == 0) {
+            this
+          } else if (thatKnownSize <= size) {
+            /* this branch intentionally includes the case of thatKnownSize == -1. We know that HashSets are quite fast at look-up, so
+               we're likely to be the faster of the two at that. */
+            removedAllWithShallowMutations(other)
+          } else {
+            // TODO: Develop more sophisticated heuristic for which branch to take
+            filterNot(other.contains)
+          }
+      }
+
+    }
+  }
+
+  /** Immutably removes all elements of `that` from this HashSet
+   *
+   * Mutation is used internally, but only on root SetNodes which this method itself creates.
+ * + * That is, this method is safe to call on published sets because it does not mutate `this` + */ + private[this] def removedAllWithShallowMutations(that: IterableOnce[A]): HashSet[A] = { + val iter = that.iterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + curr = curr.removed(next, originalHash, improved, 0) + if (curr ne rootNode) { + if (curr.size == 0) { + return HashSet.empty + } + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + + curr.removeWithShallowMutations(next, originalHash, improved) + + if (curr.size == 0) { + return HashSet.empty + } + } + return new HashSet(curr) + } + } + this + } + + override def removedAll(that: IterableOnce[A]): HashSet[A] = that match { + case set: scala.collection.Set[A] => diff(set) + case range: Range if range.length > size => + filter { + case i: Int => !range.contains(i) + case _ => true + } + + case _ => + removedAllWithShallowMutations(that) + } + + override def partition(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.partition(p) + } + + override def span(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.span(p) + } + + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): HashSet[A] = { + val newRootNode = rootNode.filterImpl(pred, isFlipped) + if (newRootNode eq rootNode) this + else if (newRootNode.size == 0) HashSet.empty + else new HashSet(newRootNode) + } + + override def intersect(that: collection.Set[A]): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.intersect(that) + } + + override def take(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.take(n) + } + + override def takeRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeRight(n) + } + + override def takeWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeWhile(p) + } + + override def drop(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.drop(n) + } + + override def dropRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ super.dropRight(n) + } + + override def dropWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.dropWhile(p) + } +} + +private[immutable] object SetNode { + + private final val EmptySetNode = new BitmapIndexedSetNode(0, 0, Array.empty, Array.empty, 0, 0) + + def empty[A]: BitmapIndexedSetNode[A] = EmptySetNode.asInstanceOf[BitmapIndexedSetNode[A]] + + final val TupleLength = 1 + +} + +private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] { + + def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean + + def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): SetNode[A] + + def hasPayload: Boolean + + def payloadArity: Int + + def getPayload(index: Int): A + + def size: Int + + def foreach[U](f: A => U): Unit + + def subsetOf(that: SetNode[A], shift: Int): Boolean + + def copy(): SetNode[A] + + def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] + + def diff(that: SetNode[A], shift: Int): SetNode[A] + + def concat(that: SetNode[A], shift: Int): SetNode[A] + + def foreachWithHash(f: (A, Int) => Unit): Unit + + def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean +} + +private final class BitmapIndexedSetNode[A]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends SetNode[A] { + + import Node._ + import SetNode._ + + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) + + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new SetIterator[A](this).size - payloadArity >= 2 * nodeArity + + private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length + + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[SetNode[_]] == false) + + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[SetNode[_]] == true) + + predicate1 && predicate2 && predicate3 + } + */ + + def getPayload(index: Int): A = content(index).asInstanceOf[A] + + override def getHash(index: Int): Int = originalHashes(index) + + def getNode(index: Int): SetNode[A] = content(content.length - 1 - index).asInstanceOf[SetNode[A]] + + def contains(element: A, originalHash: Int, elementHash: Int, shift: Int): Boolean = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + return originalHashes(index) == originalHash && element == this.getPayload(index) + } + + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + return this.getNode(index).contains(element, originalHash, elementHash, shift + BitPartitionSize) + } + + false + } + + def updated(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0.asInstanceOf[AnyRef] 
eq element.asInstanceOf[AnyRef]) {
+        return this
+      } else {
+        val element0UnimprovedHash = getHash(index)
+        val element0Hash = improve(element0UnimprovedHash)
+        if (originalHash == element0UnimprovedHash && element0 == element) {
+          return this
+        } else {
+          val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
+          return copyAndMigrateFromInlineToNode(bitpos, element0Hash, subNodeNew)
+        }
+      }
+    }
+    if ((nodeMap & bitpos) != 0) {
+      val index = indexFrom(nodeMap, mask, bitpos)
+      val subNode = this.getNode(index)
+
+      val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize)
+      if (subNode eq subNodeNew) {
+        return this
+      } else {
+        return copyAndSetNode(bitpos, subNode, subNodeNew)
+      }
+    }
+
+    copyAndInsertValue(bitpos, element, originalHash, elementHash)
+  }
+  /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately
+   * descendant child nodes (only one level beneath `this`)
+   *
+   * The caller should pass a bitmap of child nodes of this node, which this method may mutate.
+   * If this method may mutate a child node, then if the updated value is located in that child node, it will
+   * be shallowly mutated (its children will not be mutated).
+   *
+   * If instead this method may not mutate the child node in which the to-be-updated value is located, then
+   * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node.
+   *
+   * @param element the element to insert
+   * @param originalHash element.##
+   * @param elementHash the improved hash of `element`
+   * @param shift the shift of this node (distanceFromRoot * BitPartitionSize)
+   * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated
+   *                                during the call to this method
+   *
+   * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be
+   *         available for mutations in subsequent calls.
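+   *
+   * A sketch of the intended calling pattern (illustrative only; the element values and hashes
+   * are placeholders, mirroring how `concat` drives this method):
+   * {{{
+   *   val node = rootNode.updated(e0, h0, improve(h0), 0)           // first change is immutable
+   *   var mutable = Node.bitposFrom(Node.maskFrom(improve(h0), 0))  // that slot may now be mutated
+   *   mutable = node.updateWithShallowMutations(e1, h1, improve(h1), 0, mutable)
+   * }}}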
+   */
+  def updateWithShallowMutations(element: A, originalHash: Int, elementHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = {
+    val mask = maskFrom(elementHash, shift)
+    val bitpos = bitposFrom(mask)
+
+    if ((dataMap & bitpos) != 0) {
+      val index = indexFrom(dataMap, mask, bitpos)
+      val element0 = getPayload(index)
+      val element0UnimprovedHash = getHash(index)
+      if (element0UnimprovedHash == originalHash && element0 == element) {
+        shallowlyMutableNodeMap
+      } else {
+        val element0Hash = improve(element0UnimprovedHash)
+        val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
+        migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew)
+        shallowlyMutableNodeMap | bitpos
+      }
+    } else if ((nodeMap & bitpos) != 0) {
+      val index = indexFrom(nodeMap, mask, bitpos)
+      val subNode = this.getNode(index)
+      val subNodeSize = subNode.size
+      val subNodeCachedJavaKeySetHashCode = subNode.cachedJavaKeySetHashCode
+
+      var returnNodeMap = shallowlyMutableNodeMap
+
+      val subNodeNew: SetNode[A] = subNode match {
+        case subNodeBm: BitmapIndexedSetNode[A] if (bitpos & shallowlyMutableNodeMap) != 0 =>
+          subNodeBm.updateWithShallowMutations(element, originalHash, elementHash, shift + BitPartitionSize, 0)
+          subNodeBm
+        case _ =>
+          val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize)
+          if (subNodeNew ne subNode) {
+            returnNodeMap |= bitpos
+          }
+          subNodeNew
+      }
+
+      this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew
+      this.size = this.size - subNodeSize + subNodeNew.size
+      this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeCachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode
+      returnNodeMap
+    } else {
+      val dataIx = dataIndex(bitpos)
+      val idx = dataIx
+
+      val src = this.content
+      val dst = new Array[Any](src.length + TupleLength)
+
+      // copy 'src' and insert 1 element at position 'idx' (TupleLength == 1 for sets)
+      arraycopy(src, 0, dst, 0, idx)
+      dst(idx) = element
+      arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+      val dstHashes = insertElement(originalHashes, dataIx, originalHash)
+
+      this.dataMap |= bitpos
+      this.content = dst
+      this.originalHashes = dstHashes
+      this.size += 1
+      this.cachedJavaKeySetHashCode += elementHash
+      shallowlyMutableNodeMap
+    }
+  }
+
+
+  def removed(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = {
+    val mask = maskFrom(elementHash, shift)
+    val bitpos = bitposFrom(mask)
+
+    if ((dataMap & bitpos) != 0) {
+      val index = indexFrom(dataMap, mask, bitpos)
+      val element0 = this.getPayload(index)
+
+      if (element0 == element) {
+        if (this.payloadArity == 2 && this.nodeArity == 0) {
+          /*
+           * Create new node with remaining pair. The new node will either a) become the new root
+           * returned, or b) be unwrapped and inlined during returning.
+           */
+          val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(elementHash, 0))
+          if (index == 0)
+            return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(1)), Array(originalHashes(1)), size - 1, improve(originalHashes(1)))
+          else
+            return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(0)), Array(originalHashes(0)), size - 1, improve(originalHashes(0)))
+        }
+        else return copyAndRemoveValue(bitpos, elementHash)
+      } else return this
+    }
+
+    if ((nodeMap & bitpos) != 0) {
+      val index = indexFrom(nodeMap, mask, bitpos)
+      val subNode = this.getNode(index)
+
+      val subNodeNew = subNode.removed(element, originalHash, elementHash, shift + BitPartitionSize)
+
+      if (subNodeNew eq subNode) return this
+
+      // cache just in case subNodeNew is a hashCollision node, in which case a little arithmetic is avoided
+      // in Vector#length
+      val subNodeNewSize = subNodeNew.size
+
+      if (subNodeNewSize == 1) {
+        if (this.size == subNode.size) {
+          // subNode is the only child (no other data or node children of `this` exist)
+          // escalate (singleton or empty) result
+          return subNodeNew.asInstanceOf[BitmapIndexedSetNode[A]]
+        } else {
+          // inline value (move to front)
+          return copyAndMigrateFromNodeToInline(bitpos, elementHash, subNode, subNodeNew)
+        }
+      } else if (subNodeNewSize > 1) {
+        // modify current node (set replacement node)
+        return copyAndSetNode(bitpos, subNode, subNodeNew)
+      }
+    }
+
+    this
+  }
+  /** Variant of `removed` which will perform mutation on only the top-level node (`this`), rather than return a new
+   * node
+   *
+   * Should only be called on root nodes, because shift is assumed to be 0
+   *
+   * @param element the element to remove
+   * @param originalHash the original hash of `element`
+   * @param elementHash the improved hash of `element`
+   */
+  def removeWithShallowMutations(element: A, originalHash: Int, elementHash: Int): this.type = {
+    val mask = maskFrom(elementHash, 0)
+    val bitpos = bitposFrom(mask)
+
+    if ((dataMap & bitpos) != 0) {
+      val index = indexFrom(dataMap, mask, bitpos)
+      val element0 = this.getPayload(index)
+
+      if (element0 == element) {
+        if (this.payloadArity == 2 && this.nodeArity == 0) {
+          val newDataMap = dataMap ^ bitpos
+          if (index == 0) {
+            val newContent = Array[Any](getPayload(1))
+            val newOriginalHashes = Array(originalHashes(1))
+            val newCachedJavaKeySetHashCode = improve(getHash(1))
+            this.content = newContent
+            this.originalHashes = newOriginalHashes
+            this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode
+          } else {
+            val newContent = Array[Any](getPayload(0))
+            val newOriginalHashes = Array(originalHashes(0))
+            val newCachedJavaKeySetHashCode = improve(getHash(0))
+            this.content = newContent
+            this.originalHashes = newOriginalHashes
+            this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode
+          }
+          this.dataMap = newDataMap
+          this.nodeMap = 0
+          this.size = 1
+          this
+        }
+        else {
+          val dataIx = dataIndex(bitpos)
+          val idx = TupleLength * dataIx
+
+          val src = this.content
+          val dst = new Array[Any](src.length - TupleLength)
+
+          arraycopy(src, 0, dst, 0, idx)
+          arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength)
+
+          val dstHashes = removeElement(originalHashes, dataIx)
+
+          this.dataMap = this.dataMap ^ bitpos
+          this.content = dst
+          this.originalHashes = dstHashes
+          this.size -= 1
+          this.cachedJavaKeySetHashCode -= elementHash
+          this
+        }
+      } else this
+    } else if ((nodeMap & bitpos) != 0) {
+      val index = indexFrom(nodeMap, mask, bitpos)
+      val subNode = this.getNode(index)
+
+      val
subNodeNew = subNode.removed(element, originalHash, elementHash, BitPartitionSize).asInstanceOf[BitmapIndexedSetNode[A]] + + if (subNodeNew eq subNode) return this + + if (subNodeNew.size == 1) { + if (this.payloadArity == 0 && this.nodeArity == 1) { + this.dataMap = subNodeNew.dataMap + this.nodeMap = subNodeNew.nodeMap + this.content = subNodeNew.content + this.originalHashes = subNodeNew.originalHashes + this.size = subNodeNew.size + this.cachedJavaKeySetHashCode = subNodeNew.cachedJavaKeySetHashCode + this + } else { + migrateFromNodeToInlineInPlace(bitpos, originalHash, elementHash, subNode, subNodeNew) + this + } + } else { + // size must be > 1 + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size -= 1 + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNode.cachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode + this + } + } else this + } + + def mergeTwoKeyValPairs(key0: A, originalKeyHash0: Int, keyHash0: Int, key1: A, originalKeyHash1: Int, keyHash1: Int, shift: Int): SetNode[A] = { + // assert(key0 != key1) + + if (shift >= HashCodeLength) { + new HashCollisionSetNode[A](originalKeyHash0, keyHash0, Vector(key0, key1)) + } else { + val mask0 = maskFrom(keyHash0, shift) + val mask1 = maskFrom(keyHash1, shift) + + if (mask0 != mask1) { + // unique prefixes, payload fits on same level + val dataMap = bitposFrom(mask0) | bitposFrom(mask1) + val newCachedHashCode = keyHash0 + keyHash1 + + if (mask0 < mask1) { + new BitmapIndexedSetNode[A](dataMap, 0, Array(key0, key1), Array(originalKeyHash0, originalKeyHash1), 2, newCachedHashCode) + } else { + new BitmapIndexedSetNode[A](dataMap, 0, Array(key1, key0), Array(originalKeyHash1, originalKeyHash0), 2, newCachedHashCode) + } + } else { + // identical prefixes, payload must be disambiguated deeper in the trie + val nodeMap = bitposFrom(mask0) + val node = mergeTwoKeyValPairs(key0, originalKeyHash0, keyHash0, key1, originalKeyHash1, keyHash1, shift + BitPartitionSize) + + new BitmapIndexedSetNode[A](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) + } + } + } + + def hasPayload: Boolean = dataMap != 0 + + def payloadArity: Int = bitCount(dataMap) + + def hasNodes: Boolean = nodeMap != 0 + + def nodeArity: Int = bitCount(nodeMap) + + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) + + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) + + def copyAndSetNode(bitpos: Int, oldNode: SetNode[A], newNode: SetNode[A]) = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedSetNode[A]( + dataMap = dataMap, + nodeMap = nodeMap, + content = dst, + originalHashes = originalHashes, + size = size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } + + def copyAndInsertValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + 1) + + // copy 'src' and insert 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + arraycopy(src, idx, dst, idx + 1, src.length - idx) + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + new 
BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + elementHash)
+  }
+
+  def copyAndSetValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = {
+    val dataIx = dataIndex(bitpos)
+    val idx = TupleLength * dataIx
+
+    val src = this.content
+    val dst = new Array[Any](src.length)
+
+    // copy 'src' and set 1 element(s) at position 'idx'
+    arraycopy(src, 0, dst, 0, src.length)
+    dst(idx) = key
+
+    new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode)
+  }
+
+  def copyAndRemoveValue(bitpos: Int, elementHash: Int) = {
+    val dataIx = dataIndex(bitpos)
+    val idx = TupleLength * dataIx
+
+    val src = this.content
+    val dst = new Array[Any](src.length - 1)
+
+    // copy 'src' and remove 1 element(s) at position 'idx'
+    arraycopy(src, 0, dst, 0, idx)
+    arraycopy(src, idx + 1, dst, idx, src.length - idx - 1)
+    val dstHashes = removeElement(originalHashes, dataIx)
+    new BitmapIndexedSetNode[A](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - elementHash)
+  }
+
+  def copyAndMigrateFromInlineToNode(bitpos: Int, elementHash: Int, node: SetNode[A]) = {
+    val dataIx = dataIndex(bitpos)
+    val idxOld = TupleLength * dataIx
+    val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
+
+    val src = this.content
+    val dst = new Array[Any](src.length - 1 + 1)
+
+    // copy 'src' and remove 1 element(s) at position 'idxOld' and
+    // insert 1 element(s) at position 'idxNew'
+    // assert(idxOld <= idxNew)
+    arraycopy(src, 0, dst, 0, idxOld)
+    arraycopy(src, idxOld + 1, dst, idxOld, idxNew - idxOld)
+    dst(idxNew) = node
+    arraycopy(src, idxNew + 1, dst, idxNew + 1, src.length - idxNew - 1)
+    val dstHashes = removeElement(originalHashes, dataIx)
+    new BitmapIndexedSetNode[A](
+      dataMap = dataMap ^ bitpos,
+      nodeMap = nodeMap | bitpos,
+      content = dst, originalHashes = dstHashes,
+      size = size - 1 + node.size,
+      cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - elementHash + node.cachedJavaKeySetHashCode
+    )
+  }
+  /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node.
+   *
+   * Note: This method will mutate `this`, and will mutate `this.content`
+   *
+   * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets,
+   * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place,
+   * we reuse this.content by shifting data/nodes around, rather than allocating a new array.
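+   *
+   * Illustrative sketch (not real data): with TupleLength == 1, payloads sit at the front of
+   * `content` and nodes are stored in reverse order at the back, so migrating payload `b` into
+   * a fresh sub-node `N` turns `[ a, b, c | n1, n0 ]` into `[ a, c | N, n1, n0 ]`: the array
+   * length is unchanged, which is what allows it to be reused in place.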
+   *
+   * @param bitpos the bit position of the data to migrate to node
+   * @param keyHash the improved hash of the element currently at `bitpos`
+   * @param node the node to place at `bitpos`
+   */
+  def migrateFromInlineToNodeInPlace(bitpos: Int, keyHash: Int, node: SetNode[A]): this.type = {
+    val dataIx = dataIndex(bitpos)
+    val idxOld = TupleLength * dataIx
+    val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
+
+    arraycopy(content, idxOld + TupleLength, content, idxOld, idxNew - idxOld)
+    content(idxNew) = node
+
+    this.dataMap = this.dataMap ^ bitpos
+    this.nodeMap = this.nodeMap | bitpos
+    this.originalHashes = removeElement(originalHashes, dataIx)
+    this.size = this.size - 1 + node.size
+    this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode
+    this
+  }
+
+  def copyAndMigrateFromNodeToInline(bitpos: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]) = {
+    val idxOld = this.content.length - 1 - nodeIndex(bitpos)
+    val dataIxNew = dataIndex(bitpos)
+    val idxNew = TupleLength * dataIxNew
+
+    val src = this.content
+    val dst = new Array[Any](src.length - 1 + 1)
+
+    // copy 'src' and remove 1 element(s) at position 'idxOld' and
+    // insert 1 element(s) at position 'idxNew'
+    // assert(idxOld >= idxNew)
+    arraycopy(src, 0, dst, 0, idxNew)
+    dst(idxNew) = node.getPayload(0)
+    arraycopy(src, idxNew, dst, idxNew + 1, idxOld - idxNew)
+    arraycopy(src, idxOld + 1, dst, idxOld + 1, src.length - idxOld - 1)
+    val hash = node.getHash(0)
+    val dstHashes = insertElement(originalHashes, dataIxNew, hash)
+    new BitmapIndexedSetNode[A](
+      dataMap = dataMap | bitpos,
+      nodeMap = nodeMap ^ bitpos,
+      content = dst,
+      originalHashes = dstHashes,
+      size = size - oldNode.size + 1,
+      cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode
+    )
+  }
+
+  /** Variant of `copyAndMigrateFromNodeToInline` which mutates `this` rather than returning a new node.
+   *
+   * Note: This method will mutate `this`, and will mutate `this.content`
+   *
+   * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets,
+   * content array size does not change during inline <-> node migrations. Therefore, since we are updating in-place,
+   * we reuse this.content by shifting data/nodes around, rather than allocating a new array.
+   *
+   * @param bitpos the bit position of the node to migrate inline
+   * @param oldNode the node currently stored at position `bitpos`
+   * @param node the node containing the single element to migrate inline
+   */
+  def migrateFromNodeToInlineInPlace(bitpos: Int, originalHash: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]): Unit = {
+    val idxOld = this.content.length - 1 - nodeIndex(bitpos)
+    val dataIxNew = dataIndex(bitpos)
+    val element = node.getPayload(0)
+    arraycopy(content, dataIxNew, content, dataIxNew + 1, idxOld - dataIxNew)
+    content(dataIxNew) = element
+    val hash = node.getHash(0)
+    val dstHashes = insertElement(originalHashes, dataIxNew, hash)
+
+    this.dataMap = this.dataMap | bitpos
+    this.nodeMap = this.nodeMap ^ bitpos
+    this.originalHashes = dstHashes
+    this.size = this.size - oldNode.size + 1
+    this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode
+  }
+
+  def foreach[U](f: A => U): Unit = {
+    val thisPayloadArity = payloadArity
+    var i = 0
+    while (i < thisPayloadArity) {
+      f(getPayload(i))
+      i += 1
+    }
+
+    val thisNodeArity = nodeArity
+    var j = 0
+    while (j < thisNodeArity) {
+      getNode(j).foreach(f)
+      j += 1
+    }
+  }
+
+  def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match {
+    case _: HashCollisionSetNode[A] => false
+    case node: BitmapIndexedSetNode[A] =>
+      val thisBitmap = this.dataMap | this.nodeMap
+      val nodeBitmap = node.dataMap | node.nodeMap
+
+      if ((thisBitmap | nodeBitmap) != nodeBitmap)
+        return false
+
+      var bitmap = thisBitmap & nodeBitmap
+      var bitsToSkip = numberOfTrailingZeros(bitmap)
+
+      var isValidSubset = true
+      while (isValidSubset && bitsToSkip < HashCodeLength) {
+        val bitpos = bitposFrom(bitsToSkip)
+
+        isValidSubset =
+          if ((this.dataMap & bitpos) != 0) {
+            if ((node.dataMap & bitpos) != 0) {
+              // Data x Data
+              val payload0 = this.getPayload(indexFrom(this.dataMap, bitpos))
+              val payload1 = node.getPayload(indexFrom(node.dataMap, bitpos))
+              payload0 == payload1
+            } else {
+              // Data x Node
+              val thisDataIndex = indexFrom(this.dataMap, bitpos)
+              val payload = this.getPayload(thisDataIndex)
+              val subNode = that.getNode(indexFrom(node.nodeMap, bitpos))
+              val elementUnimprovedHash = getHash(thisDataIndex)
+              val elementHash = improve(elementUnimprovedHash)
+              subNode.contains(payload, elementUnimprovedHash, elementHash, shift + BitPartitionSize)
+            }
+          } else {
+            // Node x Node
+            val subNode0 = this.getNode(indexFrom(this.nodeMap, bitpos))
+            val subNode1 = node.getNode(indexFrom(node.nodeMap, bitpos))
+            subNode0.subsetOf(subNode1, shift + BitPartitionSize)
+          }
+
+        val newBitmap = bitmap ^ bitpos
+        bitmap = newBitmap
+        bitsToSkip = numberOfTrailingZeros(newBitmap)
+      }
+      isValidSubset
+  }
+
+  override def filterImpl(pred: A => Boolean, flipped: Boolean): BitmapIndexedSetNode[A] = {
+    if (size == 0) this
+    else if (size == 1) {
+      if (pred(getPayload(0)) != flipped) this else SetNode.empty
+    } else if (nodeMap == 0) {
+      // Performance optimization for nodes of depth 1:
+      //
+      // this node has no "node" children, all children are inlined data elems, therefore the logic is significantly simpler
+      // approach:
+      //   * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter
+      //   * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations
+      //   * traverse the content array once more, placing each passing element (according
to `newDataMap`) in the new content and originalHashes arrays
+      //
+      // note:
+      //   * this optimization significantly improves performance of not only small trees, but also larger trees, since
+      //     even non-root nodes are affected by this improvement, and large trees will consist of many nodes as
+      //     descendants
+      //
+      val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap)
+      val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap)
+
+      var newDataMap = 0
+      var newCachedHashCode = 0
+      var dataIndex = 0
+
+      var i = minimumIndex
+
+      while (i < maximumIndex) {
+        val bitpos = bitposFrom(i)
+
+        if ((bitpos & dataMap) != 0) {
+          val payload = getPayload(dataIndex)
+          val passed = pred(payload) != flipped
+
+          if (passed) {
+            newDataMap |= bitpos
+            newCachedHashCode += improve(getHash(dataIndex))
+          }
+
+          dataIndex += 1
+        }
+
+        i += 1
+      }
+
+      if (newDataMap == 0) {
+        SetNode.empty
+      } else if (newDataMap == dataMap) {
+        this
+      } else {
+        val newSize = Integer.bitCount(newDataMap)
+        val newContent = new Array[Any](newSize)
+        val newOriginalHashCodes = new Array[Int](newSize)
+        val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap)
+
+        var j = Integer.numberOfTrailingZeros(newDataMap)
+
+        var newDataIndex = 0
+
+        while (j < newMaximumIndex) {
+          val bitpos = bitposFrom(j)
+          if ((bitpos & newDataMap) != 0) {
+            val oldIndex = indexFrom(dataMap, bitpos)
+            newContent(newDataIndex) = content(oldIndex)
+            newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex)
+            newDataIndex += 1
+          }
+          j += 1
+        }
+
+        new BitmapIndexedSetNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode)
+      }
+    } else {
+      val allMap = dataMap | nodeMap
+      val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap)
+      val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap)
+
+      var oldDataPassThrough = 0
+
+      // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data
+      var nodeMigrateToDataTargetMap = 0
+
+      // TODO: When filtering results in a single-elem node, simply `(A, originalHash, improvedHash)` could be returned,
+      // rather than a singleton node (to avoid pointlessly allocating arrays, nodes, which would just be inlined in
+      // the parent anyways). This would probably involve changing the return type of filterImpl to `AnyRef` which may
+      // return at runtime a SetNode[A], or a tuple of (A, Int, Int)
+
+      // the queue of single-element, post-filter nodes
+      var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null
+
+      // bitmap of all nodes which, when filtered, returned themselves.
They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue.empty + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) + } + } + + override def diff(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) this + else if (size == 1) { + val h = getHash(0) + if (that.contains(getPayload(0), h, improve(h), shift)) SetNode.empty else this + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + + // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val originalHash = getHash(dataIndex) + val hash = improve(originalHash) + + if (!bm.contains(payload, originalHash, hash, shift)) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += hash + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + + val newSubNode: SetNode[A] = + if ((bitpos & bm.dataMap) != 0) { + val thatDataIndex = indexFrom(bm.dataMap, bitpos) + val thatPayload = bm.getPayload(thatDataIndex) + val thatOriginalHash = bm.getHash(thatDataIndex) + val thatHash = improve(thatOriginalHash) + oldSubNode.removed(thatPayload, thatOriginalHash, thatHash, shift + BitPartitionSize) + } else if ((bitpos & bm.nodeMap) != 0) { + oldSubNode.diff(bm.getNode(indexFrom(bm.nodeMap, bitpos)), shift + BitPartitionSize) + } else { + oldSubNode + } + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue.empty + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) + } + case _: HashCollisionSetNode[A] => + // this branch should never happen, because HashCollisionSetNodes and BitMapIndexedSetNodes do not occur at the + // same depth + throw new RuntimeException("BitmapIndexedSetNode diff HashCollisionSetNode") + } + + /** Utility method only for use in `diff` and `filterImpl` + * + * @param newSize the size of the new SetNode + * @param newDataMap the dataMap of the new SetNode + * @param newNodeMap the nodeMap of the new SetNode + * @param minimumIndex the minimum index (in range of [0, 31]) for which there are sub-nodes or data beneath the new + * SetNode + * @param oldDataPassThrough bitmap representing all the data that are just passed from `this` to the new + * SetNode + * @param nodesToPassThroughMap bitmap representing all nodes that are just passed from `this` to the new SetNode + * 
@param nodeMigrateToDataTargetMap bitmap representing all positions which will now be data in the new SetNode,
+   *                                   but which were nodes in `this`
+   * @param nodesToMigrateToData a queue (in order of child position) of single-element nodes, which will be migrated
+   *                             to data, in positions in the `nodeMigrateToDataTargetMap`
+   * @param mapOfNewNodes bitmap of positions of new nodes to include in the new SetNode
+   * @param newNodes queue in order of child position, of all new nodes to include in the new SetNode
+   * @param newCachedHashCode the cached java keyset hashcode of the new SetNode
+   */
+  private[this] def newNodeFrom(
+    newSize: Int,
+    newDataMap: Int,
+    newNodeMap: Int,
+    minimumIndex: Int,
+    oldDataPassThrough: Int,
+    nodesToPassThroughMap: Int,
+    nodeMigrateToDataTargetMap: Int,
+    nodesToMigrateToData: mutable.Queue[SetNode[A]],
+    mapOfNewNodes: Int,
+    newNodes: mutable.Queue[SetNode[A]],
+    newCachedHashCode: Int): BitmapIndexedSetNode[A] = {
+    if (newSize == 0) {
+      SetNode.empty
+    } else if (newSize == size) {
+      this
+    } else {
+      val newDataSize = bitCount(newDataMap)
+      val newContentSize = newDataSize + bitCount(newNodeMap)
+      val newContent = new Array[Any](newContentSize)
+      val newOriginalHashes = new Array[Int](newDataSize)
+
+      val newAllMap = newDataMap | newNodeMap
+      val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap)
+
+      // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will
+      // not be incremented properly. Otherwise we could have started at Integer.numberOfTrailingZeros(newAllMap)
+      var i = minimumIndex
+
+      var oldDataIndex = 0
+      var oldNodeIndex = 0
+
+      var newDataIndex = 0
+      var newNodeIndex = 0
+
+      while (i < maxIndex) {
+        val bitpos = bitposFrom(i)
+
+        if ((bitpos & oldDataPassThrough) != 0) {
+          newContent(newDataIndex) = getPayload(oldDataIndex)
+          newOriginalHashes(newDataIndex) = getHash(oldDataIndex)
+          newDataIndex += 1
+          oldDataIndex += 1
+        } else if ((bitpos & nodesToPassThroughMap) != 0) {
+          newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex)
+          newNodeIndex += 1
+          oldNodeIndex += 1
+        } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) {
+          // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesToMigrateToData must not be null
+          val node = nodesToMigrateToData.dequeue()
+          newContent(newDataIndex) = node.getPayload(0)
+          newOriginalHashes(newDataIndex) = node.getHash(0)
+          newDataIndex += 1
+          oldNodeIndex += 1
+        } else if ((bitpos & mapOfNewNodes) != 0) {
+          // we need not check for null here.
If mapOfNewNodes != 0, then newNodes must not be null + newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & dataMap) != 0) { + oldDataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + oldNodeIndex += 1 + } + + i += 1 + } + + new BitmapIndexedSetNode[A](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) + } + } + + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedSetNode[_] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false + } + + @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true + var i = 0 + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) + i += 1 + } + + isEqual + } + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def copy(): BitmapIndexedSetNode[A] = { + val contentClone = content.clone() + val contentLength = contentClone.length + var i = bitCount(dataMap) + while (i < contentLength) { + contentClone(i) = contentClone(i).asInstanceOf[SetNode[A]].copy() + i += 1 + } + new BitmapIndexedSetNode[A](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) + } + + override def concat(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getPayload(0), originalHash, improve(originalHash), shift) + } + + // if we go through the merge and the result does not differ from `this`, we can just return `this`, to improve sharing + // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result, and `this` + var anyChangesMadeSoFar = false + + // bitmap containing `1` in any position that has any descendant in either left or right, either data or node + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + // minimumIndex is inclusive -- it is the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumIndex is inclusive -- it is the last index for which there is data or nodes + // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound + // of int bitposition representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataLeftOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + if (getHash(leftIdx) == 
bm.getHash(rightIdx) && getPayload(leftIdx) == bm.getPayload(rightIdx)) { + leftDataRightDataLeftOverwrites |= bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(getHash(leftIdx)), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataLeftOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets + + + if ((newDataMap == (leftDataOnly | leftDataRightDataLeftOverwrites)) && (newNodeMap == leftNodeOnly)) { + // nothing from `bm` will make it into the result -- return early + return this + } + + val newDataSize = bitCount(newDataMap) + val newContentSize = newDataSize + bitCount(newNodeMap) + + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 + + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 + + val nextShift = shift + Node.BitPartitionSize + + var compressedDataIdx = 0 + var compressedNodeIdx = 0 + + var bitpos = minimumBitPos + var finished = false + + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val leftNode = getNode(leftNodeIdx) + val newNode = leftNode.concat(bm.getNode(rightNodeIdx), nextShift) + if (leftNode ne newNode) { + anyChangesMadeSoFar = true + } + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftPayload = getPayload(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + n.updated(leftPayload, leftOriginalHash, leftImproved, nextShift) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } + else if ((bitpos & leftNodeRightData) != 0) { + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + val leftNode = getNode(leftNodeIdx) + val updated = leftNode.updated( + element = bm.getPayload(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift + ) + if (updated ne leftNode) { + anyChangesMadeSoFar = true + } + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + val originalHash = 
originalHashes(leftDataIdx) + newContent(compressedDataIdx) = getPayload(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + val newNode = getNode(leftNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val rightOriginalHash = bm.getHash(rightDataIdx) + + bm.mergeTwoKeyValPairs( + getPayload(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), + bm.getPayload(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), + nextShift + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftDataIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataLeftOverwrites) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + leftDataIdx += 1 + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + if (anyChangesMadeSoFar) + new BitmapIndexedSetNode( + dataMap = newDataMap, + nodeMap = newNodeMap, + content = newContent, + originalHashes = newOriginalHashes, + size = newSize, + cachedJavaKeySetHashCode = newCachedHashCode + ) + else this + + case _ => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i), getHash(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 + } + } + + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + val thisPayloadArity = payloadArity + var pass = true + var i = 0 + while (i < thisPayloadArity && pass) { + pass &&= f(getPayload(i), getHash(i)) + i += 1 + } + + val thisNodeArity = nodeArity + var j = 0 + while (j < thisNodeArity && pass) { + pass &&= getNode(j).foreachWithHashWhile(f) + j += 1 + } + pass + } +} + 
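+// Note on when the following node kind arises (explanatory, not part of the algorithm): a
+// HashCollisionSetNode is only created by `mergeTwoKeyValPairs` once `shift` reaches
+// HashCodeLength, i.e. when two distinct elements share the same full 32-bit improved hash.
+// A minimal sketch that would produce one (`Collider` is a hypothetical class, for
+// illustration only):
+//
+//   final case class Collider(id: Int) { override def hashCode: Int = 42 }
+//   HashSet(Collider(1), Collider(2)) // same improved hash => both elements end up
+//                                     // in a single HashCollisionSetNode
+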
+private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A]) extends SetNode[A] {
+
+  import Node._
+
+  require(content.length >= 2)
+
+  def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean =
+    this.hash == hash && content.contains(element)
+
+  def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] =
+    if (this.contains(element, originalHash, hash, shift)) {
+      this
+    } else {
+      new HashCollisionSetNode[A](originalHash, hash, content.appended(element))
+    }
+
+  /**
+   * Remove an element from the hash collision node.
+   *
+   * When after deletion only one element remains, we return a bit-mapped indexed node with a
+   * singleton element and a hash-prefix for trie level 0. This node will then either a) become
+   * the new root, or b) be unwrapped and inlined deeper in the trie.
+   */
+  def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] =
+    if (!this.contains(element, originalHash, hash, shift)) {
+      this
+    } else {
+      val updatedContent = content.filterNot(element0 => element0 == element)
+      // assert(updatedContent.size == content.size - 1)
+
+      updatedContent.size match {
+        case 1 => new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(updatedContent(0)), Array(originalHash), 1, hash)
+        case _ => new HashCollisionSetNode[A](originalHash, hash, updatedContent)
+      }
+    }
+
+  def hasNodes: Boolean = false
+
+  def nodeArity: Int = 0
+
+  def getNode(index: Int): SetNode[A] =
+    throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.")
+
+  def hasPayload: Boolean = true
+
+  def payloadArity: Int = content.length
+
+  def getPayload(index: Int): A = content(index)
+
+  override def getHash(index: Int): Int = originalHash
+
+  def size: Int = content.length
+
+  def foreach[U](f: A => U): Unit = {
+    val iter = content.iterator
+    while (iter.hasNext) {
+      f(iter.next())
+    }
+  }
+
+
+  override def cachedJavaKeySetHashCode: Int = size * hash
+
+  def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match {
+    case node: HashCollisionSetNode[A] =>
+      this.payloadArity <= node.payloadArity && this.content.forall(node.content.contains)
+    case _ =>
+      false
+  }
+
+  override def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] = {
+    val newContent = content.filterImpl(pred, flipped)
+    val newContentLength = newContent.length
+    if (newContentLength == 0) {
+      SetNode.empty
+    } else if (newContentLength == 1) {
+      new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(newContent.head), Array(originalHash), 1, hash)
+    } else if (newContentLength == content.length) this
+    else new HashCollisionSetNode(originalHash, hash, newContent)
+  }
+
+  override def diff(that: SetNode[A], shift: Int): SetNode[A] =
+    filterImpl(that.contains(_, originalHash, hash, shift), true)
+
+  override def equals(that: Any): Boolean =
+    that match {
+      case node: HashCollisionSetNode[_] =>
+        (this eq node) ||
+          (this.hash == node.hash) &&
+            (this.content.size == node.content.size) &&
+            this.content.forall(node.content.contains)
+      case _ => false
+    }
+
+  override def hashCode(): Int =
+    throw new UnsupportedOperationException("Trie nodes do not support hashing.")
+
+  override def copy() = new HashCollisionSetNode[A](originalHash, hash, content)
+
+  override def concat(that: SetNode[A], shift: Int): SetNode[A] = that match {
+    case hc: HashCollisionSetNode[A] =>
+      if (hc eq this) {
+        this
+      } else {
+        var newContent: VectorBuilder[A] = null
+ val iter = hc.content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (!content.contains(nextPayload)) { + if (newContent eq null) { + newContent = new VectorBuilder() + newContent.addAll(this.content) + } + newContent.addOne(nextPayload) + } + } + if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.result()) + } + case _: BitmapIndexedSetNode[A] => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next.asInstanceOf[A], originalHash) + } + } + + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + var stillGoing = true + val iter = content.iterator + while (iter.hasNext && stillGoing) { + val next = iter.next() + stillGoing &&= f(next.asInstanceOf[A], originalHash) + } + stillGoing + } +} + +private final class SetIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[A] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class SetReverseIterator[A](rootNode: SetNode[A]) + extends ChampBaseReverseIterator[SetNode[A]](rootNode) with Iterator[A] { + + def next(): A = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } + +} + +private final class SetHashIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[AnyRef] { + private[this] var hash = 0 + override def hashCode(): Int = hash + + def next(): AnyRef = { + if (!hasNext) + throw new NoSuchElementException + + hash = currentValueNode.getHash(currentValueCursor) + currentValueCursor += 1 + this + } + +} + + +/** + * $factoryInfo + * + * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + @transient + private final val EmptySet = new HashSet(SetNode.empty) + + def empty[A]: HashSet[A] = + EmptySet.asInstanceOf[HashSet[A]] + + def from[A](source: collection.IterableOnce[A]): HashSet[A] = + source match { + case hs: HashSet[A] => hs + case _ if source.knownSize == 0 => empty[A] + case _ => (newBuilder[A] ++= source).result() + } + + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[A]: ReusableBuilder[A, HashSet[A]] = new HashSetBuilder +} + +/** Builder for HashSet. + * $multipleResults + */ +private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, HashSet[A]] { + import Node._ + import SetNode._ + + private def newEmptyRootNode = new BitmapIndexedSetNode[A](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The last given out HashSet as a return value of `result()`, if any, otherwise null. + * Indicates that on next add, the elements should be copied to an identical structure, before continuing + * mutations. 
*/
+ private var aliased: HashSet[A] = _
+
+ private def isAliased: Boolean = aliased != null
+
+ /** The root node of the partially built hash set */
+ private var rootNode: BitmapIndexedSetNode[A] = newEmptyRootNode
+
+ /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */
+ private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
+ if (ix < 0) throw new ArrayIndexOutOfBoundsException
+ if (ix > as.length) throw new ArrayIndexOutOfBoundsException
+ val result = new Array[Int](as.length + 1)
+ arraycopy(as, 0, result, 0, ix)
+ result(ix) = elem
+ arraycopy(as, ix, result, ix + 1, as.length - ix)
+ result
+ }
+
+ /** Inserts the element into the bitmap node. Requires that the element is not already present */
+ private def insertValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, key: A, originalHash: Int, keyHash: Int): Unit = {
+ val dataIx = bm.dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = bm.content
+ val dst = new Array[Any](src.length + TupleLength)
+
+ // copy 'src' and insert the new element at position 'idx'
+ arraycopy(src, 0, dst, 0, idx)
+ dst(idx) = key
+ arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+ val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash)
+
+ bm.dataMap = bm.dataMap | bitpos
+ bm.content = dst
+ bm.originalHashes = dstHashes
+ bm.size += 1
+ bm.cachedJavaKeySetHashCode += keyHash
+ }
+
+ /** Mutates `bm` to replace inline data at bit position `bitpos` with the updated element */
+ private def setValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, elem: A): Unit = {
+ val dataIx = bm.dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+ bm.content(idx) = elem
+ }
+
+ def update(setNode: SetNode[A], element: A, originalHash: Int, elementHash: Int, shift: Int): Unit =
+ setNode match {
+ case bm: BitmapIndexedSetNode[A] =>
+ val mask = maskFrom(elementHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((bm.dataMap & bitpos) != 0) {
+ val index = indexFrom(bm.dataMap, mask, bitpos)
+ val element0 = bm.getPayload(index)
+ val element0UnimprovedHash = bm.getHash(index)
+
+ if (element0UnimprovedHash == originalHash && element0 == element) {
+ setValue(bm, bitpos, element0)
+ } else {
+ val element0Hash = improve(element0UnimprovedHash)
+ val subNodeNew = bm.mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
+ bm.migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew)
+ }
+ } else if ((bm.nodeMap & bitpos) != 0) {
+ val index = indexFrom(bm.nodeMap, mask, bitpos)
+ val subNode = bm.getNode(index)
+ val beforeSize = subNode.size
+ val beforeHashCode = subNode.cachedJavaKeySetHashCode
+ update(subNode, element, originalHash, elementHash, shift + BitPartitionSize)
+ bm.size += subNode.size - beforeSize
+ bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHashCode
+ } else {
+ insertValue(bm, bitpos, element, originalHash, elementHash)
+ }
+ case hc: HashCollisionSetNode[A] =>
+ val index = hc.content.indexOf(element)
+ if (index < 0) {
+ hc.content = hc.content.appended(element)
+ } else {
+ hc.content = hc.content.updated(index, element)
+ }
+ }
+
+ /** If currently referencing aliased structure, copy elements to new mutable structure */
+ private def ensureUnaliased(): Unit = {
+ if (isAliased) copyElems()
+ aliased = null
+ }
+
+ /** Copy elements to new mutable structure */
+ private def copyElems(): Unit = {
+ rootNode = rootNode.copy()
+ }
+
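+ // Illustrative sketch of the aliasing scheme (not from the upstream sources):
+ //   val b = new HashSetBuilder[Int]
+ //   b.addOne(1)
+ //   val s1 = b.result()  // builder and s1 now share rootNode (aliased)
+ //   b.addOne(2)          // ensureUnaliased() copies rootNode before mutating,
+ //                        // so s1 still contains only the element 1
+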
+ override def result(): HashSet[A] = + if (rootNode.size == 0) { + HashSet.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashSet(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: A): this.type = { + ensureUnaliased() + val h = elem.## + val im = improve(h) + update(rootNode, elem, h, im, 0) + this + } + + override def addAll(xs: IterableOnce[A]) = { + ensureUnaliased() + xs match { + case hm: HashSet[A] => + new ChampBaseIterator[SetNode[A]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + setNode = rootNode, + element = currentValueNode.getPayload(currentValueCursor), + originalHash = originalHash, + elementHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 + } + } + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) + } + + this + } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + // if rootNode is empty, we will not have given it away anyways, we instead give out the reused Set.empty + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int = rootNode.size + + override def knownSize: Int = rootNode.size +} diff --git a/tests/pos-special/stdlib/collection/immutable/IntMap.scala b/tests/pos-special/stdlib/collection/immutable/IntMap.scala new file mode 100644 index 000000000000..240821b11460 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/IntMap.scala @@ -0,0 +1,502 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.collection.generic.{BitOperations, DefaultSerializationProxy} +import scala.collection.mutable.{Builder, ImmutableBuilder} +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.language.implicitConversions + +/** Utility class for integer maps. + */ +private[immutable] object IntMapUtils extends BitOperations.Int { + def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) + + def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) + if (zero(p1, m)) IntMap.Bin(p, m, t1, t2) + else IntMap.Bin(p, m, t2, t1) + } + + def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match { + case (left, IntMap.Nil) => left + case (IntMap.Nil, right) => right + case (left, right) => IntMap.Bin(prefix, mask, left, right) + } +} + +import IntMapUtils._ + +/** A companion object for integer maps. + * + * @define Coll `IntMap` + */ +object IntMap { + def empty[T] : IntMap[T] = IntMap.Nil + + def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value) + + def apply[T](elems: (Int, T)*): IntMap[T] = + elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) + + def from[V](coll: IterableOnce[(Int, V)]): IntMap[V] = + newBuilder[V].addAll(coll).result() + + private[immutable] case object Nil extends IntMap[Nothing] { + // Important! Without this equals method in place, an infinite + // loop from Map.equals => size => pattern-match-on-Nil => equals + // develops. Case objects and custom equality don't mix without + // careful handling. 
+ override def equals(that : Any) = that match {
+ case _: this.type => true
+ case _: IntMap[_] => false // The only empty IntMaps are eq Nil
+ case _ => super.equals(that)
+ }
+ }
+
+ private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{
+ def withValue[S](s: S) =
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]]
+ else IntMap.Tip(key, s)
+ }
+
+ private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] {
+ def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]]
+ else IntMap.Bin[S](prefix, mask, left, right)
+ }
+ }
+
+ def newBuilder[V]: Builder[(Int, V), IntMap[V]] =
+ new ImmutableBuilder[(Int, V), IntMap[V]](empty) {
+ def addOne(elem: (Int, V)): this.type = { elems = elems + elem; this }
+ }
+
+ implicit def toFactory[V](dummy: IntMap.type): Factory[(Int, V), IntMap[V]] = ToFactory.asInstanceOf[Factory[(Int, V), IntMap[V]]]
+
+ @SerialVersionUID(3L)
+ private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable {
+ def fromSpecific(it: IterableOnce[(Int, AnyRef)]): IntMap[AnyRef] = IntMap.from[AnyRef](it)
+ def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = IntMap.newBuilder[AnyRef]
+ }
+
+ implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]]
+ private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] {
+ def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]) = IntMap.from(it)
+ def newBuilder(from: Any) = IntMap.newBuilder[AnyRef]
+ }
+
+ implicit def iterableFactory[V]: Factory[(Int, V), IntMap[V]] = toFactory(this)
+ implicit def buildFromIntMap[V]: BuildFrom[IntMap[_], (Int, V), IntMap[V]] = toBuildFrom(this)
+}
+
+// Iterator over a non-empty IntMap.
+private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] {
+
+ // Basically this uses a simple stack to emulate recursion over the tree. However
+ // because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and
+ // one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
+ // depth is 33, and a fixed-size buffer of 33 entries suffices.
+ var index = 0
+ var buffer = new Array[AnyRef](33)
+
+ def pop = {
+ index -= 1
+ buffer(index).asInstanceOf[IntMap[V]]
+ }
+
+ def push(x: IntMap[V]): Unit = {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
+ }
+ push(it)
+
+ /**
+ * What value do we assign to a tip?
+ */
+ def valueOf(tip: IntMap.Tip[V]): T
+
+ def hasNext = index != 0
+ @tailrec
+ final def next(): T =
+ pop match {
+ case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => {
+ push(right)
+ valueOf(t)
+ }
+ case IntMap.Bin(_, _, left, right) => {
+ push(right)
+ push(left)
+ next()
+ }
+ case t@IntMap.Tip(_, _) => valueOf(t)
+ // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap
+ // and don't return an IntMapIterator for IntMap.Nil.
+ case IntMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") + } +} + +private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) { + def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value) +} + +private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) { + def valueOf(tip: IntMap.Tip[V]) = tip.value +} + +private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) { + def valueOf(tip: IntMap.Tip[V]) = tip.key +} + +import IntMap._ + +/** Specialised immutable map structure for integer keys, based on + * [[https://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]] + * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. + * + * '''Note:''' This class is as of 2.8 largely superseded by HashMap. + * + * @tparam T type of the values associated with integer keys. + * + * @define Coll `immutable.IntMap` + * @define coll immutable integer map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed abstract class IntMap[+T] extends AbstractMap[Int, T] + with StrictOptimizedMapOps[Int, T, Map, IntMap[T]] + with Serializable { + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]): IntMap[T] = + intMapFrom[T](coll) + protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]): IntMap[V2] = { + val b = IntMap.newBuilder[V2] + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Int, T), IntMap[T]] @uncheckedVariance = + new ImmutableBuilder[(Int, T), IntMap[T]](empty) { + def addOne(elem: (Int, T)): this.type = { elems = elems + elem; this } + } + + override def empty: IntMap[T] = IntMap.Nil + + override def toList = { + val buffer = new scala.collection.mutable.ListBuffer[(Int, T)] + foreach(buffer += _) + buffer.toList + } + + /** + * Iterator over key, value pairs of the map in unsigned order of the keys. + * + * @return an iterator over pairs of integer keys and corresponding values. + */ + def iterator: Iterator[(Int, T)] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapEntryIterator(this) + } + + /** + * Loops over the key, value pairs of the map in unsigned order of the keys. + */ + override final def foreach[U](f: ((Int, T)) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } + case IntMap.Tip(key, value) => f((key, value)) + case IntMap.Nil => + } + + override def foreachEntry[U](f: (IntMapUtils.Int, T) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } + case IntMap.Tip(key, value) => f(key, value) + case IntMap.Nil => + } + + override def keysIterator: Iterator[Int] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapKeyIterator(this) + } + + /** + * Loop over the keys of the map. The same as `keys.foreach(f)`, but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Int => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case IntMap.Tip(key, _) => f(key) + case IntMap.Nil => + } + + override def valuesIterator: Iterator[T] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapValueIterator(this) + } + + /** + * Loop over the values of the map. 
The same as `values.foreach(f)`, but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachValue[U](f: T => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } + case IntMap.Tip(_, value) => f(value) + case IntMap.Nil => + } + + override protected[this] def className = "IntMap" + + override def isEmpty = this eq IntMap.Nil + override def knownSize: Int = if (isEmpty) 0 else super.knownSize + override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => { + val (newleft, newright) = (left.filter(f), right.filter(f)) + if ((left eq newleft) && (right eq newright)) this + else bin(prefix, mask, newleft, newright) + } + case IntMap.Tip(key, value) => + if (f((key, value))) this + else IntMap.Nil + case IntMap.Nil => IntMap.Nil + } + + override def transform[S](f: (Int, T) => S): IntMap[S] = this match { + case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) + case t@IntMap.Tip(key, value) => t.withValue(f(key, value)) + case IntMap.Nil => IntMap.Nil + } + + final override def size: Int = this match { + case IntMap.Nil => 0 + case IntMap.Tip(_, _) => 1 + case IntMap.Bin(_, _, left, right) => left.size + right.size + } + + @tailrec + final def get(key: Int): Option[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) + case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None + case IntMap.Nil => None + } + + @tailrec + final override def getOrElse[S >: T](key: Int, default: => S): S = this match { + case IntMap.Nil => default + case IntMap.Tip(key2, value) => if (key == key2) value else default + case IntMap.Bin(prefix, mask, left, right) => + if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) + } + + @tailrec + final override def apply(key: Int): T = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key) + case IntMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") + case IntMap.Nil => throw new IllegalArgumentException("key not found") + } + + override def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) + + override def updated[S >: T](key: Int, value: S): IntMap[S] = this match { + case IntMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right) + else IntMap.Bin(prefix, mask, left, right.updated(key, value)) + case IntMap.Tip(key2, value2) => + if (key == key2) IntMap.Tip(key, value) + else join(key, IntMap.Tip(key, value), key2, this) + case IntMap.Nil => IntMap.Tip(key, value) + } + + def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f)) + + def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) + + override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = + super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such + + override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = concat(that) + + def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] = + strictOptimizedCollect(IntMap.newBuilder[V2], pf) + + /** + * Updates the map, using the 
provided function to resolve conflicts if the key is already present.
+ *
+ * Equivalent to:
+ * {{{
+ * this.get(key) match {
+ * case None => this.updated(key, value)
+ * case Some(oldvalue) => this.updated(key, f(oldvalue, value))
+ * }
+ * }}}
+ *
+ * @tparam S The supertype of values in this `IntMap`.
+ * @param key The key to update
+ * @param value The value to use if there is no conflict
+ * @param f The function used to resolve conflicts.
+ * @return The updated map.
+ */
+ def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+ else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+ case IntMap.Tip(key2, value2) =>
+ if (key == key2) IntMap.Tip(key, f(value2, value))
+ else join(key, IntMap.Tip(key, value), key2, this)
+ case IntMap.Nil => IntMap.Tip(key, value)
+ }
+
+ def removed (key: Int): IntMap[T] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) this
+ else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+ else bin(prefix, mask, left, right - key)
+ case IntMap.Tip(key2, _) =>
+ if (key == key2) IntMap.Nil
+ else this
+ case IntMap.Nil => IntMap.Nil
+ }
+
+ /**
+ * A combined transform and filter function. Returns an `IntMap` such that
+ * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+ * the map contains no mapping for key, and if `f(key, value) == Some(x)` the
+ * map contains the mapping `(key, x)`.
+ *
+ * @tparam S The type of the values in the resulting `IntMap`.
+ * @param f The transforming function.
+ * @return The modified map.
+ */
+ def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ val newleft = left.modifyOrRemove(f)
+ val newright = right.modifyOrRemove(f)
+ if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]]
+ else bin(prefix, mask, newleft, newright)
+ case IntMap.Tip(key, value) => f(key, value) match {
+ case None =>
+ IntMap.Nil
+ case Some(value2) =>
+ //hack to preserve sharing
+ if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]]
+ else IntMap.Tip(key, value2)
+ }
+ case IntMap.Nil =>
+ IntMap.Nil
+ }
+
+ /**
+ * Forms a union map with that map, using the combining function to resolve conflicts.
+ *
+ * @tparam S The type of values in `that`, a supertype of values in `this`.
+ * @param that The map to form a union with.
+ * @param f The function used to resolve conflicts between two mappings.
+ * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
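+ *
+ * For example (an illustrative sketch; `f` receives this map's value before `that`'s):
+ * {{{
+ * IntMap(1 -> "a", 2 -> "b").unionWith(IntMap(2 -> "B", 3 -> "c"), (_, l, r) => l + r)
+ * // IntMap(1 -> "a", 2 -> "bB", 3 -> "c")
+ * }}}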
+ */
+ def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{
+ case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) =>
+ if (shorter(m1, m2)) {
+ if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that)
+ else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+ else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f))
+ } else if (shorter(m2, m1)){
+ if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that)
+ else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+ else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f))
+ }
+ else {
+ if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+ else join(p1, this, p2, that)
+ }
+ case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x))
+ case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+ case (IntMap.Nil, x) => x
+ case (x, IntMap.Nil) => x
+ }
+
+ /**
+ * Forms the intersection of these two maps with a combining function. The
+ * resulting map is a map that has only keys present in both maps and has
+ * values produced from the original mappings by combining them with `f`.
+ *
+ * @tparam S The type of values in `that`.
+ * @tparam R The type of values in the resulting `IntMap`.
+ * @param that The map to intersect with.
+ * @param f The combining function.
+ * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
+ */
+ def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match {
+ case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) =>
+ if (shorter(m1, m2)) {
+ if (!hasMatch(p2, p1, m1)) IntMap.Nil
+ else if (zero(p2, m1)) l1.intersectionWith(that, f)
+ else r1.intersectionWith(that, f)
+ } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
+ else {
+ if (!hasMatch(p1, p2, m2)) IntMap.Nil
+ else if (zero(p1, m2)) this.intersectionWith(l2, f)
+ else this.intersectionWith(r2, f)
+ }
+ case (IntMap.Tip(key, value), that) => that.get(key) match {
+ case None => IntMap.Nil
+ case Some(value2) => IntMap.Tip(key, f(key, value, value2))
+ }
+ case (_, IntMap.Tip(key, value)) => this.get(key) match {
+ case None => IntMap.Nil
+ case Some(value2) => IntMap.Tip(key, f(key, value2, value))
+ }
+ case (_, _) => IntMap.Nil
+ }
+
+ /**
+ * Left biased intersection. Returns the map that has all the same mappings
+ * as this but only for keys which are present in the other map.
+ *
+ * @tparam R The type of values in `that`.
+ * @param that The map to intersect with.
+ * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
+ */
+ def intersection[R](that: IntMap[R]): IntMap[T] =
+ this.intersectionWith(that, (key: Int, value: T, value2: R) => value)
+
+ def ++[S >: T](that: IntMap[S]) =
+ this.unionWith[S](that, (key, x, y) => y)
+
+ /**
+ * The lowest key, considered in unsigned order.
+ */
+ @tailrec
+ final def firstKey: Int = this match {
+ case Bin(_, _, l, r) => l.firstKey
+ case Tip(k, v) => k
+ case IntMap.Nil => throw new IllegalStateException("Empty set")
+ }
+
+ /**
+ * The highest key, considered in unsigned order.
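+ * For example (an illustrative sketch): `IntMap(1 -> "a", -1 -> "b").lastKey` is `-1`,
+ * since `-1` is `0xFFFFFFFF` when viewed as an unsigned value.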
+ */ + @tailrec + final def lastKey: Int = this match { + case Bin(_, _, l, r) => r.lastKey + case Tip(k, v) => k + case IntMap.Nil => throw new IllegalStateException("Empty set") + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(IntMap.toFactory[T](IntMap), this) +} diff --git a/tests/pos-special/stdlib/collection/immutable/Iterable.scala b/tests/pos-special/stdlib/collection/immutable/Iterable.scala index 44f13d0f2895..d4199ab3ab14 100644 --- a/tests/pos-special/stdlib/collection/immutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/immutable/Iterable.scala @@ -13,7 +13,6 @@ package scala.collection.immutable import scala.collection.{IterableFactory, IterableFactoryDefaults} -import language.experimental.captureChecking /** A trait for collections that are guaranteed immutable. * @@ -25,7 +24,6 @@ import language.experimental.captureChecking trait Iterable[+A] extends collection.Iterable[A] with collection.IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { - this: Iterable[A]^ => override def iterableFactory: IterableFactory[Iterable] = Iterable } diff --git a/tests/pos-special/stdlib/collection/immutable/LazyList.scala b/tests/pos-special/stdlib/collection/immutable/LazyList.scala new file mode 100644 index 000000000000..8b7ad26dc5ae --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/LazyList.scala @@ -0,0 +1,1381 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.lang.{StringBuilder => JStringBuilder} + +import scala.annotation.tailrec +import scala.collection.generic.SerializeEnd +import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder} +import scala.language.implicitConversions +import scala.runtime.Statics + +/** This class implements an immutable linked list. We call it "lazy" + * because it computes its elements only when they are needed. + * + * Elements are memoized; that is, the value of each element is computed at most once. + * + * Elements are computed in-order and are never skipped. In other words, + * accessing the tail causes the head to be computed first. + * + * How lazy is a `LazyList`? When you have a value of type `LazyList`, you + * don't know yet whether the list is empty or not. If you learn that it is non-empty, + * then you also know that the head has been computed. But the tail is itself + * a `LazyList`, whose emptiness-or-not might remain undetermined. + * + * A `LazyList` may be infinite. For example, `LazyList.from(0)` contains + * all of the natural numbers 0, 1, 2, and so on. For infinite sequences, + * some methods (such as `count`, `sum`, `max` or `min`) will not terminate. + * + * Here is an example: + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * val fibs: LazyList[BigInt] = + * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 } + * fibs.take(5).foreach(println) + * } + * + * // prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * }}} + * + * To illustrate, let's add some output to the definition `fibs`, so we + * see what's going on. 
+ * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * val fibs: LazyList[BigInt] = + * BigInt(0) #:: BigInt(1) #:: + * fibs.zip(fibs.tail).map{ n => + * println(s"Adding \${n._1} and \${n._2}") + * n._1 + n._2 + * } + * fibs.take(5).foreach(println) + * fibs.take(6).foreach(println) + * } + * + * // prints + * // + * // 0 + * // 1 + * // Adding 0 and 1 + * // 1 + * // Adding 1 and 1 + * // 2 + * // Adding 1 and 2 + * // 3 + * + * // And then prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * // Adding 2 and 3 + * // 5 + * }}} + * + * Note that the definition of `fibs` uses `val` not `def`. The memoization of the + * `LazyList` requires us to have somewhere to store the information and a `val` + * allows us to do that. + * + * Further remarks about the semantics of `LazyList`: + * + * - Though the `LazyList` changes as it is accessed, this does not + * contradict its immutability. Once the values are memoized they do + * not change. Values that have yet to be memoized still "exist", they + * simply haven't been computed yet. + * + * - One must be cautious of memoization; it can eat up memory if you're not + * careful. That's because memoization of the `LazyList` creates a structure much like + * [[scala.collection.immutable.List]]. As long as something is holding on to + * the head, the head holds on to the tail, and so on recursively. + * If, on the other hand, there is nothing holding on to the head (e.g. if we used + * `def` to define the `LazyList`) then once it is no longer being used directly, + * it disappears. + * + * - Note that some operations, including [[drop]], [[dropWhile]], + * [[flatMap]] or [[collect]] may process a large number of intermediate + * elements before returning. + * + * Here's another example. Let's start with the natural numbers and iterate + * over them. + * + * {{{ + * // We'll start with a silly iteration + * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { + * // Stop after 200,000 + * if (i < 200001) { + * if (i % 50000 == 0) println(s + i) + * loop(s, iter.next(), iter) + * } + * } + * + * // Our first LazyList definition will be a val definition + * val lazylist1: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * + * // Because lazylist1 is a val, everything that the iterator produces is held + * // by virtue of the fact that the head of the LazyList is held in lazylist1 + * val it1 = lazylist1.iterator + * loop("Iterator1: ", it1.next(), it1) + * + * // We can redefine this LazyList such that all we have is the Iterator left + * // and allow the LazyList to be garbage collected as required. Using a def + * // to provide the LazyList ensures that no val is holding onto the head as + * // is the case with lazylist1 + * def lazylist2: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * val it2 = lazylist2.iterator + * loop("Iterator2: ", it2.next(), it2) + * + * // And, of course, we don't actually need a LazyList at all for such a simple + * // problem. There's no reason to use a LazyList if you don't actually need + * // one. + * val it3 = new Iterator[Int] { + * var i = -1 + * def hasNext = true + * def next(): Int = { i += 1; i } + * } + * loop("Iterator3: ", it3.next(), it3) + * }}} + * + * - In the `fibs` example earlier, the fact that `tail` works at all is of interest. + * `fibs` has an initial `(0, 1, LazyList(...))`, so `tail` is deterministic. 
+ * If we defined `fibs` such that only `0` were concretely known, then the act
+ * of determining `tail` would require the evaluation of `tail`, so the
+ * computation would be unable to progress, as in this code:
+ * {{{
+ * // The first time we try to access the tail we're going to need more
+ * // information which will require us to recurse, which will require us to
+ * // recurse, which...
+ * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
+ * }}}
+ *
+ * The definition of `fibs` above creates more objects than necessary,
+ * depending on how you might want to implement it. The following
+ * implementation is more "cost effective" because it has a more direct
+ * route to the numbers themselves:
+ *
+ * {{{
+ * lazy val fib: LazyList[Int] = {
+ * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n)
+ * loop(1, 1)
+ * }
+ * }}}
+ *
+ * The head, the tail and whether the list is empty or not can be initially unknown.
+ * Once any of those are evaluated, they are all known, though if the tail is
+ * built with `#::` or `#:::`, its content still isn't evaluated. Instead, evaluating
+ * the tail's content is deferred until the tail's empty status, head or tail is
+ * evaluated.
+ *
+ * Delaying the evaluation of whether a LazyList is empty or not until it's needed
+ * allows LazyList to not eagerly evaluate any elements on a call to `filter`.
+ *
+ * Only when it's further evaluated (which may be never!) are any of the elements
+ * forced.
+ *
+ * For example:
+ *
+ * {{{
+ * def tailWithSideEffect: LazyList[Nothing] = {
+ * println("getting empty LazyList")
+ * LazyList.empty
+ * }
+ *
+ * val emptyTail = tailWithSideEffect // prints "getting empty LazyList"
+ *
+ * val suspended = 1 #:: tailWithSideEffect // doesn't print anything
+ * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed
+ * val filtered = tail.filter(_ => false) // still nothing is printed
+ * filtered.isEmpty // prints "getting empty LazyList"
+ * }}}
+ *
+ * @tparam A the type of the elements contained in this lazy list.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]]
+ * section on `LazyLists` for more information.
+ * @define Coll `LazyList`
+ * @define coll lazy list
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`,
+ * `appendedAll`, `lazyAppendedAll`) without forcing any of the
+ * intermediate resulting lazy lists may overflow the stack when
+ * the final result is forced.
+ * @define preservesLaziness This method preserves laziness; elements are only evaluated
+ * individually as needed.
+ * @define initiallyLazy This method does not evaluate anything until an operation is performed
+ * on the result (e.g. calling `head` or `tail`, or checking if it is empty).
+ * @define evaluatesAllElements This method evaluates all elements of the collection.
+ */ +@SerialVersionUID(3L) +final class LazyList[+A] private(private[this] var lazyState: () => LazyList.State[A]) + extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, LazyList, LazyList[A]] + with IterableFactoryDefaults[A, LazyList] + with Serializable { + import LazyList._ + + @volatile private[this] var stateEvaluated: Boolean = false + @inline private def stateDefined: Boolean = stateEvaluated + private[this] var midEvaluation = false + + private lazy val state: State[A] = { + // if it's already mid-evaluation, we're stuck in an infinite + // self-referential loop (also it's empty) + if (midEvaluation) { + throw new RuntimeException("self-referential LazyList or a derivation thereof has no more elements") + } + midEvaluation = true + val res = try lazyState() finally midEvaluation = false + // if we set it to `true` before evaluating, we may infinite loop + // if something expects `state` to already be evaluated + stateEvaluated = true + lazyState = null // allow GC + res + } + + override def iterableFactory: SeqFactory[LazyList] = LazyList + + override def isEmpty: Boolean = state eq State.Empty + + /** @inheritdoc + * + * $preservesLaziness + */ + override def knownSize: Int = if (knownIsEmpty) 0 else -1 + + override def head: A = state.head + + override def tail: LazyList[A] = state.tail + + @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) + @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) + + /** Evaluates all undefined elements of the lazy list. + * + * This method detects cycles in lazy lists, and terminates after all + * elements of the cycle are evaluated. For example: + * + * {{{ + * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring + * ring.force + * ring.toString + * + * // prints + * // + * // LazyList(1, 2, 3, ...) + * }}} + * + * This method will *not* terminate for non-cyclic infinite-sized collections. + * + * @return this + */ + def force: this.type = { + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those: LazyList[A] = this + if (!these.isEmpty) { + these = these.tail + } + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail + } + this + } + + /** @inheritdoc + * + * The iterator returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def iterator: Iterator[A] = + if (knownIsEmpty) Iterator.empty + else new LazyIterator(this) + + /** Apply the given function `f` to each element of this linear sequence + * (while respecting the order of the elements). + * + * @param f The treatment to apply to each element. + * @note Overridden here as final to trigger tail-call optimization, which + * replaces 'this' with 'tail' at each iteration. This is absolutely + * necessary for allowing the GC to collect the underlying LazyList as elements + * are consumed. + * @note This function will force the realization of the entire LazyList + * unless the `f` throws an exception. + */ + @tailrec + override def foreach[U](f: A => U): Unit = { + if (!isEmpty) { + f(head) + tail.foreach(f) + } + } + + /** LazyList specialization of foldLeft which allows GC to collect along the + * way. + * + * @tparam B The type of value being accumulated. + * @param z The initial value seeded into the function `op`. 
+ * @param op The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `op`. + */ + @tailrec + override def foldLeft[B](z: B)(op: (B, A) => B): B = + if (isEmpty) z + else tail.foldLeft(op(z, head))(op) + + // State.Empty doesn't use the SerializationProxy + protected[this] def writeReplace(): AnyRef = + if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this + + override protected[this] def className = "LazyList" + + /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. + * + * $preservesLaziness + * + * $appendStackSafety + * + * @param suffix The collection that gets appended to this lazy list + * @return The lazy list containing elements of this lazy list and the iterable object. + */ + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): LazyList[B] = + newLL { + if (isEmpty) suffix match { + case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList + case coll if coll.knownSize == 0 => State.Empty + case coll => stateFromIterator(coll.iterator) + } + else sCons(head, tail lazyAppendedAll suffix) + } + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def appendedAll[B >: A](suffix: IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(suffix) + else lazyAppendedAll(suffix) + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def appended[B >: A](elem: B): LazyList[B] = + if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) + else lazyAppendedAll(Iterator.single(elem)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def scanLeft[B](z: B)(op: (B, A) => B): LazyList[B] = + if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) + else newLL(scanLeftState(z)(op)) + + private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = + sCons( + z, + newLL { + if (isEmpty) State.Empty + else tail.scanLeftState(op(z, head))(op) + } + ) + + /** LazyList specialization of reduceLeft which allows GC to collect + * along the way. + * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `f`. 
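+ *
+ * For example (an illustrative sketch): `LazyList.from(1).take(4).reduceLeft(_ + _)`
+ * evaluates to `10`, consuming the prefix as it goes.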
+ */ + override def reduceLeft[B >: A](f: (B, A) => B): B = { + if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + else { + var reducedRes: B = this.head + var left: LazyList[A] = this.tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail + } + reducedRes + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { + val (left, right) = map(f).partition(_.isLeft) + (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filter(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = false) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filterNot(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = true) + + /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. + * + * This method is not particularly useful for a lazy list, as [[filter]] already preserves + * laziness. + * + * The `collection.WithFilter` returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyList] = + new LazyList.WithFilter(coll, p) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def prepended[B >: A](elem: B): LazyList[B] = newLL(sCons(elem, this)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(prefix) + else if (prefix.knownSize == 0) this + else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def map[B](f: A => B): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else (mapImpl(f): @inline) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def tapEach[U](f: A => U): LazyList[A] = map { a => f(a); a } + + private def mapImpl[B](f: A => B): LazyList[B] = + newLL { + if (isEmpty) State.Empty + else sCons(f(head), tail.mapImpl(f)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def collect[B](pf: PartialFunction[A, B]): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else LazyList.collectImpl(this, pf) + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element for which the partial function is defined. + */ + @tailrec + override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if (isEmpty) None + else { + val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) + if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) + else Some(res) + } + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element matching the predicate. 
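+ *
+ * For example (an illustrative sketch): `LazyList.from(1).find(_ % 5 == 0)` returns
+ * `Some(5)` after evaluating only the first five elements.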
+ */ + @tailrec + override def find(p: A => Boolean): Option[A] = + if (isEmpty) None + else { + val elem = head + if (p(elem)) Some(elem) + else tail.find(p) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + // optimisations are not for speed, but for functionality + // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) + override def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else LazyList.flatMapImpl(this, f) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def flatten[B](implicit asIterable: A => IterableOnce[B]): LazyList[B] = flatMap(asIterable) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zip[B](that: collection.IterableOnce[B]): LazyList[(A, B)] = + if (this.knownIsEmpty || that.knownSize == 0) LazyList.empty + else newLL(zipState(that.iterator)) + + private def zipState[B](it: Iterator[B]): State[(A, B)] = + if (this.isEmpty || !it.hasNext) State.Empty + else sCons((head, it.next()), newLL { tail zipState it }) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipWithIndex: LazyList[(A, Int)] = this zip LazyList.from(0) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipAll[A1 >: A, B](that: collection.Iterable[B], thisElem: A1, thatElem: B): LazyList[(A1, B)] = { + if (this.knownIsEmpty) { + if (that.knownSize == 0) LazyList.empty + else LazyList.continually(thisElem) zip that + } else { + if (that.knownSize == 0) zip(LazyList.continually(thatElem)) + else newLL(zipAllState(that.iterator, thisElem, thatElem)) + } + } + + private def zipAllState[A1 >: A, B](it: Iterator[B], thisElem: A1, thatElem: B): State[(A1, B)] = { + if (it.hasNext) { + if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyList.continually(thisElem) zipState it }) + else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) + } else { + if (this.isEmpty) State.Empty + else sCons((this.head, thatElem), this.tail zip LazyList.continually(thatElem)) + } + } + + /** @inheritdoc + * + * This method is not particularly useful for a lazy list, as [[zip]] already preserves + * laziness. + * + * The `collection.LazyZip2` returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + // just in case it can be meaningfully overridden at some point + override def lazyZip[B](that: collection.Iterable[B]): LazyZip2[A, B, LazyList.this.type] = + super.lazyZip(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = + (map(asPair(_)._1), map(asPair(_)._2)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = + (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all except the first `n` elements. + */ + override def drop(n: Int): LazyList[A] = + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else LazyList.dropImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all elements after the predicate returns `false`. 
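+ *
+ * For example (an illustrative sketch):
+ * {{{
+ * val rest = LazyList.from(0).dropWhile(_ < 3) // nothing is evaluated yet
+ * rest.head // forces 0, 1, 2 and 3; returns 3
+ * }}}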
+ */ + override def dropWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.dropWhileImpl(this, p) + + /** @inheritdoc + * + * $initiallyLazy + */ + override def dropRight(n: Int): LazyList[A] = { + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else newLL { + var scout = this + var remaining = n + // advance scout n elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + remaining -= 1 + scout = scout.tail + } + dropRightState(scout) + } + } + + private def dropRightState(scout: LazyList[_]): State[A] = + if (scout.isEmpty) State.Empty + else sCons(head, newLL(tail.dropRightState(scout.tail))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def take(n: Int): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeImpl(n): @inline) + + private def takeImpl(n: Int): LazyList[A] = { + if (n <= 0) LazyList.empty + else newLL { + if (isEmpty) State.Empty + else sCons(head, tail.takeImpl(n - 1)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def takeWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeWhileImpl(p): @inline) + + private def takeWhileImpl(p: A => Boolean): LazyList[A] = + newLL { + if (isEmpty || !p(head)) State.Empty + else sCons(head, tail.takeWhileImpl(p)) + } + + /** @inheritdoc + * + * $initiallyLazy + */ + override def takeRight(n: Int): LazyList[A] = + if (n <= 0 || knownIsEmpty) LazyList.empty + else LazyList.takeRightImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all but the first `from` elements. + */ + override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) + + /** @inheritdoc + * + * $evaluatesAllElements + */ + override def reverse: LazyList[A] = reverseOnto(LazyList.empty) + + // need contravariant type B to make the compiler happy - still returns LazyList[A] + @tailrec + private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = + if (isEmpty) tl + else tail.reverseOnto(newLL(sCons(head, tl))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def diff[B >: A](that: collection.Seq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else super.diff(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def intersect[B >: A](that: collection.Seq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else super.intersect(that) + + @tailrec + private def lengthGt(len: Int): Boolean = + if (len < 0) true + else if (isEmpty) false + else tail.lengthGt(len - 1) + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * a single element ahead of the iterator is evaluated. + */ + override def grouped(size: Int): Iterator[LazyList[A]] = { + require(size > 0, "size must be positive, but was " + size) + slidingImpl(size = size, step = size) + } + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * `size - step max 1` elements ahead of the iterator are evaluated. 
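+ *
+ * (That is, `(size - step) max 1` elements.) For example (an illustrative sketch):
+ * {{{
+ * LazyList.from(0).sliding(3, 2).take(2).map(_.toList).toList
+ * // List(List(0, 1, 2), List(2, 3, 4))
+ * }}}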
+ */
+ override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = {
+ require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive")
+ slidingImpl(size = size, step = step)
+ }
+
+ @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] =
+ if (knownIsEmpty) Iterator.empty
+ else new SlidingIterator[A](this, size = size, step = step)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def padTo[B >: A](len: Int, elem: B): LazyList[B] = {
+ if (len <= 0) this
+ else newLL {
+ if (isEmpty) LazyList.fill(len)(elem).state
+ else sCons(head, tail.padTo(len - 1, elem))
+ }
+ }
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] =
+ if (knownIsEmpty) LazyList from other
+ else patchImpl(from, other, replaced)
+
+ private def patchImpl[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] =
+ newLL {
+ if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyList.dropImpl(this, replaced).state)
+ else if (isEmpty) stateFromIterator(other.iterator)
+ else sCons(head, tail.patchImpl(from - 1, other, replaced))
+ }
+
+ /** @inheritdoc
+ *
+ * $evaluatesAllElements
+ */
+ // overridden just in case a lazy implementation is developed at some point
+ override def transpose[B](implicit asIterable: A => collection.Iterable[B]): LazyList[LazyList[B]] = super.transpose
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def updated[B >: A](index: Int, elem: B): LazyList[B] =
+ if (index < 0) throw new IndexOutOfBoundsException(s"$index")
+ else updatedImpl(index, elem, index)
+
+ private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = {
+ newLL {
+ if (index <= 0) sCons(elem, tail)
+ else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString)
+ else sCons(head, tail.updatedImpl(index - 1, elem, startIndex))
+ }
+ }
+
+ /** Appends all elements of this $coll to a string builder using start, end, and separator strings.
+ * The written text begins with the string `start` and ends with the string `end`.
+ * Inside, the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`.
+ *
+ * $evaluatesAllElements
+ *
+ * @param sb the string builder to which elements are appended.
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return the string builder `sb` to which elements were appended.
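+ *
+ * For example (an illustrative sketch):
+ * {{{
+ * val sb = new StringBuilder
+ * LazyList(1, 2, 3).addString(sb, "[", ", ", "]").toString // "[1, 2, 3]"
+ * }}}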
+ */
+ override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = {
+ force
+ addStringNoForce(sb.underlying, start, sep, end)
+ sb
+ }
+
+ private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = {
+ b.append(start)
+ if (!stateDefined) b.append("<not computed>")
+ else if (!isEmpty) {
+ b.append(head)
+ var cursor = this
+ @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head)
+ var scout = tail
+ @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty
+ if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) {
+ cursor = scout
+ if (scoutNonEmpty) {
+ scout = scout.tail
+ // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings
+ while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) {
+ appendCursorElement()
+ cursor = cursor.tail
+ scout = scout.tail
+ if (scoutNonEmpty) scout = scout.tail
+ }
+ }
+ }
+ if (!scoutNonEmpty) { // Not a cycle, scout hit an end
+ while (cursor ne scout) {
+ appendCursorElement()
+ cursor = cursor.tail
+ }
+ // if cursor (eq scout) has state defined, it is empty; else unknown state
+ if (!cursor.stateDefined) b.append(sep).append("<not computed>")
+ } else {
+ @inline def same(a: LazyList[A], b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state)
+ // Cycle.
+ // If we have a prefix of length P followed by a cycle of length C,
+ // the scout will be at position (P%C) in the cycle when the cursor
+ // enters it at P. They'll then collide when the scout advances another
+ // C - (P%C) ahead of the cursor.
+ // If we run the scout P farther, then it will be at the start of
+ // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner
+ // starts at the beginning of the prefix, they'll collide exactly at
+ // the start of the loop.
+ var runner = this
+ var k = 0
+ while (!same(runner, scout)) {
+ runner = runner.tail
+ scout = scout.tail
+ k += 1
+ }
+ // Now runner and scout are at the beginning of the cycle. Advance
+ // cursor, adding to string, until it hits; then we'll have covered
+ // everything once. If cursor is already at beginning, we'd better
+ // advance one first unless runner didn't go anywhere (in which case
+ // we've already looped once).
+ if (same(cursor, scout) && (k > 0)) {
+ appendCursorElement()
+ cursor = cursor.tail
+ }
+ while (!same(cursor, scout)) {
+ appendCursorElement()
+ cursor = cursor.tail
+ }
+ b.append(sep).append("<cycle>")
+ }
+ }
+ b.append(end)
+ }
+
+ /** $preservesLaziness
+ *
+ * @return a string representation of this collection. An undefined state is
+ * represented with `"<not computed>"` and cycles are represented with `"<cycle>"`
+ *
+ * Examples:
+ *
+ * - `"LazyList(4, <not computed>)"`, a non-empty lazy list;
+ * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements;
+ * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains
+ * a cycle at the fourth element.
+ */
+ override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0")
+ override def hasDefiniteSize: Boolean = {
+ if (!stateDefined) false
+ else if (isEmpty) true
+ else {
+ // Two-iterator trick (2x & 1x speed) for cycle detection.
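+ // (Illustrative gloss: `these` advances two steps per iteration and `those`
+ // one, so the two can meet again only if the underlying lazy list is cyclic.)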
+ var those = this + var these = tail + while (those ne these) { + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } + } +} + +/** + * $factoryInfo + * @define coll lazy list + * @define Coll `LazyList` + */ +@SerialVersionUID(3L) +object LazyList extends SeqFactory[LazyList] { + // Eagerly evaluate cached empty instance + private[this] val _empty = newLL(State.Empty).force + + private sealed trait State[+A] extends Serializable { + def head: A + def tail: LazyList[A] + } + + private object State { + @SerialVersionUID(3L) + object Empty extends State[Nothing] { + def head: Nothing = throw new NoSuchElementException("head of empty lazy list") + def tail: LazyList[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") + } + + @SerialVersionUID(3L) + final class Cons[A](val head: A, val tail: LazyList[A]) extends State[A] + } + + /** Creates a new LazyList. */ + @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) + + /** Creates a new State.Cons. */ + @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) + + private val anyToMarker: Any => Any = _ => Statics.pfMarker + + /* All of the following `Impl` methods are carefully written so as not to + * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into + * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently + * leaking the head of the `LazyList`. Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they + * can continue their execution where they left off. 
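+   *
+   * Concretely, each method below copies `ll` into a local `var restRef` and keeps
+   * reassigning it as elements are consumed, so the closed-over reference always
+   * points at the not-yet-consumed suffix rather than at `ll` itself.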
+ */ + + private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var elem: A = null.asInstanceOf[A] + var found = false + var rest = restRef // var rest = restRef.elem + while (!found && !rest.isEmpty) { + elem = rest.head + found = p(elem) != isFlipped + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty + } + } + + private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + val marker = Statics.pfMarker + val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased + + var res: B = marker.asInstanceOf[B] // safe because B is unbounded + var rest = restRef // var rest = restRef.elem + while((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { + res = pf.applyOrElse(rest.head, toMarker) + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (res.asInstanceOf[AnyRef] eq marker) State.Empty + else sCons(res, collectImpl(rest, pf)) + } + } + + private def flatMapImpl[A, B](ll: LazyList[A], f: A => IterableOnce[B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var it: Iterator[B] = null + var itHasNext = false + var rest = restRef // var rest = restRef.elem + while (!itHasNext && !rest.isEmpty) { + it = f(rest.head).iterator + itHasNext = it.hasNext + if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw + rest = rest.tail + restRef = rest // restRef.elem = rest + } + } + if (itHasNext) { + val head = it.next() + rest = rest.tail + restRef = rest // restRef.elem = rest + sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) + } else State.Empty + } + } + + private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var iRef = n // val iRef = new IntRef(n) + newLL { + var rest = restRef // var rest = restRef.elem + var i = iRef // var i = iRef.elem + while (i > 0 && !rest.isEmpty) { + rest = rest.tail + restRef = rest // restRef.elem = rest + i -= 1 + iRef = i // iRef.elem = i + } + rest.state + } + } + + private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var rest = restRef // var rest = restRef.elem + while (!rest.isEmpty && p(rest.head)) { + rest = rest.tail + restRef = rest // restRef.elem = rest + } + rest.state + } + } + + private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var scoutRef = ll // val scoutRef = new ObjectRef(ll) + var remainingRef = n // val remainingRef = new IntRef(n) + newLL { + var scout = scoutRef // var scout = scoutRef.elem + var remaining = remainingRef // var remaining = remainingRef.elem + // advance `scout` `n` elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + scout = scout.tail + scoutRef = scout // 
scoutRef.elem = scout + remaining -= 1 + remainingRef = remaining // remainingRef.elem = remaining + } + var rest = restRef // var rest = restRef.elem + // advance `rest` and `scout` in tandem until `scout` reaches the end + while(!scout.isEmpty) { + scout = scout.tail + scoutRef = scout // scoutRef.elem = scout + rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail + restRef = rest // restRef.elem = rest + } + // `rest` is the last `n` elements (or all of them) + rest.state + } + } + + /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). + */ + object cons { + /** A lazy list consisting of a given first element and remaining elements + * @param hd The first element of the result lazy list + * @param tl The remaining elements of the result lazy list + */ + def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) + + /** Maps a lazy list to its head and tail */ + def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) + } + + implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) + + final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { + /** Construct a LazyList consisting of a given first element followed by elements + * from another LazyList. + */ + def #:: [B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) + /** Construct a LazyList consisting of the concatenation of the given LazyList and + * another LazyList. + */ + def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() + } + + object #:: { + def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + if (!s.isEmpty) Some((s.head, s.tail)) else None + } + + def from[A](coll: collection.IterableOnce[A]): LazyList[A] = coll match { + case lazyList: LazyList[A] => lazyList + case _ if coll.knownSize == 0 => empty[A] + case _ => newLL(stateFromIterator(coll.iterator)) + } + + def empty[A]: LazyList[A] = _empty + + /** Creates a State from an Iterator, with another State appended after the Iterator + * is empty. + */ + private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) + else suffix + + /** Creates a State from an IterableOnce. */ + private def stateFromIterator[A](it: Iterator[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) + else State.Empty + + override def concat[A](xss: collection.Iterable[A]*): LazyList[A] = + if (xss.knownSize == 0) empty + else newLL(concatIterator(xss.iterator)) + + private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = + if (!it.hasNext) State.Empty + else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) + + /** An infinite LazyList that repeatedly applies a given function to a start value. + * + * @param start the start value of the LazyList + * @param f the function that's repeatedly applied + * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: => A)(f: A => A): LazyList[A] = + newLL { + val head = start + sCons(head, iterate(f(head))(f)) + } + + /** + * Create an infinite LazyList starting at `start` and incrementing by + * step `step`. + * + * @param start the start value of the LazyList + * @param step the increment value of the LazyList + * @return the LazyList starting at value `start`. 
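+   *
+   * For instance (an illustrative sketch):
+   * {{{
+   * LazyList.from(1, 2).take(4).toList // List(1, 3, 5, 7)
+   * }}}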
+ */ + def from(start: Int, step: Int): LazyList[Int] = + newLL(sCons(start, from(start + step, step))) + + /** + * Create an infinite LazyList starting at `start` and incrementing by `1`. + * + * @param start the start value of the LazyList + * @return the LazyList starting at value `start`. + */ + def from(start: Int): LazyList[Int] = from(start, 1) + + /** + * Create an infinite LazyList containing the given element expression (which + * is computed for each occurrence). + * + * @param elem the element composing the resulting LazyList + * @return the LazyList containing an infinite number of elem + */ + def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) + + override def fill[A](n: Int)(elem: => A): LazyList[A] = + if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty + + override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { + def at(index: Int): LazyList[A] = + if (index < n) newLL(sCons(f(index), at(index + 1))) else empty + + at(0) + } + + // significantly simpler than the iterator returned by Iterator.unfold + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = + newLL { + f(init) match { + case Some((elem, state)) => sCons(elem, unfold(state)(f)) + case None => State.Empty + } + } + + /** The builder returned by this method only evaluates elements + * of collections added to it as needed. + * + * @tparam A the type of the ${coll}’s elements + * @return A builder for $Coll objects. + */ + def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] + + private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) extends AbstractIterator[A] { + override def hasNext: Boolean = !lazyList.isEmpty + + override def next(): A = + if (lazyList.isEmpty) Iterator.empty.next() + else { + val res = lazyList.head + lazyList = lazyList.tail + res + } + } + + private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) + extends AbstractIterator[LazyList[A]] { + private val minLen = size - step max 0 + private var first = true + + def hasNext: Boolean = + if (first) !lazyList.isEmpty + else lazyList.lengthGt(minLen) + + def next(): LazyList[A] = { + if (!hasNext) Iterator.empty.next() + else { + first = false + val list = lazyList + lazyList = list.drop(step) + list.take(size) + } + } + } + + private final class WithFilter[A] private[LazyList](lazyList: LazyList[A], p: A => Boolean) + extends collection.WithFilter[A, LazyList] { + private[this] val filtered = lazyList.filter(p) + def map[B](f: A => B): LazyList[B] = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => Boolean): collection.WithFilter[A, LazyList] = new WithFilter(filtered, q) + } + + private final class LazyBuilder[A] extends ReusableBuilder[A, LazyList[A]] { + import LazyBuilder._ + + private[this] var next: DeferredState[A] = _ + private[this] var list: LazyList[A] = _ + + clear() + + override def clear(): Unit = { + val deferred = new DeferredState[A] + list = newLL(deferred.eval()) + next = deferred + } + + override def result(): LazyList[A] = { + next init State.Empty + list + } + + override def addOne(elem: A): this.type = { + val deferred = new DeferredState[A] + next init sCons(elem, newLL(deferred.eval())) + next = deferred + this + } + + // lazy implementation which doesn't evaluate the collection being added + override def addAll(xs: IterableOnce[A]): this.type = { + if (xs.knownSize != 
0) { + val deferred = new DeferredState[A] + next init stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval()) + next = deferred + } + this + } + } + + private object LazyBuilder { + final class DeferredState[A] { + private[this] var _state: () => State[A] = _ + + def eval(): State[A] = { + val state = _state + if (state == null) throw new IllegalStateException("uninitialized") + state() + } + + // racy + def init(state: => State[A]): Unit = { + if (_state != null) throw new IllegalStateException("already initialized") + _state = () => state + } + } + } + + /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization + * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. + */ + @SerialVersionUID(3L) + final class SerializationProxy[A](@transient protected var coll: LazyList[A]) extends Serializable { + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while (these.knownNonEmpty) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new mutable.ListBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[LazyList[A]] + // scala/scala#10118: caution that no code path can evaluate `tail.state` + // before the resulting LazyList is returned + val it = init.toList.iterator + coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state)) + } + + private[this] def readResolve(): Any = coll + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/List.scala b/tests/pos-special/stdlib/collection/immutable/List.scala index 6a305f4ebdec..5358922752fb 100644 --- a/tests/pos-special/stdlib/collection/immutable/List.scala +++ b/tests/pos-special/stdlib/collection/immutable/List.scala @@ -14,12 +14,11 @@ package scala package collection package immutable -import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} +import scala.annotation.unchecked.uncheckedVariance import scala.annotation.tailrec import mutable.{Builder, ListBuffer} import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence -import language.experimental.captureChecking /** A class for immutable linked lists representing ordered collections * of elements of type `A`. 
@@ -144,7 +143,7 @@ sealed abstract class List[+A] override def prepended[B >: A](elem: B): List[B] = elem :: this - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): List[B] = prefix match { + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): List[B] = prefix match { case xs: List[B] => xs ::: this case _ if prefix.knownSize == 0 => this case b: ListBuffer[B] if this.isEmpty => b.toList @@ -166,7 +165,7 @@ sealed abstract class List[+A] } // When calling appendAll with another list `suffix`, avoid copying `suffix` - override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): List[B] = suffix match { + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): List[B] = suffix match { case xs: List[B] => this ::: xs case _ => super.appendedAll(suffix) } @@ -215,7 +214,7 @@ sealed abstract class List[+A] // dropRight is inherited from LinearSeq override def splitAt(n: Int): (List[A], List[A]) = { - val b = new ListBuffer[A @uncheckedCaptures] + val b = new ListBuffer[A] var i = 0 var these = this while (!these.isEmpty && i < n) { @@ -258,7 +257,7 @@ sealed abstract class List[+A] } } - final override def collect[B](pf: PartialFunction[A, B]^): List[B] = { + final override def collect[B](pf: PartialFunction[A, B]): List[B] = { if (this eq Nil) Nil else { var rest = this var h: ::[B] = null @@ -286,7 +285,7 @@ sealed abstract class List[+A] } } - final override def flatMap[B](f: A => IterableOnce[B]^): List[B] = { + final override def flatMap[B](f: A => IterableOnce[B]): List[B] = { var rest = this var h: ::[B] = null var t: ::[B] = null @@ -307,7 +306,7 @@ sealed abstract class List[+A] } @inline final override def takeWhile(p: A => Boolean): List[A] = { - val b = new ListBuffer[A @uncheckedCaptures] + val b = new ListBuffer[A] var these = this while (!these.isEmpty && p(these.head)) { b += these.head @@ -317,7 +316,7 @@ sealed abstract class List[+A] } @inline final override def span(p: A => Boolean): (List[A], List[A]) = { - val b = new ListBuffer[A @uncheckedCaptures] + val b = new ListBuffer[A] var these = this while (!these.isEmpty && p(these.head)) { b += these.head @@ -652,7 +651,7 @@ sealed abstract class List[+A] // Internal code that mutates `next` _must_ call `Statics.releaseFence()` if either immediately, or // before a newly-allocated, thread-local :: instance is aliased (e.g. 
in ListBuffer.toList) -final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance @uncheckedCaptures]) // sound because `next` is used only locally +final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally extends List[A] { releaseFence() override def headOption: Some[A] = Some(head) @@ -667,7 +666,7 @@ case object Nil extends List[Nothing] { override def init: Nothing = throw new UnsupportedOperationException("init of empty list") override def knownSize: Int = 0 override def iterator: Iterator[Nothing] = Iterator.empty - override def unzip[A1, A2](implicit asPair: Nothing -> (A1, A2)): (List[A1], List[A2]) = EmptyUnzip + override def unzip[A1, A2](implicit asPair: Nothing => (A1, A2)): (List[A1], List[A2]) = EmptyUnzip @transient private[this] val EmptyUnzip = (Nil, Nil) @@ -682,9 +681,9 @@ case object Nil extends List[Nothing] { object List extends StrictOptimizedSeqFactory[List] { private val TupleOfNil = (Nil, Nil) - def from[B](coll: collection.IterableOnce[B]^): List[B] = Nil.prependedAll(coll) + def from[B](coll: collection.IterableOnce[B]): List[B] = Nil.prependedAll(coll) - def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A @uncheckedCaptures]() + def newBuilder[A]: Builder[A, List[A]] = new ListBuffer() def empty[A]: List[A] = Nil diff --git a/tests/pos-special/stdlib/collection/immutable/ListMap.scala b/tests/pos-special/stdlib/collection/immutable/ListMap.scala new file mode 100644 index 000000000000..4a2b8dbd807c --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ListMap.scala @@ -0,0 +1,371 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import scala.collection.mutable.ReusableBuilder +import scala.collection.generic.DefaultSerializable +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 + +/** + * This class implements immutable maps using a list-based data structure. List map iterators and + * traversal methods visit key-value pairs in the order they were first inserted. + * + * Entries are stored internally in reversed insertion order, which means the newest key is at the + * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init` + * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes + * this collection suitable only for a small number of elements. + * + * Instances of `ListMap` represent empty maps; they can be either created by calling the + * constructor directly, or by applying the function `ListMap.empty`. 
+ *
+ * @tparam K the type of the keys contained in this list map
+ * @tparam V the type of the values associated with the keys
+ *
+ * @define Coll ListMap
+ * @define coll list map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+sealed class ListMap[K, +V]
+  extends AbstractMap[K, V]
+    with SeqMap[K, V]
+    with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]]
+    with MapFactoryDefaults[K, V, ListMap, Iterable]
+    with DefaultSerializable {
+
+  override def mapFactory: MapFactory[ListMap] = ListMap
+
+  override def size: Int = 0
+
+  override def isEmpty: Boolean = true
+
+  override def knownSize: Int = 0
+  def get(key: K): Option[V] = None
+
+  def updated[V1 >: V](key: K, value: V1): ListMap[K, V1] = new ListMap.Node[K, V1](key, value, this)
+
+  def removed(key: K): ListMap[K, V] = this
+
+  def iterator: Iterator[(K, V)] = {
+    var curr: ListMap[K, V] = this
+    var res: List[(K, V)] = Nil
+    while (curr.nonEmpty) {
+      res = (curr.key, curr.value) :: res
+      curr = curr.next
+    }
+    res.iterator
+  }
+
+  override def keys: Iterable[K] = {
+    var curr: ListMap[K, V] = this
+    var res: List[K] = Nil
+    while (curr.nonEmpty) {
+      res = curr.key :: res
+      curr = curr.next
+    }
+    res
+  }
+
+  override def hashCode(): Int = {
+    if (isEmpty) MurmurHash3.emptyMapHash
+    else {
+      // Can't efficiently override foreachEntry directly in ListMap because it would need to preserve iteration
+      // order by reversing the list first. But mapHash is symmetric so the reversed order is fine here.
+      val _reversed = new immutable.AbstractMap[K, V] {
+        override def isEmpty: Boolean = ListMap.this.isEmpty
+        override def removed(key: K): Map[K, V] = ListMap.this.removed(key)
+        override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = ListMap.this.updated(key, value)
+        override def get(key: K): Option[V] = ListMap.this.get(key)
+        override def iterator: Iterator[(K, V)] = ListMap.this.iterator
+        override def foreachEntry[U](f: (K, V) => U): Unit = {
+          var curr: ListMap[K, V] = ListMap.this
+          while (curr.nonEmpty) {
+            f(curr.key, curr.value)
+            curr = curr.next
+          }
+        }
+      }
+      MurmurHash3.mapHash(_reversed)
+    }
+  }
+
+  private[immutable] def key: K = throw new NoSuchElementException("key of empty map")
+  private[immutable] def value: V = throw new NoSuchElementException("value of empty map")
+  private[immutable] def next: ListMap[K, V] = throw new NoSuchElementException("next of empty map")
+
+  override def foldRight[Z](z: Z)(op: ((K, V), Z) => Z): Z = ListMap.foldRightInternal(this, z, op)
+  override protected[this] def className = "ListMap"
+
+}
+
+/**
+ * $factoryInfo
+ *
+ * Note that each element insertion takes O(n) time, which means that creating a list map with
+ * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of
+ * elements.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]]
+ * section on `List Maps` for more information.
+ * @define Coll ListMap
+ * @define coll list map
+ */
+@SerialVersionUID(3L)
+object ListMap extends MapFactory[ListMap] {
+  /**
+   * Represents an entry in the `ListMap`.
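+   * Each node prepends one binding to `_init`, so the chain as a whole stores
+   * bindings in reverse insertion order (newest binding first).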
+   */
+  private[immutable] final class Node[K, V](
+    override private[immutable] val key: K,
+    private[immutable] var _value: V,
+    private[immutable] var _init: ListMap[K, V]
+  ) extends ListMap[K, V] {
+    releaseFence()
+
+    override private[immutable] def value: V = _value
+
+    override def size: Int = sizeInternal(this, 0)
+
+    @tailrec private[this] def sizeInternal(cur: ListMap[K, V], acc: Int): Int =
+      if (cur.isEmpty) acc
+      else sizeInternal(cur.next, acc + 1)
+
+    override def isEmpty: Boolean = false
+
+    override def knownSize: Int = -1
+
+    @throws[NoSuchElementException]
+    override def apply(k: K): V = applyInternal(this, k)
+
+    @tailrec private[this] def applyInternal(cur: ListMap[K, V], k: K): V =
+      if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k)
+      else if (k == cur.key) cur.value
+      else applyInternal(cur.next, k)
+
+    override def get(k: K): Option[V] = getInternal(this, k)
+
+    @tailrec private[this] def getInternal(cur: ListMap[K, V], k: K): Option[V] =
+      if (cur.isEmpty) None
+      else if (k == cur.key) Some(cur.value)
+      else getInternal(cur.next, k)
+
+    override def contains(k: K): Boolean = containsInternal(this, k)
+
+    @tailrec private[this] def containsInternal(cur: ListMap[K, V], k: K): Boolean =
+      if (cur.isEmpty) false
+      else if (k == cur.key) true
+      else containsInternal(cur.next, k)
+
+    override def updated[V1 >: V](k: K, v: V1): ListMap[K, V1] = {
+
+      var index = -1 // the index (in reverse) where the key to update exists, if it is found
+      var found = false // true if the key is found in the map
+      var isDifferent = false // true if the key was found and the values are different
+
+      {
+        var curr: ListMap[K, V] = this
+
+        while (curr.nonEmpty && !found) {
+          if (k == curr.key) {
+            found = true
+            isDifferent = v.asInstanceOf[AnyRef] ne curr.value.asInstanceOf[AnyRef]
+          }
+          index += 1
+          curr = curr.init
+        }
+      }
+
+      if (found) {
+        if (isDifferent) {
+          var newHead: ListMap.Node[K, V1] = null
+          var prev: ListMap.Node[K, V1] = null
+          var curr: ListMap[K, V1] = this
+          var i = 0
+          while (i < index) {
+            val temp = new ListMap.Node(curr.key, curr.value, null)
+            if (prev ne null) {
+              prev._init = temp
+            }
+            prev = temp
+            curr = curr.init
+            if (newHead eq null) {
+              newHead = prev
+            }
+            i += 1
+          }
+          val newNode = new ListMap.Node(curr.key, v, curr.init)
+          if (prev ne null) {
+            prev._init = newNode
+          }
+          releaseFence()
+          if (newHead eq null) newNode else newHead
+        } else {
+          this
+        }
+      } else {
+        new ListMap.Node(k, v, this)
+      }
+    }
+
+    @tailrec private[this] def removeInternal(k: K, cur: ListMap[K, V], acc: List[ListMap[K, V]]): ListMap[K, V] =
+      if (cur.isEmpty) acc.last
+      else if (k == cur.key) acc.foldLeft(cur.next) { (t, h) => new Node(h.key, h.value, t) }
+      else removeInternal(k, cur.next, cur :: acc)
+
+    override def removed(k: K): ListMap[K, V] = removeInternal(k, this, Nil)
+
+    override private[immutable] def next: ListMap[K, V] = _init
+
+    override def last: (K, V) = (key, value)
+    override def init: ListMap[K, V] = next
+
+  }
+
+  def empty[K, V]: ListMap[K, V] = EmptyListMap.asInstanceOf[ListMap[K, V]]
+
+  private object EmptyListMap extends ListMap[Any, Nothing]
+
+  def from[K, V](it: collection.IterableOnce[(K, V)]): ListMap[K, V] =
+    it match {
+      case lm: ListMap[K, V] => lm
+      case lhm: collection.mutable.LinkedHashMap[K, V] =>
+        // by directly iterating through LinkedHashMap entries, we save creating intermediate tuples for each
+        // key-value pair
+        var current: ListMap[K, V] = empty[K, V]
+        var firstEntry = lhm._firstEntry
+        while (firstEntry ne null) {
+          current = new Node(firstEntry.key, firstEntry.value, current)
+          firstEntry = firstEntry.later
+        }
+        current
+      case _: collection.Map[K, V] | _: collection.MapView[K, V] =>
+        // when creating from a map, we need not handle duplicate keys, so we can just append each key-value to the end
+        var current: ListMap[K, V] = empty[K, V]
+        val iter = it.iterator
+        while (iter.hasNext) {
+          val (k, v) = iter.next()
+          current = new Node(k, v, current)
+        }
+        current
+
+      case _ => (newBuilder[K, V] ++= it).result()
+    }
+
+  /** Returns a new ListMap builder
+   *
+   * The implementation safely handles additions after `result()` without calling `clear()`
+   *
+   * @tparam K the map key type
+   * @tparam V the map value type
+   */
+  def newBuilder[K, V]: ReusableBuilder[(K, V), ListMap[K, V]] = new ListMapBuilder[K, V]
+
+  @tailrec private def foldRightInternal[K, V, Z](map: ListMap[K, V], prevValue: Z, op: ((K, V), Z) => Z): Z = {
+    if (map.isEmpty) prevValue
+    else foldRightInternal(map.init, op(map.last, prevValue), op)
+  }
+}
+
+/** Builder for ListMap.
+ * $multipleResults
+ */
+private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] {
+  private[this] var isAliased: Boolean = false
+  private[this] var underlying: ListMap[K, V] = ListMap.empty
+
+  override def clear(): Unit = {
+    underlying = ListMap.empty
+    isAliased = false
+  }
+
+  override def result(): ListMap[K, V] = {
+    isAliased = true
+    releaseFence()
+    underlying
+  }
+
+  override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2)
+
+  @tailrec
+  private[this] def insertValueAtKeyReturnFound(m: ListMap[K, V], key: K, value: V): Boolean = m match {
+    case n: ListMap.Node[K, V] =>
+      if (n.key == key) {
+        n._value = value
+        true
+      } else {
+        insertValueAtKeyReturnFound(n.init, key, value)
+      }
+    case _ => false
+  }
+
+  def addOne(key: K, value: V): this.type = {
+    if (isAliased) {
+      underlying = underlying.updated(key, value)
+    } else {
+      if (!insertValueAtKeyReturnFound(underlying, key, value)) {
+        underlying = new ListMap.Node(key, value, underlying)
+      }
+    }
+    this
+  }
+  override def addAll(xs: IterableOnce[(K, V)]): this.type = {
+    if (isAliased) {
+      super.addAll(xs)
+    } else if (underlying.nonEmpty) {
+      xs match {
+        case m: collection.Map[K, V] =>
+          // if it is a map, then its keys will not collide with themselves.
+          // therefore we only need to check the already-existing elements for collisions.
+          // No need to check the entire list
+
+          val iter = m.iterator
+          var newUnderlying = underlying
+          while (iter.hasNext) {
+            val next = iter.next()
+            if (!insertValueAtKeyReturnFound(underlying, next._1, next._2)) {
+              newUnderlying = new ListMap.Node[K, V](next._1, next._2, newUnderlying)
+            }
+          }
+          underlying = newUnderlying
+          this
+
+        case _ =>
+          super.addAll(xs)
+      }
+    } else xs match {
+      case lhm: collection.mutable.LinkedHashMap[K, V] =>
+        // special-casing LinkedHashMap avoids creating an Iterator and tuples for each key-value pair
+        var firstEntry = lhm._firstEntry
+        while (firstEntry ne null) {
+          underlying = new ListMap.Node(firstEntry.key, firstEntry.value, underlying)
+          firstEntry = firstEntry.later
+        }
+        this
+
+      case _: collection.Map[K, V] | _: collection.MapView[K, V] =>
+        val iter = xs.iterator
+        while (iter.hasNext) {
+          val (k, v) = iter.next()
+          underlying = new ListMap.Node(k, v, underlying)
+        }
+
+        this
+      case _ =>
+        super.addAll(xs)
+    }
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/ListSet.scala b/tests/pos-special/stdlib/collection/immutable/ListSet.scala
new file mode 100644
index 000000000000..e2ab0de858da
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/ListSet.scala
@@ -0,0 +1,138 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import mutable.{Builder, ImmutableBuilder}
+import scala.annotation.tailrec
+import scala.collection.generic.DefaultSerializable
+
+/**
+ * This class implements immutable sets using a list-based data structure. List set iterators and
+ * traversal methods visit elements in the order they were first inserted.
+ *
+ * Elements are stored internally in reversed insertion order, which means the newest element is at
+ * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and
+ * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which
+ * makes this collection suitable only for a small number of elements.
+ *
+ * Instances of `ListSet` represent empty sets; they can be either created by calling the
+ * constructor directly, or by applying the function `ListSet.empty`.
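+ *
+ * A small usage sketch (illustrative):
+ * {{{
+ * ListSet(1, 2, 3, 2).toList // List(1, 2, 3); iteration follows first insertion
+ * }}}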
+ * + * @tparam A the type of the elements contained in this list set + * + * @define Coll ListSet + * @define coll list set + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class ListSet[A] + extends AbstractSet[A] + with StrictOptimizedSetOps[A, ListSet, ListSet[A]] + with IterableFactoryDefaults[A, ListSet] + with DefaultSerializable { + + override protected[this] def className: String = "ListSet" + + override def size: Int = 0 + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + + def contains(elem: A): Boolean = false + + def incl(elem: A): ListSet[A] = new Node(elem) + def excl(elem: A): ListSet[A] = this + + def iterator: scala.collection.Iterator[A] = { + var curr: ListSet[A] = this + var res: List[A] = Nil + while (!curr.isEmpty) { + res = curr.elem :: res + curr = curr.next + } + res.iterator + } + + protected def elem: A = throw new NoSuchElementException("elem of empty set") + protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set") + + override def iterableFactory: IterableFactory[ListSet] = ListSet + + /** + * Represents an entry in the `ListSet`. + */ + protected class Node(override protected val elem: A) extends ListSet[A] { + + override def size = sizeInternal(this, 0) + override def knownSize: Int = -1 + @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int = + if (n.isEmpty) acc + else sizeInternal(n.next, acc + 1) + + override def isEmpty: Boolean = false + + override def contains(e: A): Boolean = containsInternal(this, e) + + @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean = + !n.isEmpty && (n.elem == e || containsInternal(n.next, e)) + + override def incl(e: A): ListSet[A] = if (contains(e)) this else new Node(e) + + override def excl(e: A): ListSet[A] = removeInternal(e, this, Nil) + + @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = + if (cur.isEmpty) acc.last + else if (k == cur.elem) acc.foldLeft(cur.next)((t, h) => new t.Node(h.elem)) + else removeInternal(k, cur.next, cur :: acc) + + override protected def next: ListSet[A] = ListSet.this + + override def last: A = elem + + override def init: ListSet[A] = next + } +} + +/** + * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list set with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. 
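+ *
+ * For example, building from an n-element source performs one O(n) insertion per
+ * element (an illustrative sketch):
+ * {{{
+ * ListSet.from(1 to 4) // ListSet(1, 2, 3, 4), built in quadratic time overall
+ * }}}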
+ * + * @define Coll ListSet + * @define coll list set + */ +@SerialVersionUID(3L) +object ListSet extends IterableFactory[ListSet] { + + def from[E](it: scala.collection.IterableOnce[E]): ListSet[E] = + it match { + case ls: ListSet[E] => ls + case _ if it.knownSize == 0 => empty[E] + case _ => (newBuilder[E] ++= it).result() + } + + private object EmptyListSet extends ListSet[Any] { + override def knownSize: Int = 0 + } + private[collection] def emptyInstance: ListSet[Any] = EmptyListSet + + def empty[A]: ListSet[A] = EmptyListSet.asInstanceOf[ListSet[A]] + + def newBuilder[A]: Builder[A, ListSet[A]] = + new ImmutableBuilder[A, ListSet[A]](empty) { + def addOne(elem: A): this.type = { elems = elems + elem; this } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/LongMap.scala b/tests/pos-special/stdlib/collection/immutable/LongMap.scala new file mode 100644 index 000000000000..c418dc7616ac --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/LongMap.scala @@ -0,0 +1,490 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import java.lang.IllegalStateException + +import scala.collection.generic.{BitOperations, DefaultSerializationProxy} +import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer} +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.language.implicitConversions + +/** Utility class for long maps. + */ +private[immutable] object LongMapUtils extends BitOperations.Long { + def branchMask(i: Long, j: Long) = highestOneBit(i ^ j) + + def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) + if (zero(p1, m)) LongMap.Bin(p, m, t1, t2) + else LongMap.Bin(p, m, t2, t1) + } + + def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match { + case (left, LongMap.Nil) => left + case (LongMap.Nil, right) => right + case (left, right) => LongMap.Bin(prefix, mask, left, right) + } +} + +import LongMapUtils._ + +/** A companion object for long maps. + * + * @define Coll `LongMap` + */ +object LongMap { + def empty[T]: LongMap[T] = LongMap.Nil + def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value) + def apply[T](elems: (Long, T)*): LongMap[T] = + elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) + + def from[V](coll: IterableOnce[(Long, V)]): LongMap[V] = + newBuilder[V].addAll(coll).result() + + def newBuilder[V]: Builder[(Long, V), LongMap[V]] = + new ImmutableBuilder[(Long, V), LongMap[V]](empty) { + def addOne(elem: (Long, V)): this.type = { elems = elems + elem; this } + } + + private[immutable] case object Nil extends LongMap[Nothing] { + // Important, don't remove this! See IntMap for explanation. 
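+    // (Summarizing the IntMap note referenced above: without this override, an
+    // infinite loop can develop via Map.equals => size => pattern match on Nil => equals.)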
+    override def equals(that : Any) = that match {
+      case _: this.type => true
+      case _: LongMap[_] => false // The only empty LongMaps are eq Nil
+      case _ => super.equals(that)
+    }
+  }
+
+  private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] {
+    def withValue[S](s: S) =
+      if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]]
+      else LongMap.Tip(key, s)
+  }
+
+  private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] {
+    def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = {
+      if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]]
+      else LongMap.Bin[S](prefix, mask, left, right)
+    }
+  }
+
+  implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]]
+
+  @SerialVersionUID(3L)
+  private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable {
+    def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it)
+    def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef]
+  }
+
+  implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]]
+  private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] {
+    def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it)
+    def newBuilder(from: Any) = LongMap.newBuilder[AnyRef]
+  }
+
+  implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this)
+  implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this)
+}
+
+// Iterator over a non-empty LongMap.
+private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
+
+  // Basically this uses a simple stack to emulate recursion over the tree. However
+  // because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and
+  // one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
+  // depth is 65.
+  var index = 0
+  var buffer = new Array[AnyRef](65)
+
+  def pop() = {
+    index -= 1
+    buffer(index).asInstanceOf[LongMap[V]]
+  }
+
+  def push(x: LongMap[V]): Unit = {
+    buffer(index) = x.asInstanceOf[AnyRef]
+    index += 1
+  }
+  push(it)
+
+  /**
+   * What value do we assign to a tip?
+   */
+  def valueOf(tip: LongMap.Tip[V]): T
+
+  def hasNext = index != 0
+  @tailrec
+  final def next(): T =
+    pop() match {
+      case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => {
+        push(right)
+        valueOf(t)
+      }
+      case LongMap.Bin(_, _, left, right) => {
+        push(right)
+        push(left)
+        next()
+      }
+      case t@LongMap.Tip(_, _) => valueOf(t)
+      // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap
+      // and don't return a LongMapIterator for LongMap.Nil.
+ case LongMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") + } +} + +private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){ + def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value) +} + +private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){ + def valueOf(tip: LongMap.Tip[V]) = tip.value +} + +private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){ + def valueOf(tip: LongMap.Tip[V]) = tip.key +} + +/** + * Specialised immutable map structure for long keys, based on + * [[https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Long Maps]] + * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. + * + * Note: This class is as of 2.8 largely superseded by HashMap. + * + * @tparam T type of the values associated with the long keys. + * + * @define Coll `immutable.LongMap` + * @define coll immutable long integer map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed abstract class LongMap[+T] extends AbstractMap[Long, T] + with StrictOptimizedMapOps[Long, T, Map, LongMap[T]] + with Serializable { + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T)] @uncheckedVariance): LongMap[T] = { + //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? + val b = newSpecificBuilder + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Long, T), LongMap[T]] @uncheckedVariance = + new ImmutableBuilder[(Long, T), LongMap[T]](empty) { + def addOne(elem: (Long, T)): this.type = { elems = elems + elem; this } + } + + override def empty: LongMap[T] = LongMap.Nil + + override def toList = { + val buffer = new ListBuffer[(Long, T)] + foreach(buffer += _) + buffer.toList + } + + /** + * Iterator over key, value pairs of the map in unsigned order of the keys. + * + * @return an iterator over pairs of long keys and corresponding values. + */ + def iterator: Iterator[(Long, T)] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapEntryIterator(this) + } + + /** + * Loops over the key, value pairs of the map in unsigned order of the keys. + */ + override final def foreach[U](f: ((Long, T)) => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } + case LongMap.Tip(key, value) => f((key, value)) + case LongMap.Nil => + } + + override final def foreachEntry[U](f: (Long, T) => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } + case LongMap.Tip(key, value) => f(key, value) + case LongMap.Nil => + } + + override def keysIterator: Iterator[Long] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapKeyIterator(this) + } + + /** + * Loop over the keys of the map. The same as keys.foreach(f), but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Long => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case LongMap.Tip(key, _) => f(key) + case LongMap.Nil => + } + + override def valuesIterator: Iterator[T] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapValueIterator(this) + } + + /** + * Loop over the values of the map. 
The same as values.foreach(f), but may
+   * be more efficient.
+   *
+   * @param f The loop body
+   */
+  final def foreachValue[U](f: T => U): Unit = this match {
+    case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
+    case LongMap.Tip(_, value) => f(value)
+    case LongMap.Nil =>
+  }
+
+  override protected[this] def className = "LongMap"
+
+  override def isEmpty = this eq LongMap.Nil
+  override def knownSize: Int = if (isEmpty) 0 else super.knownSize
+  override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match {
+    case LongMap.Bin(prefix, mask, left, right) => {
+      val (newleft, newright) = (left.filter(f), right.filter(f))
+      if ((left eq newleft) && (right eq newright)) this
+      else bin(prefix, mask, newleft, newright)
+    }
+    case LongMap.Tip(key, value) =>
+      if (f((key, value))) this
+      else LongMap.Nil
+    case LongMap.Nil => LongMap.Nil
+  }
+
+  override def transform[S](f: (Long, T) => S): LongMap[S] = this match {
+    case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
+    case t@LongMap.Tip(key, value) => t.withValue(f(key, value))
+    case LongMap.Nil => LongMap.Nil
+  }
+
+  final override def size: Int = this match {
+    case LongMap.Nil => 0
+    case LongMap.Tip(_, _) => 1
+    case LongMap.Bin(_, _, left, right) => left.size + right.size
+  }
+
+  @tailrec
+  final def get(key: Long): Option[T] = this match {
+    case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
+    case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None
+    case LongMap.Nil => None
+  }
+
+  @tailrec
+  final override def getOrElse[S >: T](key: Long, default: => S): S = this match {
+    case LongMap.Nil => default
+    case LongMap.Tip(key2, value) => if (key == key2) value else default
+    case LongMap.Bin(prefix, mask, left, right) =>
+      if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
+  }
+
+  @tailrec
+  final override def apply(key: Long): T = this match {
+    case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
+    case LongMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found")
+    case LongMap.Nil => throw new IllegalArgumentException("key not found")
+  }
+
+  override def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2)
+
+  override def updated[S >: T](key: Long, value: S): LongMap[S] = this match {
+    case LongMap.Bin(prefix, mask, left, right) =>
+      if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+      else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right)
+      else LongMap.Bin(prefix, mask, left, right.updated(key, value))
+    case LongMap.Tip(key2, value2) =>
+      if (key == key2) LongMap.Tip(key, value)
+      else join(key, LongMap.Tip(key, value), key2, this)
+    case LongMap.Nil => LongMap.Tip(key, value)
+  }
+
+  /**
+   * Updates the map, using the provided function to resolve conflicts if the key is already present.
+   *
+   * Equivalent to
+   * {{{
+   * this.get(key) match {
+   *   case None => this.updated(key, value)
+   *   case Some(oldvalue) => this.updated(key, f(oldvalue, value))
+   * }
+   * }}}
+   *
+   * @tparam S The supertype of values in this `LongMap`.
+   * @param key The key to update.
+   * @param value The value to use if there is no conflict.
+   * @param f The function used to resolve conflicts.
+   * @return The updated map.
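+   *
+   * For instance (an illustrative sketch):
+   * {{{
+   * LongMap(1L -> "a").updateWith(1L, "b", (oldV, newV) => oldV + newV) // contains 1L -> "ab"
+   * }}}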
+   */
+  def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match {
+    case LongMap.Bin(prefix, mask, left, right) =>
+      if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+      else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+      else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+    case LongMap.Tip(key2, value2) =>
+      if (key == key2) LongMap.Tip(key, f(value2, value))
+      else join(key, LongMap.Tip(key, value), key2, this)
+    case LongMap.Nil => LongMap.Tip(key, value)
+  }
+
+  def removed(key: Long): LongMap[T] = this match {
+    case LongMap.Bin(prefix, mask, left, right) =>
+      if (!hasMatch(key, prefix, mask)) this
+      else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+      else bin(prefix, mask, left, right - key)
+    case LongMap.Tip(key2, _) =>
+      if (key == key2) LongMap.Nil
+      else this
+    case LongMap.Nil => LongMap.Nil
+  }
+
+  /**
+   * A combined transform and filter function. Returns a `LongMap` such that
+   * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+   * the map contains no mapping for `key`, and if `f(key, value) == Some(x)`
+   * the map contains `(key, x)`.
+   *
+   * @tparam S The type of the values in the resulting `LongMap`.
+   * @param f The transforming function.
+   * @return The modified map.
+   */
+  def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match {
+    case LongMap.Bin(prefix, mask, left, right) => {
+      val newleft = left.modifyOrRemove(f)
+      val newright = right.modifyOrRemove(f)
+      if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]]
+      else bin(prefix, mask, newleft, newright)
+    }
+    case LongMap.Tip(key, value) => f(key, value) match {
+      case None => LongMap.Nil
+      case Some(value2) =>
+        //hack to preserve sharing
+        if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]]
+        else LongMap.Tip(key, value2)
+    }
+    case LongMap.Nil => LongMap.Nil
+  }
+
+  /**
+   * Forms a union map with that map, using the combining function to resolve conflicts.
+   *
+   * @tparam S The type of values in `that`, a supertype of values in `this`.
+   * @param that The map to form a union with.
+   * @param f The function used to resolve conflicts between two mappings.
+   * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
+   */
+  def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match {
+    case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) =>
+      if (shorter(m1, m2)) {
+        if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that)
+        else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+        else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f))
+      } else if (shorter(m2, m1)) {
+        if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that)
+        else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+        else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f))
+      }
+      else {
+        if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+        else join(p1, this, p2, that)
+      }
+    case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x))
+    case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+    case (LongMap.Nil, x) => x
+    case (x, LongMap.Nil) => x
+  }
+
+  /**
+   * Forms the intersection of these two maps with a combining function.
The + * resulting map is a map that has only keys present in both maps and has + * values produced from the original mappings by combining them with `f`. + * + * @tparam S The type of values in `that`. + * @tparam R The type of values in the resulting `LongMap`. + * @param that The map to intersect with. + * @param f The combining function. + * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. + */ + def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match { + case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) LongMap.Nil + else if (zero(p2, m1)) l1.intersectionWith(that, f) + else r1.intersectionWith(that, f) + } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) + else { + if (!hasMatch(p1, p2, m2)) LongMap.Nil + else if (zero(p1, m2)) this.intersectionWith(l2, f) + else this.intersectionWith(r2, f) + } + case (LongMap.Tip(key, value), that) => that.get(key) match { + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value, value2)) + } + case (_, LongMap.Tip(key, value)) => this.get(key) match { + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value2, value)) + } + case (_, _) => LongMap.Nil + } + + /** + * Left biased intersection. Returns the map that has all the same mappings as this but only for keys + * which are present in the other map. + * + * @tparam R The type of values in `that`. + * @param that The map to intersect with. + * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. + */ + def intersection[R](that: LongMap[R]): LongMap[T] = + this.intersectionWith(that, (key: Long, value: T, value2: R) => value) + + def ++[S >: T](that: LongMap[S]) = + this.unionWith[S](that, (key, x, y) => y) + + @tailrec + final def firstKey: Long = this match { + case LongMap.Bin(_, _, l, r) => l.firstKey + case LongMap.Tip(k, v) => k + case LongMap.Nil => throw new IllegalStateException("Empty set") + } + + @tailrec + final def lastKey: Long = this match { + case LongMap.Bin(_, _, l, r) => r.lastKey + case LongMap.Tip(k , v) => k + case LongMap.Nil => throw new IllegalStateException("Empty set") + } + + def map[V2](f: ((Long, T)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + + def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = + super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such + + override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(that) + + def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[T](LongMap), this) +} diff --git a/tests/pos-special/stdlib/collection/immutable/Map.scala b/tests/pos-special/stdlib/collection/immutable/Map.scala new file mode 100644 index 000000000000..9d334893b8cc --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Map.scala @@ -0,0 +1,692 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.generic.DefaultSerializable
+import scala.collection.immutable.Map.Map4
+import scala.collection.mutable.{Builder, ReusableBuilder}
+
+/** Base type of immutable Maps */
+trait Map[K, +V]
+  extends Iterable[(K, V)]
+    with collection.Map[K, V]
+    with MapOps[K, V, Map, Map[K, V]]
+    with MapFactoryDefaults[K, V, Map, Iterable] {
+
+  override def mapFactory: scala.collection.MapFactory[Map] = Map
+
+  override final def toMap[K2, V2](implicit ev: (K, V) <:< (K2, V2)): Map[K2, V2] = this.asInstanceOf[Map[K2, V2]]
+
+  /** The same map with a given default function.
+   * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+   * are not affected by `withDefault`.
+   *
+   * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+   *
+   * @param d the function mapping keys to values, used for non-present keys
+   * @return a wrapper of the map with a default value
+   */
+  def withDefault[V1 >: V](d: K => V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d)
+
+  /** The same map with a given default value.
+   * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+   * are not affected by `withDefaultValue`.
+   *
+   * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+   *
+   * @param d default value used for non-present keys
+   * @return a wrapper of the map with a default value
+   */
+  def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = new Map.WithDefault[K, V1](this, _ => d)
+}
+
+/** Base trait of immutable Map implementations
+ *
+ * @define coll immutable map
+ * @define Coll `immutable.Map`
+ */
+trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]]
+  extends IterableOps[(K, V), Iterable, C]
+    with collection.MapOps[K, V, CC, C] {
+
+  protected def coll: C with CC[K, V]
+
+  /** Removes a key from this map, returning a new map.
+   *
+   * @param key the key to be removed
+   * @return a new map without a binding for ''key''
+   */
+  def removed(key: K): C
+
+  /** Alias for `removed` */
+  @`inline` final def - (key: K): C = removed(key)
+
+  @deprecated("Use -- with an explicit collection", "2.13.0")
+  def - (key1: K, key2: K, keys: K*): C = removed(key1).removed(key2).removedAll(keys)
+
+  /** Creates a new $coll from this $coll by removing all elements of another
+   * collection.
+   *
+   * $willForceEvaluation
+   *
+   * @param keys the collection containing the keys to be removed.
+   * @return a new $coll that contains all entries of the current $coll
+   *         except those whose keys are contained in `keys`.
+   */
+  def removedAll(keys: IterableOnce[K]): C = keys.iterator.foldLeft[C](coll)(_ - _)
+
+  /** Alias for `removedAll` */
+  @`inline` final override def -- (keys: IterableOnce[K]): C = removedAll(keys)
+
+  /** Creates a new map obtained by updating this map with a given key/value pair.
+   * @param key the key
+   * @param value the value
+   * @tparam V1 the type of the added value
+   * @return A new map with the new key/value mapping added to this map.
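+   *
+   * A quick example (illustrative):
+   * {{{
+   * Map("a" -> 1).updated("b", 2) // Map("a" -> 1, "b" -> 2)
+   * }}}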
+ */ + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return A new map with the updated mapping with the key + */ + def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K,V1] = { + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } + } + + /** + * Alias for `updated` + * + * @param kv the key/value pair. + * @tparam V1 the type of the value in the key/value pair. + * @return A new map with the new binding added to this map. + */ + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + + /** This function transforms all the values of mappings contained + * in this map with function `f`. + * + * @param f A function over keys and values + * @return the updated map + */ + def transform[W](f: (K, V) => W): CC[K, W] = map { case (k, v) => (k, f(k, v)) } + + override def keySet: Set[K] = new ImmutableKeySet + + /** The implementation class of the set returned by `keySet` */ + protected class ImmutableKeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable { + def incl(elem: K): Set[K] = if (this(elem)) this else empty ++ this + elem + def excl(elem: K): Set[K] = if (this(elem)) empty ++ this - elem else this + } + +} + +trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends MapOps[K, V, CC, C] + with collection.StrictOptimizedMapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] { + + override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]): CC[K, V1] = { + var result: CC[K, V1] = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} + + +/** + * $factoryInfo + * @define coll immutable map + * @define Coll `immutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory[Map] { + + @SerialVersionUID(3L) + class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K => V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + def get(key: K): Option[V] = underlying.get(key) + + override def default(key: K): V = defaultValue(key) + + override def iterableFactory: IterableFactory[Iterable] = underlying.iterableFactory + + def iterator: Iterator[(K, V)] = underlying.iterator + + override def isEmpty: Boolean = underlying.isEmpty + + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + new WithDefault(underlying.concat(xs), defaultValue) + + def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + def updated[V1 >: V](key: K, 
value: V1): WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) + } + + def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): Map[K, V] = + it match { + case it: Iterable[_] if it.isEmpty => empty[K, V] + case m: Map[K, V] => m + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), Map[K, V]] = new MapBuilderImpl + + @SerialVersionUID(3L) + private object EmptyMap extends AbstractMap[Any, Nothing] with Serializable { + override def size: Int = 0 + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + def get(key: Any): Option[Nothing] = None + override def getOrElse [V1](key: Any, default: => V1): V1 = default + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + override def keysIterator: Iterator[Any] = Iterator.empty + override def valuesIterator: Iterator[Nothing] = Iterator.empty + def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) + def removed(key: Any): Map[Any, Nothing] = this + override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]): Map[Any, V2] = suffix match { + case m: immutable.Map[Any, V2] => m + case _ => super.concat(suffix) + } + } + + @SerialVersionUID(3L) + final class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def isEmpty: Boolean = false + override def apply(key: K): V = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = key == key1 + def get(key: K): Option[V] = + if (key == key1) Some(value1) else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 else default + def iterator: Iterator[(K, V)] = Iterator.single((key1, value1)) + override def keysIterator: Iterator[K] = Iterator.single(key1) + override def valuesIterator: Iterator[V] = Iterator.single(value1) + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map1(key1, value) + else new Map2(key1, value1, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) Map.empty else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = + if (pred((key1, value1)) != isFlipped) this else Map.empty + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + if (walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) this.asInstanceOf[Map[K, W]] + else new Map1(key1, walue1) + } + override def 
hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 1 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + final class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else default + def iterator: Iterator[(K, V)] = new Map2Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map2Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map2Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map2Iterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + override def hasNext: Boolean = i < 2 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map2(key1, value, key2, value2) + else if (key == key2) new Map2(key1, value1, key2, value) + else new Map3(key1, value1, key2, value2, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map1(key2, value2) + else if (key == key2) new Map1(key1, value1) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1 = null.asInstanceOf[K] + var v1 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { {k1 = key1; v1 = value1}; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) {k1 = key2; v1 = value2}; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map2(key1, walue1, key2, walue2) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 2 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= 
h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else default + def iterator: Iterator[(K, V)] = new Map3Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map3Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map3Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map3Iterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + override def hasNext: Boolean = i < 3 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map3(key1, value, key2, value2, key3, value3) + else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) + else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) + else new Map4(key1, value1, key2, value2, key3, value3, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map2(key2, value2, key3, value3) + else if (key == key2) new Map2(key1, value1, key3, value3) + else if (key == key3) new Map2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1, k2 = null.asInstanceOf[K] + var v1, v2 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} + if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 }; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => 
new Map2(k1, v1, k2, v2) + case 3 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val walue3 = f(key3, value3) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map3(key1, walue1, key2, walue2, key3, walue3) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 3 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key3, value3) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + final class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) + extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + + override def size: Int = 4 + override def knownSize: Int = 4 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else if (key == key4) Some(value4) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else default + def iterator: Iterator[(K, V)] = new Map4Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map4Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map4Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map4Iterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + override def hasNext: Boolean = i < 4 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case 3 => nextResult(key4, value4) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) + else HashMap.empty[K, V1].updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value) + def removed(key: K): Map[K, V] = + if (key == key1) 
new Map3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) || p((key4, value4)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) && p((key4, value4)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1, k2, k3 = null.asInstanceOf[K] + var v1, v2, v3 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} + if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 } else { k3 = key3; v3 = value3}; n += 1} + if (pred((key4, value4)) != isFlipped) { if (n == 0) { k1 = key4; v1 = value4 } else if (n == 1) { k2 = key4; v2 = value4 } else if (n == 2) { k3 = key4; v3 = value4 }; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => new Map2(k1, v1, k2, v2) + case 3 => new Map3(k1, v1, k2, v2, k3, v3) + case 4 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val walue3 = f(key3, value3) + val walue4 = f(key4, value4) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef]) && + (walue4.asInstanceOf[AnyRef] eq value4.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map4(key1, walue1, key2, walue2, key3, walue3, key4, walue4) + } + private[immutable] def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): builder.type = + builder.addOne(key1, value1).addOne(key2, value2).addOne(key3, value3).addOne(key4, value4) + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 4 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key3, value3) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key4, value4) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
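Concrete implementations in this file, such as `Map1` through `Map4` and `WithDefault`, extend this class so that the concrete method implementations are inherited rather than regenerated as trait forwarders in every subclass.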
*/ +abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] + +private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { + private[this] var elems: Map[K, V] = Map.empty + private[this] var switchedToHashMapBuilder: Boolean = false + private[this] var hashMapBuilder: HashMapBuilder[K, V] = _ + + private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = + if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) + else elems.getOrElse(key, value) + + override def clear(): Unit = { + elems = Map.empty + if (hashMapBuilder != null) { + hashMapBuilder.clear() + } + switchedToHashMapBuilder = false + } + + override def result(): Map[K, V] = + if (switchedToHashMapBuilder) hashMapBuilder.result() else elems + + def addOne(key: K, value: V): this.type = { + if (switchedToHashMapBuilder) { + hashMapBuilder.addOne(key, value) + } else if (elems.size < 4) { + elems = elems.updated(key, value) + } else { + // assert(elems.size == 4) + if (elems.contains(key)) { + elems = elems.updated(key, value) + } else { + switchedToHashMapBuilder = true + if (hashMapBuilder == null) { + hashMapBuilder = new HashMapBuilder + } + elems.asInstanceOf[Map4[K, V]].buildTo(hashMapBuilder) + hashMapBuilder.addOne(key, value) + } + } + + this + } + + def addOne(elem: (K, V)) = addOne(elem._1, elem._2) + + override def addAll(xs: IterableOnce[(K, V)]): this.type = + if (switchedToHashMapBuilder) { + hashMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala new file mode 100644 index 000000000000..d1ee494711a7 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala @@ -0,0 +1,507 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} + +/** `NumericRange` is a more generic version of the + * `Range` class which works with arbitrary types. + * It must be supplied with an `Integral` implementation of the + * range type. + * + * Factories for likely types include `Range.BigInt`, `Range.Long`, + * and `Range.BigDecimal`. `Range.Int` exists for completeness, but + * the `Int`-based `scala.Range` should be more performant. 
+ * + * {{{ + * val r1 = Range(0, 100, 1) + * val veryBig = Int.MaxValue.toLong + 1 + * val r2 = Range.Long(veryBig, veryBig + 100, 1) + * assert(r1 sameElements r2.map(_ - veryBig)) + * }}} + * + * @define Coll `NumericRange` + * @define coll numeric range + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed class NumericRange[T]( + val start: T, + val end: T, + val step: T, + val isInclusive: Boolean +)(implicit + num: Integral[T] +) + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with StrictOptimizedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] + with Serializable { self => + + override def iterator: Iterator[T] = new NumericRange.NumericRangeIterator(this, num) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = { + import scala.collection.convert._ + import impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntNumericRangeStepper (this.asInstanceOf[NumericRange[Int]], 0, length) + case StepperShape.LongShape => new LongNumericRangeStepper (this.asInstanceOf[NumericRange[Long]], 0, length) + case _ => shape.parUnbox(new AnyNumericRangeStepper[T](this, 0, length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + } + s.asInstanceOf[S with EfficientSplit] + } + + + /** Note that NumericRange must be invariant so that constructs + * such as "1L to 10 by 5" do not infer the range type as AnyVal. + */ + import num._ + + // See comment in Range for why this must be lazy. + override lazy val length: Int = NumericRange.count(start, end, step, isInclusive) + override lazy val isEmpty: Boolean = ( + (num.gt(start, end) && num.gt(step, num.zero)) + || (num.lt(start, end) && num.lt(step, num.zero)) + || (num.equiv(start, end) && !isInclusive) + ) + override def last: T = + if (isEmpty) Nil.head + else locationAfterN(length - 1) + override def init: NumericRange[T] = + if (isEmpty) Nil.init + else new NumericRange(start, end - step, step, isInclusive) + + override def head: T = if (isEmpty) Nil.head else start + override def tail: NumericRange[T] = + if (isEmpty) Nil.tail + else if(isInclusive) new NumericRange.Inclusive(start + step, end, step) + else new NumericRange.Exclusive(start + step, end, step) + + /** Create a new range with the start and end values of this range and + * a new `step`. + */ + def by(newStep: T): NumericRange[T] = copy(start, end, newStep) + + + /** Create a copy of this range. + */ + def copy(start: T, end: T, step: T): NumericRange[T] = + new NumericRange(start, end, step, isInclusive) + + @throws[IndexOutOfBoundsException] + def apply(idx: Int): T = { + if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${length - 1})") + else locationAfterN(idx) + } + + override def foreach[@specialized(Specializable.Unit) U](f: T => U): Unit = { + var count = 0 + var current = start + while (count < length) { + f(current) + current += step + count += 1 + } + } + + // TODO: these private methods are straight copies from Range, duplicated + // to guard against any (most likely illusory) performance drop. They should + // be eliminated one way or another. + + // Tests whether a number is within the endpoints, without testing + // whether it is a member of the sequence (i.e. when step > 1.) 
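+  // e.g. for NumericRange(0L, 10L, 3L), whose elements are 0, 3, 6 and 9, the value 7
+  // is within the boundaries even though it is not a member of the sequence.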
+ private def isWithinBoundaries(elem: T) = !isEmpty && ( + (step > zero && start <= elem && elem <= last ) || + (step < zero && last <= elem && elem <= start) + ) + // Methods like apply throw exceptions on invalid n, but methods like take/drop + // are forgiving: therefore the checks are with the methods. + private def locationAfterN(n: Int): T = start + (step * fromInt(n)) + + private def crossesTheEndAfterN(n: Int): Boolean = { + // if we're sure that subtraction in the context of T won't overflow, we use this function + // to calculate the length of the range + def unsafeRangeLength(r: NumericRange[T]): T = { + val diff = num.minus(r.end, r.start) + val quotient = num.quot(diff, r.step) + val remainder = num.rem(diff, r.step) + if (!r.isInclusive && num.equiv(remainder, num.zero)) + num.max(quotient, num.zero) + else + num.max(num.plus(quotient, num.one), num.zero) + } + + // detects whether value can survive a bidirectional trip to -and then from- Int. + def fitsInInteger(value: T): Boolean = num.equiv(num.fromInt(num.toInt(value)), value) + + val stepIsInTheSameDirectionAsStartToEndVector = + (num.gt(end, start) && num.gt(step, num.zero)) || (num.lt(end, start) && num.sign(step) == -num.one) + + if (num.equiv(start, end) || n <= 0 || !stepIsInTheSameDirectionAsStartToEndVector) return n >= 1 + + val sameSign = num.equiv(num.sign(start), num.sign(end)) + + if (sameSign) { // subtraction is safe + val len = unsafeRangeLength(this) + if (fitsInInteger(len)) n >= num.toInt(len) else num.gteq(num.fromInt(n), len) + } else { + // split to two ranges, which subtraction is safe in both of them (around zero) + val stepsRemainderToZero = num.rem(start, step) + val walksOnZero = num.equiv(stepsRemainderToZero, num.zero) + val closestToZero = if (walksOnZero) -step else stepsRemainderToZero + + /* + When splitting into two ranges, we should be super-careful about one of the sides hitting MinValue of T, + so we take two steps smaller than zero to ensure unsafeRangeLength won't overflow (taking one step may overflow depending on the step). + Same thing happens for MaxValue from zero, so we take one step further to ensure the safety of unsafeRangeLength. + After performing such operation, there are some elements remaining in between and around zero, + which their length is represented by carry. 
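+        (Equivalently: carry counts the elements around zero that belong to neither
+        sub-range, and is added back when the two sub-range lengths are combined.)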
+ */ + val (l: NumericRange[T], r: NumericRange[T], carry: Int) = + if (num.lt(start, num.zero)) { + if (walksOnZero) { + val twoStepsAfterLargestNegativeNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) + (NumericRange(start, closestToZero, step), copy(twoStepsAfterLargestNegativeNumber, end, step), 2) + } else { + (NumericRange(start, closestToZero, step), copy(num.plus(closestToZero, step), end, step), 1) + } + } else { + if (walksOnZero) { + val twoStepsAfterZero = num.times(step, num.fromInt(2)) + (copy(twoStepsAfterZero, end, step), NumericRange.inclusive(start, -step, step), 2) + } else { + val twoStepsAfterSmallestPositiveNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) + (copy(twoStepsAfterSmallestPositiveNumber, end, step), NumericRange.inclusive(start, closestToZero, step), 2) + } + } + + val leftLength = unsafeRangeLength(l) + val rightLength = unsafeRangeLength(r) + + // instead of `n >= rightLength + leftLength + curry` which may cause addition overflow, + // this can be used `(n - leftLength - curry) >= rightLength` (Both in Int and T, depends on whether the lengths fit in Int) + if (fitsInInteger(leftLength) && fitsInInteger(rightLength)) + n - num.toInt(leftLength) - carry >= num.toInt(rightLength) + else + num.gteq(num.minus(num.minus(num.fromInt(n), leftLength), num.fromInt(carry)), rightLength) + } + } + + // When one drops everything. Can't ever have unchecked operations + // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } + // will overflow. This creates an exclusive range where start == end + // based on the given value. + private def newEmptyRange(value: T) = NumericRange(value, value, step) + + override def take(n: Int): NumericRange[T] = { + if (n <= 0 || isEmpty) newEmptyRange(start) + else if (crossesTheEndAfterN(n)) this + else new NumericRange.Inclusive(start, locationAfterN(n - 1), step) + } + + override def drop(n: Int): NumericRange[T] = { + if (n <= 0 || isEmpty) this + else if (crossesTheEndAfterN(n)) newEmptyRange(end) + else copy(locationAfterN(n), end, step) + } + + override def splitAt(n: Int): (NumericRange[T], NumericRange[T]) = (take(n), drop(n)) + + override def reverse: NumericRange[T] = + if (isEmpty) this + else { + val newStep = -step + if (num.sign(newStep) == num.sign(step)) { + throw new ArithmeticException("number type is unsigned, and .reverse requires a negative step") + } else new NumericRange.Inclusive(last, start, newStep) + } + + import NumericRange.defaultOrdering + + override def min[T1 >: T](implicit ord: Ordering[T1]): T = + // We can take the fast path: + // - If the Integral of this NumericRange is also the requested Ordering + // (Integral <: Ordering). This can happen for custom Integral types. + // - The Ordering is the default Ordering of a well-known Integral type. + if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { + if (num.sign(step) > zero) head + else last + } else super.min(ord) + + override def max[T1 >: T](implicit ord: Ordering[T1]): T = + // See comment for fast path in min(). + if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { + if (num.sign(step) > zero) last + else head + } else super.max(ord) + + // a well-typed contains method. 
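+  // e.g. NumericRange(0L, 10L, 3L).containsTyped(6L) is true, while containsTyped(7L)
+  // is false because 7 is not reachable from 0 in steps of 3.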
+ def containsTyped(x: T): Boolean = + isWithinBoundaries(x) && (((x - start) % step) == zero) + + override def contains[A1 >: T](x: A1): Boolean = + try containsTyped(x.asInstanceOf[T]) + catch { case _: ClassCastException => false } + + override def sum[B >: T](implicit num: Numeric[B]): B = { + if (isEmpty) num.zero + else if (size == 1) head + else { + // If there is no overflow, use arithmetic series formula + // a + ... (n terms total) ... + b = n*(a+b)/2 + if ((num eq scala.math.Numeric.IntIsIntegral)|| + (num eq scala.math.Numeric.ShortIsIntegral)|| + (num eq scala.math.Numeric.ByteIsIntegral)|| + (num eq scala.math.Numeric.CharIsIntegral)) { + // We can do math with no overflow in a Long--easy + val exact = (size * ((num toLong head) + (num toInt last))) / 2 + num fromInt exact.toInt + } + else if (num eq scala.math.Numeric.LongIsIntegral) { + // Uh-oh, might be overflow, so we have to divide before we overflow. + // Either numRangeElements or (head + last) must be even, so divide the even one before multiplying + val a = head.toLong + val b = last.toLong + val ans = + if ((size & 1) == 0) (size / 2) * (a + b) + else size * { + // Sum is even, but we might overflow it, so divide in pieces and add back remainder + val ha = a/2 + val hb = b/2 + ha + hb + ((a - 2*ha) + (b - 2*hb)) / 2 + } + ans.asInstanceOf[B] + } + else if ((num eq scala.math.Numeric.BigIntIsIntegral) || + (num eq scala.math.Numeric.BigDecimalAsIfIntegral)) { + // No overflow, so we can use arithmetic series formula directly + // (not going to worry about running out of memory) + val numAsIntegral = num.asInstanceOf[Integral[B]] + import numAsIntegral._ + ((num fromInt size) * (head + last)) / (num fromInt 2) + } + else { + // User provided custom Numeric, so we cannot rely on arithmetic series formula (e.g. won't work on something like Z_6) + if (isEmpty) num.zero + else { + var acc = num.zero + var i = head + var idx = 0 + while(idx < length) { + acc = num.plus(acc, i) + i = i + step + idx = idx + 1 + } + acc + } + } + } + } + + override lazy val hashCode: Int = super.hashCode() + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def equals(other: Any): Boolean = other match { + case x: NumericRange[_] => + (x canEqual this) && (length == x.length) && ( + (isEmpty) || // all empty sequences are equal + (start == x.start && last == x.last) // same length and same endpoints implies equality + ) + case _ => + super.equals(other) + } + + override def toString: String = { + val empty = if (isEmpty) "empty " else "" + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + s"${empty}NumericRange $start $preposition $end$stepped" + } + + override protected[this] def className = "NumericRange" +} + +/** A companion object for numeric ranges. 
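+ *
+ * For example:
+ * {{{
+ *   NumericRange(0L, 10L, 2L)            // exclusive: 0, 2, 4, 6, 8
+ *   NumericRange.inclusive(0L, 10L, 2L)  // inclusive: 0, 2, 4, 6, 8, 10
+ * }}}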
+ * @define Coll `NumericRange` + * @define coll numeric range + */ +object NumericRange { + private def bigDecimalCheckUnderflow[T](start: T, end: T, step: T)(implicit num: Integral[T]): Unit = { + def FAIL(boundary: T, step: T): Unit = { + val msg = boundary match { + case bd: BigDecimal => s"Precision ${bd.mc.getPrecision}" + case _ => "Precision" + } + throw new IllegalArgumentException( + s"$msg inadequate to represent steps of size $step near $boundary" + ) + } + if (num.minus(num.plus(start, step), start) != step) FAIL(start, step) + if (num.minus(end, num.minus(end, step)) != step) FAIL(end, step) + } + + /** Calculates the number of elements in a range given start, end, step, and + * whether or not it is inclusive. Throws an exception if step == 0 or + * the number of elements exceeds the maximum Int. + */ + def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = { + val zero = num.zero + val upward = num.lt(start, end) + val posStep = num.gt(step, zero) + + if (step == zero) throw new IllegalArgumentException("step cannot be 0.") + else if (start == end) if (isInclusive) 1 else 0 + else if (upward != posStep) 0 + else { + /* We have to be frightfully paranoid about running out of range. + * We also can't assume that the numbers will fit in a Long. + * We will assume that if a > 0, -a can be represented, and if + * a < 0, -a+1 can be represented. We also assume that if we + * can't fit in Int, we can represent 2*Int.MaxValue+3 (at least). + * And we assume that numbers wrap rather than cap when they overflow. + */ + // Check whether we can short-circuit by deferring to Int range. + val startint = num.toInt(start) + if (start == num.fromInt(startint)) { + val endint = num.toInt(end) + if (end == num.fromInt(endint)) { + val stepint = num.toInt(step) + if (step == num.fromInt(stepint)) { + return { + if (isInclusive) Range.inclusive(startint, endint, stepint).length + else Range (startint, endint, stepint).length + } + } + } + } + // If we reach this point, deferring to Int failed. + // Numbers may be big. + if (num.isInstanceOf[Numeric.BigDecimalAsIfIntegral]) { + bigDecimalCheckUnderflow(start, end, step) // Throw exception if math is inaccurate (including no progress at all) + } + val one = num.one + val limit = num.fromInt(Int.MaxValue) + def check(t: T): T = + if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.") + else t + // If the range crosses zero, it might overflow when subtracted + val startside = num.sign(start) + val endside = num.sign(end) + num.toInt{ + if (num.gteq(num.times(startside, endside), zero)) { + // We're sure we can subtract these numbers. + // Note that we do not use .rem because of different conventions for Long and BigInt + val diff = num.minus(end, start) + val quotient = check(num.quot(diff, step)) + val remainder = num.minus(diff, num.times(quotient, step)) + if (!isInclusive && zero == remainder) quotient else check(num.plus(quotient, one)) + } + else { + // We might not even be able to subtract these numbers. + // Jump in three pieces: + // * start to -1 or 1, whichever is closer (waypointA) + // * one step, which will take us at least to 0 (ends at waypointB) + // * (except with really small numbers) + // * there to the end + val negone = num.fromInt(-1) + val startlim = if (posStep) negone else one + //Use start value if the start value is closer to zero than startlim + // * e.g. 
.5 is closer to zero than 1 and -.5 is closer to zero than -1 + val startdiff = { + if ((posStep && num.lt(startlim, start)) || (!posStep && num.gt(startlim, start))) + start + else + num.minus(startlim, start) + } + val startq = check(num.quot(startdiff, step)) + val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step)) + val waypointB = num.plus(waypointA, step) + check { + if (num.lt(waypointB, end) != upward) { + // No last piece + if (isInclusive && waypointB == end) num.plus(startq, num.fromInt(2)) + else num.plus(startq, one) + } + else { + // There is a last piece + val enddiff = num.minus(end,waypointB) + val endq = check(num.quot(enddiff, step)) + val last = if (endq == zero) waypointB else num.plus(waypointB, num.times(endq, step)) + // Now we have to tally up all the pieces + // 1 for the initial value + // startq steps to waypointA + // 1 step to waypointB + // endq steps to the end (one less if !isInclusive and last==end) + num.plus(startq, num.plus(endq, if (!isInclusive && last==end) one else num.fromInt(2))) + } + } + } + } + } + } + + @SerialVersionUID(3L) + class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) + extends NumericRange(start, end, step, true) { + override def copy(start: T, end: T, step: T): Inclusive[T] = + NumericRange.inclusive(start, end, step) + + def exclusive: Exclusive[T] = NumericRange(start, end, step) + } + + @SerialVersionUID(3L) + class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) + extends NumericRange(start, end, step, false) { + override def copy(start: T, end: T, step: T): Exclusive[T] = + NumericRange(start, end, step) + + def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step) + } + + def apply[T](start: T, end: T, step: T)(implicit num: Integral[T]): Exclusive[T] = + new Exclusive(start, end, step) + def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] = + new Inclusive(start, end, step) + + private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]]( + Numeric.BigIntIsIntegral -> Ordering.BigInt, + Numeric.IntIsIntegral -> Ordering.Int, + Numeric.ShortIsIntegral -> Ordering.Short, + Numeric.ByteIsIntegral -> Ordering.Byte, + Numeric.CharIsIntegral -> Ordering.Char, + Numeric.LongIsIntegral -> Ordering.Long, + Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal + ) + + @SerialVersionUID(3L) + private final class NumericRangeIterator[T](self: NumericRange[T], num: Integral[T]) extends AbstractIterator[T] with Serializable { + import num.mkNumericOps + + private[this] var _hasNext = !self.isEmpty + private[this] var _next: T = self.start + private[this] val lastElement: T = if (_hasNext) self.last else self.start + override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0 + def hasNext: Boolean = _hasNext + def next(): T = { + if (!_hasNext) Iterator.empty.next() + val value = _next + _hasNext = value != lastElement + _next = num.plus(value, self.step) + value + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Queue.scala b/tests/pos-special/stdlib/collection/immutable/Queue.scala new file mode 100644 index 000000000000..3d0f8206b6a9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Queue.scala @@ -0,0 +1,217 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{Builder, ListBuffer} + +/** `Queue` objects implement data structures that allow to + * insert and retrieve elements in a first-in-first-out (FIFO) manner. + * + * `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements. + * Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the + * queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''. + * + * Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)`, except in the case + * where a pivot is required, in which case, a cost of `O(n)` is incurred, where `n` is the number of elements in the queue. When this happens, + * `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] + * section on `Immutable Queues` for more information. + * + * @define Coll `immutable.Queue` + * @define coll immutable queue + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ + +sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) + extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, Queue, Queue[A]] + with StrictOptimizedLinearSeqOps[A, Queue, Queue[A]] + with StrictOptimizedSeqOps[A, Queue, Queue[A]] + with IterableFactoryDefaults[A, Queue] + with DefaultSerializable { + + override def iterableFactory: SeqFactory[Queue] = Queue + + /** Returns the `n`-th element of this queue. + * The first element is at position `0`. + * + * @param n index of the element to return + * @return the element at position `n` in this queue. + * @throws NoSuchElementException if the queue is too short. + */ + override def apply(n: Int): A = { + def indexOutOfRange(): Nothing = throw new IndexOutOfBoundsException(n.toString) + + var index = 0 + var curr = out + + while (index < n && curr.nonEmpty) { + index += 1 + curr = curr.tail + } + + if (index == n) { + if (curr.nonEmpty) curr.head + else if (in.nonEmpty) in.last + else indexOutOfRange() + } else { + val indexFromBack = n - index + val inLength = in.length + if (indexFromBack >= inLength) indexOutOfRange() + else in(inLength - indexFromBack - 1) + } + } + + /** Returns the elements in the list as an iterator + */ + override def iterator: Iterator[A] = out.iterator.concat(in.reverse) + + /** Checks if the queue is empty. + * + * @return true, iff there is no element in the queue. + */ + override def isEmpty: Boolean = in.isEmpty && out.isEmpty + + override def head: A = + if (out.nonEmpty) out.head + else if (in.nonEmpty) in.last + else throw new NoSuchElementException("head on empty queue") + + override def tail: Queue[A] = + if (out.nonEmpty) new Queue(in, out.tail) + else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) + else throw new NoSuchElementException("tail on empty queue") + + override def last: A = + if (in.nonEmpty) in.head + else if (out.nonEmpty) out.last + else throw new NoSuchElementException("last on empty queue") + + /* This is made to avoid inefficient implementation of iterator. 
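Traversing `in` and `out` directly avoids the list reversal that `iterator` performs.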
*/ + override def forall(p: A => Boolean): Boolean = + in.forall(p) && out.forall(p) + + /* This is made to avoid inefficient implementation of iterator. */ + override def exists(p: A => Boolean): Boolean = + in.exists(p) || out.exists(p) + + override protected[this] def className = "Queue" + + /** Returns the length of the queue. */ + override def length: Int = in.length + out.length + + override def prepended[B >: A](elem: B): Queue[B] = new Queue(in, elem :: out) + + override def appended[B >: A](elem: B): Queue[B] = enqueue(elem) + + override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]): Queue[B] = { + val newIn = that match { + case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) + case that: List[B] => that reverse_::: this.in + case _ => + var result: List[B] = this.in + val iter = that.iterator + while (iter.hasNext) { + result = iter.next() :: result + } + result + } + if (newIn eq this.in) this else new Queue[B](newIn, this.out) + } + + /** Creates a new queue with element added at the end + * of the old queue. + * + * @param elem the element to insert + */ + def enqueue[B >: A](elem: B): Queue[B] = new Queue(elem :: in, out) + + /** Creates a new queue with all elements provided by an `Iterable` object + * added at the end of the old queue. + * + * The elements are appended in the order they are given out by the + * iterator. + * + * @param iter an iterable object + */ + @deprecated("Use `enqueueAll` instead of `enqueue` to enqueue a collection of elements", "2.13.0") + @`inline` final def enqueue[B >: A](iter: scala.collection.Iterable[B]) = enqueueAll(iter) + + /** Creates a new queue with all elements provided by an `Iterable` object + * added at the end of the old queue. + * + * The elements are appended in the order they are given out by the + * iterator. + * + * @param iter an iterable object + */ + def enqueueAll[B >: A](iter: scala.collection.Iterable[B]): Queue[B] = appendedAll(iter) + + /** Returns a tuple with the first element in the queue, + * and a new queue with this element removed. + * + * @throws NoSuchElementException + * @return the first element of the queue. + */ + def dequeue: (A, Queue[A]) = out match { + case Nil if !in.isEmpty => val rev = in.reverse ; (rev.head, new Queue(Nil, rev.tail)) + case x :: xs => (x, new Queue(in, xs)) + case _ => throw new NoSuchElementException("dequeue on empty queue") + } + + /** Optionally retrieves the first element and a queue of the remaining elements. + * + * @return A tuple of the first element of the queue, and a new queue with this element removed. + * If the queue is empty, `None` is returned. + */ + def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue) + + /** Returns the first element in the queue, or throws an error if there + * is no element contained in the queue. + * + * @throws NoSuchElementException + * @return the first element. + */ + def front: A = head + + /** Returns a string representation of this queue. 
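+   * For example, `Queue(1, 2, 3).toString` yields the string `"Queue(1, 2, 3)"`.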
+ */ + override def toString(): String = mkString("Queue(", ", ", ")") +} + +/** $factoryInfo + * @define Coll `immutable.Queue` + * @define coll immutable queue + */ +@SerialVersionUID(3L) +object Queue extends StrictOptimizedSeqFactory[Queue] { + def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) + + def from[A](source: IterableOnce[A]): Queue[A] = source match { + case q: Queue[A] => q + case _ => + val list = List.from(source) + if (list.isEmpty) empty + else new Queue(Nil, list) + } + + def empty[A]: Queue[A] = EmptyQueue + override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList) + + private object EmptyQueue extends Queue[Nothing](Nil, Nil) { } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Range.scala b/tests/pos-special/stdlib/collection/immutable/Range.scala new file mode 100644 index 000000000000..66a149840488 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Range.scala @@ -0,0 +1,672 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.RangeStepper +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import scala.util.hashing.MurmurHash3 + +/** The `Range` class represents integer values in range + * ''[start;end)'' with non-zero step value `step`. + * It's a special case of an indexed sequence. + * For example: + * + * {{{ + * val r1 = 0 until 10 + * val r2 = r1.start until r1.end by r1.step + 1 + * println(r2.length) // = 5 + * }}} + * + * Ranges that contain more than `Int.MaxValue` elements can be created, but + * these overfull ranges have only limited capabilities. Any method that + * could require a collection of over `Int.MaxValue` length to be created, or + * could be asked to index beyond `Int.MaxValue` elements will throw an + * exception. Overfull ranges can safely be reduced in size by changing + * the step size (e.g. `by 3`) or taking/dropping elements. `contains`, + * `equals`, and access to the ends of the range (`head`, `last`, `tail`, + * `init`) are also permitted on overfull ranges. + * + * @param start the start of this range. + * @param end the end of the range. For exclusive ranges, e.g. + * `Range(0,3)` or `(0 until 3)`, this is one + * step past the last one in the range. For inclusive + * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, + * it may be in the range if it is not skipped by the step size. + * To find the last element inside a non-empty range, + * use `last` instead. + * @param step the step for the range. + * + * @define coll range + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define doesNotUseBuilders + * '''Note:''' this method does not use builders to construct a new range, + * and its complexity is O(1). 
+ */ +@SerialVersionUID(3L) +sealed abstract class Range( + val start: Int, + val end: Int, + val step: Int +) + extends AbstractSeq[Int] + with IndexedSeq[Int] + with IndexedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with StrictOptimizedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with IterableFactoryDefaults[Int, IndexedSeq] + with Serializable { range => + + final override def iterator: Iterator[Int] = new RangeIterator(start, step, lastElement, isEmpty) + + override final def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { + val st = new RangeStepper(start, step, 0, length) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + private[this] def gap = end.toLong - start.toLong + private[this] def isExact = gap % step == 0 + private[this] def hasStub = isInclusive || !isExact + private[this] def longLength = gap / step + ( if (hasStub) 1 else 0 ) + + def isInclusive: Boolean + + final override val isEmpty: Boolean = ( + (start > end && step > 0) + || (start < end && step < 0) + || (start == end && !isInclusive) + ) + + private[this] val numRangeElements: Int = { + if (step == 0) throw new IllegalArgumentException("step cannot be 0.") + else if (isEmpty) 0 + else { + val len = longLength + if (len > scala.Int.MaxValue) -1 + else len.toInt + } + } + + final def length = if (numRangeElements < 0) fail() else numRangeElements + + // This field has a sensible value only for non-empty ranges + private[this] val lastElement = step match { + case 1 => if (isInclusive) end else end-1 + case -1 => if (isInclusive) end else end+1 + case _ => + val remainder = (gap % step).toInt + if (remainder != 0) end - remainder + else if (isInclusive) end + else end - step + } + + /** The last element of this range. This method will return the correct value + * even if there are too many elements to iterate over. + */ + final override def last: Int = + if (isEmpty) throw Range.emptyRangeError("last") else lastElement + final override def head: Int = + if (isEmpty) throw Range.emptyRangeError("head") else start + + /** Creates a new range containing all the elements of this range except the last one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the last one. + */ + final override def init: Range = + if (isEmpty) throw Range.emptyRangeError("init") else dropRight(1) + + /** Creates a new range containing all the elements of this range except the first one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the first one. + */ + final override def tail: Range = { + if (isEmpty) throw Range.emptyRangeError("tail") + if (numRangeElements == 1) newEmptyRange(end) + else if(isInclusive) new Range.Inclusive(start + step, end, step) + else new Range.Exclusive(start + step, end, step) + } + + override def map[B](f: Int => B): IndexedSeq[B] = { + validateMaxLength() + super.map(f) + } + + final protected def copy(start: Int = start, end: Int = end, step: Int = step, isInclusive: Boolean = isInclusive): Range = + if(isInclusive) new Range.Inclusive(start, end, step) else new Range.Exclusive(start, end, step) + + /** Create a new range with the `start` and `end` values of this range and + * a new `step`. 
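+   *
+   * For example, `(1 to 10) by 3` yields the values 1, 4, 7 and 10.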
+ * + * @return a new range with a different step + */ + final def by(step: Int): Range = copy(start, end, step) + + // Check cannot be evaluated eagerly because we have a pattern where + // ranges are constructed like: "x to y by z" The "x to y" piece + // should not trigger an exception. So the calculation is delayed, + // which means it will not fail fast for those cases where failing was + // correct. + private[this] def validateMaxLength(): Unit = { + if (numRangeElements < 0) + fail() + } + private[this] def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step) + private[this] def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.") + + @throws[IndexOutOfBoundsException] + final def apply(idx: Int): Int = { + validateMaxLength() + if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${numRangeElements-1})") + else start + (step * idx) + } + + /*@`inline`*/ final override def foreach[@specialized(Unit) U](f: Int => U): Unit = { + // Implementation chosen on the basis of favorable microbenchmarks + // Note--initialization catches step == 0 so we don't need to here + if (!isEmpty) { + var i = start + while (true) { + f(i) + if (i == lastElement) return + i += step + } + } + } + + override final def indexOf[@specialized(Int) B >: Int](elem: B, from: Int = 0): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos >= from) pos else -1 + case _ => super.indexOf(elem, from) + } + + override final def lastIndexOf[@specialized(Int) B >: Int](elem: B, end: Int = length - 1): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos <= end) pos else -1 + case _ => super.lastIndexOf(elem, end) + } + + private[this] def posOf(i: Int): Int = + if (contains(i)) (i - start) / step else -1 + + override def sameElements[B >: Int](that: IterableOnce[B]): Boolean = that match { + case other: Range => + (this.length : @annotation.switch) match { + case 0 => other.isEmpty + case 1 => other.length == 1 && this.start == other.start + case n => other.length == n && ( + (this.start == other.start) + && (this.step == other.step) + ) + } + case _ => super.sameElements(that) + } + + /** Creates a new range containing the first `n` elements of this range. + * + * @param n the number of elements to take. + * @return a new range consisting of `n` first elements. + */ + final override def take(n: Int): Range = + if (n <= 0 || isEmpty) newEmptyRange(start) + else if (n >= numRangeElements && numRangeElements >= 0) this + else { + // May have more than Int.MaxValue elements in range (numRangeElements < 0) + // but the logic is the same either way: take the first n + new Range.Inclusive(start, locationAfterN(n - 1), step) + } + + /** Creates a new range containing all the elements of this range except the first `n` elements. + * + * @param n the number of elements to drop. + * @return a new range consisting of all the elements of this range except `n` first elements. + */ + final override def drop(n: Int): Range = + if (n <= 0 || isEmpty) this + else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end) + else { + // May have more than Int.MaxValue elements (numRangeElements < 0) + // but the logic is the same either way: go forwards n steps, keep the rest + copy(locationAfterN(n), end, step) + } + + /** Creates a new range consisting of the last `n` elements of the range. 
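+   * For example, `(1 to 10).takeRight(3)` is the range `8 to 10`.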
+ * + * $doesNotUseBuilders + */ + final override def takeRight(n: Int): Range = { + if (n <= 0) newEmptyRange(start) + else if (numRangeElements >= 0) drop(numRangeElements - n) + else { + // Need to handle over-full range separately + val y = last + val x = y - step.toLong*(n-1) + if ((step > 0 && x < start) || (step < 0 && x > start)) this + else Range.inclusive(x.toInt, y, step) + } + } + + /** Creates a new range consisting of the initial `length - n` elements of the range. + * + * $doesNotUseBuilders + */ + final override def dropRight(n: Int): Range = { + if (n <= 0) this + else if (numRangeElements >= 0) take(numRangeElements - n) + else { + // Need to handle over-full range separately + val y = last - step.toInt*n + if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start) + else Range.inclusive(start, y.toInt, step) + } + } + + // Advance from the start while we meet the given test + private[this] def argTakeWhile(p: Int => Boolean): Long = { + if (isEmpty) start + else { + var current = start + val stop = last + while (current != stop && p(current)) current += step + if (current != stop || !p(current)) current + else current.toLong + step + } + } + + final override def takeWhile(p: Int => Boolean): Range = { + val stop = argTakeWhile(p) + if (stop==start) newEmptyRange(start) + else { + val x = (stop - step).toInt + if (x == last) this + else Range.inclusive(start, x, step) + } + } + + final override def dropWhile(p: Int => Boolean): Range = { + val stop = argTakeWhile(p) + if (stop == start) this + else { + val x = (stop - step).toInt + if (x == last) newEmptyRange(last) + else Range.inclusive(x + step, last, step) + } + } + + final override def span(p: Int => Boolean): (Range, Range) = { + val border = argTakeWhile(p) + if (border == start) (newEmptyRange(start), this) + else { + val x = (border - step).toInt + if (x == last) (this, newEmptyRange(last)) + else (Range.inclusive(start, x, step), Range.inclusive(x+step, last, step)) + } + } + + /** Creates a new range containing the elements starting at `from` up to but not including `until`. + * + * $doesNotUseBuilders + * + * @param from the element at which to start + * @param until the element at which to end (not included in the range) + * @return a new range consisting of a contiguous interval of values in the old range + */ + final override def slice(from: Int, until: Int): Range = + if (from <= 0) take(until) + else if (until >= numRangeElements && numRangeElements >= 0) drop(from) + else { + val fromValue = locationAfterN(from) + if (from >= until) newEmptyRange(fromValue) + else Range.inclusive(fromValue, locationAfterN(until-1), step) + } + + // Overridden only to refine the return type + final override def splitAt(n: Int): (Range, Range) = (take(n), drop(n)) + + // Methods like apply throw exceptions on invalid n, but methods like take/drop + // are forgiving: therefore the checks are with the methods. + private[this] def locationAfterN(n: Int) = start + (step * n) + + // When one drops everything. Can't ever have unchecked operations + // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } + // will overflow. This creates an exclusive range where start == end + // based on the given value. + private[this] def newEmptyRange(value: Int) = new Range.Exclusive(value, value, step) + + /** Returns the reverse of this range. + */ + final override def reverse: Range = + if (isEmpty) this + else new Range.Inclusive(last, start, -step) + + /** Make range inclusive. 
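+    * For example, `(1 until 5).inclusive` contains the same elements as `1 to 5`.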
+ */ + final def inclusive: Range = + if (isInclusive) this + else new Range.Inclusive(start, end, step) + + final def contains(x: Int): Boolean = { + if (x == end && !isInclusive) false + else if (step > 0) { + if (x < start || x > end) false + else (step == 1) || (Integer.remainderUnsigned(x - start, step) == 0) + } + else { + if (x < end || x > start) false + else (step == -1) || (Integer.remainderUnsigned(start - x, -step) == 0) + } + } + /* Seq#contains has a type parameter so the optimised contains above doesn't override it */ + override final def contains[B >: Int](elem: B): Boolean = elem match { + case i: Int => this.contains(i) + case _ => super.contains(elem) + } + + final override def sum[B >: Int](implicit num: Numeric[B]): Int = { + if (num eq scala.math.Numeric.IntIsIntegral) { + // this is normal integer range with usual addition. arithmetic series formula can be used + if (isEmpty) 0 + else if (size == 1) head + else ((size * (head.toLong + last)) / 2).toInt + } else { + // user provided custom Numeric, we cannot rely on arithmetic series formula + if (isEmpty) num.toInt(num.zero) + else { + var acc = num.zero + var i = head + while (true) { + acc = num.plus(acc, i) + if (i == lastElement) return num.toInt(acc) + i = i + step + } + 0 // Never hit this--just to satisfy compiler since it doesn't know while(true) has type Nothing + } + } + } + + final override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) head + else last + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) last + else head + } else super.min(ord) + + final override def max[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) last + else head + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) head + else last + } else super.max(ord) + + override def tails: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length + override def next() = { + if (hasNext) { + val res = Range.this.drop(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } + + override def inits: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length + override def next() = { + if (hasNext) { + val res = Range.this.dropRight(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + final override def equals(other: Any): Boolean = other match { + case x: Range => + // Note: this must succeed for overfull ranges (length > Int.MaxValue) + if (isEmpty) x.isEmpty // empty sequences are equal + else // this is non-empty... 
+ x.nonEmpty && start == x.start && { // ...so other must contain something and have same start + val l0 = last + (l0 == x.last && ( // And same end + start == l0 || step == x.step // And either the same step, or not take any steps + )) + } + case _ => + super.equals(other) + } + + final override def hashCode: Int = + if(length >= 2) MurmurHash3.rangeHash(start, step, lastElement) + else super.hashCode + + final override def toString: String = { + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else "" + s"${prefix}Range $start $preposition $end$stepped" + } + + override protected[this] def className = "Range" + + override def distinct: Range = this + + override def grouped(size: Int): Iterator[Range] = { + require(size >= 1, f"size=$size%d, but size must be positive") + if (isEmpty) { + Iterator.empty + } else { + val s = size + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = Range.this.length > i + override def next() = + if (hasNext) { + val x = Range.this.slice(i, i + s) + i += s + x + } else { + Iterator.empty.next() + } + } + } + } + + override def sorted[B >: Int](implicit ord: Ordering[B]): IndexedSeq[Int] = + if (ord eq Ordering.Int) { + if (step > 0) { + this + } else { + reverse + } + } else { + super.sorted(ord) + } +} + +/** + * Companion object for ranges. + * @define Coll `Range` + * @define coll range + */ +object Range { + + /** Counts the number of range elements. + * precondition: step != 0 + * If the size of the range exceeds Int.MaxValue, the + * result will be negative. + */ + def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = { + if (step == 0) + throw new IllegalArgumentException("step cannot be 0.") + + val isEmpty = + if (start == end) !isInclusive + else if (start < end) step < 0 + else step > 0 + + if (isEmpty) 0 + else { + // Counts with Longs so we can recognize too-large ranges. + val gap: Long = end.toLong - start.toLong + val jumps: Long = gap / step + // Whether the size of this range is one larger than the + // number of full-sized jumps. + val hasStub = isInclusive || (gap % step != 0) + val result: Long = jumps + ( if (hasStub) 1 else 0 ) + + if (result > scala.Int.MaxValue) -1 + else result.toInt + } + } + def count(start: Int, end: Int, step: Int): Int = + count(start, end, step, isInclusive = false) + + /** Make a range from `start` until `end` (exclusive) with given step value. + * @note step != 0 + */ + def apply(start: Int, end: Int, step: Int): Range.Exclusive = new Range.Exclusive(start, end, step) + + /** Make a range from `start` until `end` (exclusive) with step value 1. + */ + def apply(start: Int, end: Int): Range.Exclusive = new Range.Exclusive(start, end, 1) + + /** Make an inclusive range from `start` to `end` with given step value. + * @note step != 0 + */ + def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Range.Inclusive(start, end, step) + + /** Make an inclusive range from `start` to `end` with step value 1. 
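+    * For example, `Range.inclusive(1, 3)` is equivalent to `1 to 3` and contains `1, 2, 3`.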
+ */ + def inclusive(start: Int, end: Int): Range.Inclusive = new Range.Inclusive(start, end, 1) + + @SerialVersionUID(3L) + final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = true + } + + @SerialVersionUID(3L) + final class Exclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = false + } + + // BigInt and Long are straightforward generic ranges. + object BigInt { + def apply(start: BigInt, end: BigInt, step: BigInt): NumericRange.Exclusive[BigInt] = NumericRange(start, end, step) + def inclusive(start: BigInt, end: BigInt, step: BigInt): NumericRange.Inclusive[BigInt] = NumericRange.inclusive(start, end, step) + } + + object Long { + def apply(start: Long, end: Long, step: Long): NumericRange.Exclusive[Long] = NumericRange(start, end, step) + def inclusive(start: Long, end: Long, step: Long): NumericRange.Inclusive[Long] = NumericRange.inclusive(start, end, step) + } + + // BigDecimal uses an alternative implementation of Numeric in which + // it pretends to be Integral[T] instead of Fractional[T]. See Numeric for + // details. The intention is for it to throw an exception anytime + // imprecision or surprises might result from anything, although this may + // not yet be fully implemented. + object BigDecimal { + implicit val bigDecAsIntegral: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral + + def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = + NumericRange(start, end, step) + def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Inclusive[BigDecimal] = + NumericRange.inclusive(start, end, step) + } + + // As there is no appealing default step size for not-really-integral ranges, + // we offer a partially constructed object. + class Partial[T, U](private val f: T => U) extends AnyVal { + def by(x: T): U = f(x) + override def toString = "Range requires step" + } + + // Illustrating genericity with Int Range, which should have the same behavior + // as the original Range class. However we leave the original Range + // indefinitely, for performance and because the compiler seems to bootstrap + // off it and won't do so with our parameterized version without modifications. 
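+  // For example, `Range.Int.inclusive(1, 10, 3)` contains the same values as
+  // `Range.inclusive(1, 10, 3)`, namely 1, 4, 7 and 10.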
+  object Int {
+    def apply(start: Int, end: Int, step: Int): NumericRange.Exclusive[Int] = NumericRange(start, end, step)
+    def inclusive(start: Int, end: Int, step: Int): NumericRange.Inclusive[Int] = NumericRange.inclusive(start, end, step)
+  }
+
+  private def emptyRangeError(what: String): Throwable =
+    new NoSuchElementException(what + " on empty Range")
+}
+
+/**
+ * @param lastElement    The last element included in the Range
+ * @param initiallyEmpty Whether the Range was initially empty or not
+ */
+@SerialVersionUID(3L)
+private class RangeIterator(
+  start: Int,
+  step: Int,
+  lastElement: Int,
+  initiallyEmpty: Boolean
+) extends AbstractIterator[Int] with Serializable {
+  private[this] var _hasNext: Boolean = !initiallyEmpty
+  private[this] var _next: Int = start
+  override def knownSize: Int = if (_hasNext) (lastElement - _next) / step + 1 else 0
+  def hasNext: Boolean = _hasNext
+  @throws[NoSuchElementException]
+  def next(): Int = {
+    if (!_hasNext) Iterator.empty.next()
+    val value = _next
+    _hasNext = value != lastElement
+    _next = value + step
+    value
+  }
+
+  override def drop(n: Int): Iterator[Int] = {
+    if (n > 0) {
+      val longPos = _next.toLong + step.toLong * n
+      if (step > 0) {
+        _next = Math.min(lastElement, longPos).toInt
+        _hasNext = longPos <= lastElement
+      }
+      else if (step < 0) {
+        _next = Math.max(lastElement, longPos).toInt
+        _hasNext = longPos >= lastElement
+      }
+    }
+    this
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala
new file mode 100644
index 000000000000..2e7aa7b472ad
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala
@@ -0,0 +1,1231 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.meta.{getter, setter}
+import scala.annotation.tailrec
+import scala.runtime.Statics.releaseFence
+
+/** An object containing the RedBlack tree implementation used by `TreeMap` and `TreeSet`.
+ *
+ * Implementation note: since efficiency is important for data structures, this implementation
+ * uses `null` to represent empty trees. This also means pattern matching cannot
+ * easily be used. The API represented by the RedBlackTree object tries to hide these
+ * optimizations behind a reasonably clean API.
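+ *
+ * For example, the empty tree is simply `null`: `isEmpty(null)` is `true` and
+ * `count(null)` is `0`, so no sentinel object is ever allocated for emptiness.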
+ */ +private[collection] object RedBlackTree { + + def isEmpty(tree: Tree[_, _]): Boolean = tree eq null + + def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null + def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match { + case null => None + case tree => Some(tree.value) + } + + @tailrec + def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp < 0) lookup(tree.left, x) + else if (cmp > 0) lookup(tree.right, x) + else tree + } + private[immutable] abstract class Helper[A](implicit val ordering: Ordering[A]) { + def beforePublish[B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) tree + else if (tree.isMutable) { + val res = tree.mutableBlack.makeImmutable + releaseFence() + res + } else tree.black + } + /** Create a new balanced tree where `newLeft` replaces `tree.left`. + * tree and newLeft are never null */ + protected[this] final def mutableBalanceLeft[A1, B, B1 >: B](tree: Tree[A1, B], newLeft: Tree[A1, B1]): Tree[A1, B1] = { + // Parameter trees + // tree | newLeft + // -- KV R | nl.L nl.KV nl.R + // | nl.R.L nl.R.KV nl.R.R + //Note - unlike the immutable trees we can't consider tree.left eq newLeft + //as the balance operations may mutate the same object + //but that check was mostly to avoid the object creation + if (newLeft.isRed) { + val newLeft_left = newLeft.left + val newLeft_right = newLeft.right + if (isRedTree(newLeft_left)) { + // RED + // black(nl.L) nl.KV black + // nl.R KV R + val resultLeft = newLeft_left.mutableBlack + val resultRight = tree.mutableBlackWithLeft(newLeft_right) + + newLeft.mutableWithLeftRight(resultLeft, resultRight) + } else if (isRedTree(newLeft_right)) { + // RED + // black nl.R.KV black + // nl.L nl.KV nl.R.L nl.R.R KV R + + val newLeft_right_right = newLeft_right.right + + val resultLeft = newLeft.mutableBlackWithRight(newLeft_right.left) + val resultRight = tree.mutableBlackWithLeft(newLeft_right_right) + + newLeft_right.mutableWithLeftRight(resultLeft, resultRight) + } else { + // tree + // newLeft KV R + tree.mutableWithLeft(newLeft) + } + } else { + // tree + // newLeft KV R + tree.mutableWithLeft(newLeft) + } + } + /** Create a new balanced tree where `newRight` replaces `tree.right`. 
+ * tree and newRight are never null */ + protected[this] final def mutableBalanceRight[A1, B, B1 >: B](tree: Tree[A1, B], newRight: Tree[A1, B1]): Tree[A1, B1] = { + // Parameter trees + // tree | newRight + // L KV -- | nr.L nr.KV nr.R + // | nr.L.L nr.L.KV nr.L.R + //Note - unlike the immutable trees we can't consider tree.right eq newRight + //as the balance operations may mutate the same object + //but that check was mostly to avoid the object creation + if (newRight.isRed) { + val newRight_left = newRight.left + if (isRedTree(newRight_left)) { + // RED + // black nr.L.KV black + // L KV nr.L.L nr.L.R nr.KV nr.R + + val resultLeft = tree.mutableBlackWithRight(newRight_left.left) + val resultRight = newRight.mutableBlackWithLeft(newRight_left.right) + + newRight_left.mutableWithLeftRight(resultLeft, resultRight) + + } else { + val newRight_right = newRight.right + if (isRedTree(newRight_right)) { + // RED + // black nr.KV black(nr.R) + // L KV nr.L + + val resultLeft = tree.mutableBlackWithRight(newRight_left) + val resultRight = newRight_right.mutableBlack + + newRight.mutableWithLeftRight(resultLeft, resultRight) + } else { + // tree + // L KV newRight + tree.mutableWithRight(newRight) + } + } + } else { + // tree + // L KV newRight + tree.mutableWithRight(newRight) + } + } + } + private[immutable] class SetHelper[A](implicit ordering: Ordering[A]) extends Helper[A] { + protected[this] final def mutableUpd(tree: Tree[A, Any], k: A): Tree[A, Any] = + if (tree eq null) { + mutableRedTree(k, (), null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + tree + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + mutableBalanceLeft(tree, mutableUpd(tree.left, k)) + else if (cmp > 0) + mutableBalanceRight(tree, mutableUpd(tree.right, k)) + else tree + } + } + private[immutable] class MapHelper[A, B](implicit ordering: Ordering[A]) extends Helper[A] { + protected[this] final def mutableUpd[B1 >: B](tree: Tree[A, B], k: A, v: B1): Tree[A, B1] = + if (tree eq null) { + mutableRedTree(k, v, null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + tree.mutableWithV(v) + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + mutableBalanceLeft(tree, mutableUpd(tree.left, k, v)) + else if (cmp > 0) + mutableBalanceRight(tree, mutableUpd(tree.right, k, v)) + else tree.mutableWithV(v) + } + } + + def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count + def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) + def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) + def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { + case (Some(from), Some(until)) => this.range(tree, from, until) + case (Some(from), None) => this.from(tree, from) + case (None, Some(until)) => this.until(tree, until) + case (None, None) => tree + } + def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) + def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from)) + def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) + def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) + + def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n)) + def take[A: Ordering, B](tree: 
Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n)) + def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) + + def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty tree") + var result = tree + while (result.left ne null) result = result.left + result + } + def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty tree") + var result = tree + while (result.right ne null) result = result.right + result + } + + def tail[A, B](tree: Tree[A, B]): Tree[A, B] = { + def _tail(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) throw new NoSuchElementException("empty tree") + else { + val tl = tree.left + if (tl eq null) tree.right + else if (tl.isBlack) balLeft(tree, _tail(tl), tree.right) + else tree.redWithLeft(_tail(tree.left)) + } + blacken(_tail(tree)) + } + + def init[A, B](tree: Tree[A, B]): Tree[A, B] = { + def _init(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) throw new NoSuchElementException("empty tree") + else { + val tr = tree.right + if (tr eq null) tree.left + else if (tr.isBlack) balRight(tree, tree.left, _init(tr)) + else tree.redWithRight(_init(tr)) + } + blacken(_init(tree)) + } + + /** + * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node. + */ + def minAfter[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp == 0) tree + else if (cmp < 0) { + val l = minAfter(tree.left, x) + if (l != null) l else tree + } else minAfter(tree.right, x) + } + + /** + * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node. 
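+   * For example, in a tree holding the keys `1, 3, 5`, `maxBefore(tree, 4)` is the node
+   * with key `3`, while `maxBefore(tree, 1)` is `null`.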
+ */ + def maxBefore[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp <= 0) maxBefore(tree.left, x) + else { + val r = maxBefore(tree.right, x) + if (r != null) r else tree + } + } + + def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) + + def keysEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameKeys(new EqualsIterator(b)) + } + def valuesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameValues(new EqualsIterator(b)) + } + def entriesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameEntries(new EqualsIterator(b)) + } + + private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = { + if (tree.left ne null) _foreach(tree.left, f) + f((tree.key, tree.value)) + if (tree.right ne null) _foreach(tree.right, f) + } + + def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) + + private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { + if (tree.left ne null) _foreachKey(tree.left, f) + f((tree.key)) + if (tree.right ne null) _foreachKey(tree.right, f) + } + + def foreachEntry[A, B, U](tree:Tree[A,B], f: (A, B) => U):Unit = if (tree ne null) _foreachEntry(tree,f) + + private[this] def _foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { + if (tree.left ne null) _foreachEntry(tree.left, f) + f(tree.key, tree.value) + if (tree.right ne null) _foreachEntry(tree.right, f) + } + + def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) + def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) + def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) + + @tailrec + def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { + val count = this.count(tree.left) + if (n < count) nth(tree.left, n) + else if (n > count) nth(tree.right, n - count - 1) + else tree + } + + def isBlack(tree: Tree[_, _]) = (tree eq null) || tree.isBlack + + @`inline` private[this] def isRedTree(tree: Tree[_, _]) = (tree ne null) && tree.isRed + @`inline` private[this] def isBlackTree(tree: Tree[_, _]) = (tree ne null) && tree.isBlack + + private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black + + // Blacken if the tree is red and has a red child. This is necessary when using methods such as `upd` or `updNth` + // for building subtrees. Use `blacken` instead when building top-level trees. 
+ private[this] def maybeBlacken[A, B](t: Tree[A, B]): Tree[A, B] = + if(isBlack(t)) t else if(isRedTree(t.left) || isRedTree(t.right)) t.black else t + + private[this] def mkTree[A, B](isBlack: Boolean, key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = { + val sizeAndColour = sizeOf(left) + sizeOf(right) + 1 | (if(isBlack) initialBlackCount else initialRedCount) + new Tree(key, value.asInstanceOf[AnyRef], left, right, sizeAndColour) + } + + /** Create a new balanced tree where `newLeft` replaces `tree.left`. */ + private[this] def balanceLeft[A, B1](tree: Tree[A, B1], newLeft: Tree[A, B1]): Tree[A, B1] = { + // Parameter trees + // tree | newLeft + // -- KV R | nl.L nl.KV nl.R + // | nl.R.L nl.R.KV nl.R.R + if (tree.left eq newLeft) tree + else { + if (newLeft.isRed) { + val newLeft_left = newLeft.left + val newLeft_right = newLeft.right + if (isRedTree(newLeft_left)) { + // RED + // black(nl.L) nl.KV black + // nl.R KV R + val resultLeft = newLeft_left.black + val resultRight = tree.blackWithLeft(newLeft_right) + + newLeft.withLeftRight(resultLeft, resultRight) + } else if (isRedTree(newLeft_right)) { + // RED + // black nl.R.KV black + // nl.L nl.KV nl.R.L nl.R.R KV R + val newLeft_right_right = newLeft_right.right + + val resultLeft = newLeft.blackWithRight(newLeft_right.left) + val resultRight = tree.blackWithLeft(newLeft_right_right) + + newLeft_right.withLeftRight(resultLeft, resultRight) + } else { + // tree + // newLeft KV R + tree.withLeft(newLeft) + } + } else { + // tree + // newLeft KV R + tree.withLeft(newLeft) + } + } + } + /** Create a new balanced tree where `newRight` replaces `tree.right`. */ + private[this] def balanceRight[A, B1](tree: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + // Parameter trees + // tree | newRight + // L KV -- | nr.L nr.KV nr.R + // | nr.L.L nr.L.KV nr.L.R + if (tree.right eq newRight) tree + else { + if (newRight.isRed) { + val newRight_left = newRight.left + if (isRedTree(newRight_left)) { + // RED + // black nr.L.KV black + // L KV nr.L.L nr.L.R nr.KV nr.R + val resultLeft = tree.blackWithRight(newRight_left.left) + val resultRight = newRight.blackWithLeft(newRight_left.right) + + newRight_left.withLeftRight(resultLeft, resultRight) + } else { + val newRight_right = newRight.right + if (isRedTree(newRight_right)) { + // RED + // black nr.KV black(nr.R) + // L KV nr.L + val resultLeft = tree.blackWithRight(newRight_left) + val resultRight = newRight_right.black + + newRight.withLeftRight(resultLeft, resultRight) + } else { + // tree + // L KV newRight + tree.withRight(newRight) + } + } + } else { + // tree + // L KV newRight + tree.withRight(newRight) + } + } + } + + private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { + RedTree(k, v, null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + if (overwrite) + tree.withV(v) + else tree + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + balanceLeft(tree, upd(tree.left, k, v, overwrite)) + else if (cmp > 0) + balanceRight(tree, upd(tree.right, k, v, overwrite)) + else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.value.asInstanceOf[AnyRef])) + tree.withV(v) + else tree + } + private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) { + RedTree(k, v, null, null) + } else { + val rank = count(tree.left) + 1 + if (idx < rank) + balanceLeft(tree, updNth(tree.left, idx, k, v)) + else 
if (idx > rank) + balanceRight(tree, updNth(tree.right, idx - rank, k, v)) + else tree + } + + private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) + val newLeft = doFrom(tree.left, from) + if (newLeft eq tree.left) tree + else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false) + else join(newLeft, tree.key, tree.value, tree.right) + } + private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(to, tree.key)) return doTo(tree.left, to) + val newRight = doTo(tree.right, to) + if (newRight eq tree.right) tree + else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) + else join (tree.left, tree.key, tree.value, newRight) + } + private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) + val newRight = doUntil(tree.right, until) + if (newRight eq tree.right) tree + else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) + else join(tree.left, tree.key, tree.value, newRight) + } + + private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until) + if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until) + val newLeft = doFrom(tree.left, from) + val newRight = doUntil(tree.right, until) + if ((newLeft eq tree.left) && (newRight eq tree.right)) tree + else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false) + else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false) + else join(newLeft, tree.key, tree.value, newRight) + } + + private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = + if((tree eq null) || (n <= 0)) tree + else if(n >= tree.count) null + else { + val l = count(tree.left) + if(n > l) doDrop(tree.right, n-l-1) + else if(n == l) join(null, tree.key, tree.value, tree.right) + else join(doDrop(tree.left, n), tree.key, tree.value, tree.right) + } + + private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = + if((tree eq null) || (n <= 0)) null + else if(n >= tree.count) tree + else { + val l = count(tree.left) + if(n <= l) doTake(tree.left, n) + else if(n == l+1) maybeBlacken(updNth(tree.left, n, tree.key, tree.value)) + else join(tree.left, tree.key, tree.value, doTake(tree.right, n-l-1)) + } + + private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = + if((tree eq null) || (from >= until) || (from >= tree.count) || (until <= 0)) null + else if((from <= 0) && (until >= tree.count)) tree + else { + val l = count(tree.left) + if(until <= l) doSlice(tree.left, from, until) + else if(from > l) doSlice(tree.right, from-l-1, until-l-1) + else join(doDrop(tree.left, from), tree.key, tree.value, doTake(tree.right, until-l-1)) + } + + /* + * Forcing direct fields access using the @`inline` annotation helps speed up + * various operations (especially smallest/greatest and update/delete). + * + * Unfortunately the direct field access is not guaranteed to work (but + * works on the current implementation of the Scala compiler). 
+   *
+   * An alternative is to implement these classes using plain old Java code...
+   *
+   * Mutability
+   * This implementation encodes both mutable and immutable trees.
+   * Mutable trees are never exposed to the user code but we get significant reductions in both CPU and allocations
+   * by maintaining a mutable tree during internal operations, e.g. a builder building a Tree, and other bulk
+   * APIs such as filter or ++
+   *
+   * Mutable trees are only used within the confines of a single bulk operation and are not shared
+   * Mutable trees may transition to become immutable by calling beforePublish
+   * Mutable trees may have child nodes (left and right) which are immutable Trees (this promotes structural sharing)
+   *
+   * Immutable trees may only have child nodes (left and right) which are immutable Trees, and as such the
+   * entire transitive subtree of an immutable tree is immutable
+   *
+   * Colour, mutability and size encoding
+   * The colour of the Tree, its mutability and its size are all encoded in the _count field
+   * The colour is encoded in the top bit (31) of _count. This allows a mutable tree to change colour without
+   * additional allocation
+   * The mutable trees always have bits 0 .. 30 (inclusive) set to 0
+   * The immutable trees always have bits 0 .. 30 containing the size of the transitive subtree
+   *
+   * Naming
+   * All of the methods that can yield a mutable result have "mutable" in their name, and generally there
+   * is another method similarly named which doesn't. This is to aid safety and to reduce the cognitive load when
+   * reviewing changes. e.g.
+   * def upd(...) will update an immutable Tree, producing an immutable Tree
+   * def mutableUpd(...) will update a mutable or immutable Tree and may return a mutable or immutable Tree
+   * a method that has mutable in its name may still return an immutable tree if the operation can reuse the existing tree
+   *
+   */
+  private[immutable] final class Tree[A, +B](
+    @(`inline` @getter @setter) private var _key: A,
+    @(`inline` @getter @setter) private var _value: AnyRef,
+    @(`inline` @getter @setter) private var _left: Tree[A, _],
+    @(`inline` @getter @setter) private var _right: Tree[A, _],
+    @(`inline` @getter @setter) private var _count: Int)
+  {
+    @`inline` private[RedBlackTree] def isMutable: Boolean = (_count & colourMask) == 0
+    // read-only APIs
+    @`inline` private[RedBlackTree] final def count = {
+      //devTimeAssert((_count & 0x7FFFFFFF) != 0)
+      _count & colourMask
+    }
+    //retain the colour, and mark as mutable
+    @`inline` private def mutableRetainingColour = _count & colourBit
+
+    //inlined here to avoid outer object null checks
+    @`inline` private[RedBlackTree] final def sizeOf(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
+    @`inline` private[immutable] final def key = _key
+    @`inline` private[immutable] final def value = _value.asInstanceOf[B]
+    @`inline` private[immutable] final def left = _left.asInstanceOf[Tree[A, B]]
+    @`inline` private[immutable] final def right = _right.asInstanceOf[Tree[A, B]]
+    //Note - only used in tests outside RedBlackTree
+    @`inline` private[immutable] final def isBlack = _count < 0
+    //Note - only used in tests outside RedBlackTree
+    @`inline` private[immutable] final def isRed = _count >= 0
+
+    override def toString: String = s"${if(isRed) "RedTree" else "BlackTree"}($key, $value, $left, $right)"
+
+    //mutable APIs
+    private[RedBlackTree] def makeImmutable: Tree[A, B] = {
+      def makeImmutableImpl() = {
+        if (isMutable) {
+          var size = 1
+          if (_left ne null) {
+            _left.makeImmutable
+            size += _left.count
+          }
+          
if (_right ne null) { + _right.makeImmutable + size += _right.count + } + _count |= size //retains colour + } + this + } + makeImmutableImpl() + this + } + + private[RedBlackTree] def mutableBlack: Tree[A, B] = { + if (isBlack) this + else if (isMutable) { + _count = initialBlackCount + this + } + else new Tree(_key, _value, _left, _right, initialBlackCount) + } +// private[RedBlackTree] def mutableRed: Tree[A, B] = { +// if (isRed) this +// else if (mutable) { +// _count = initialRedCount +// this +// } +// else new Tree(_key, _value, _left, _right, initialRedCount) +// } + + private[RedBlackTree] def mutableWithV[B1 >: B](newValue: B1): Tree[A, B1] = { + if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this + else if (isMutable) { + _value = newValue.asInstanceOf[AnyRef] + this + } else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour) + } + + private[RedBlackTree] def mutableWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + if (_left eq newLeft) this + else if (isMutable) { + _left = newLeft + this + } else new Tree(_key, _value, newLeft, _right, mutableRetainingColour) + } + private[RedBlackTree] def mutableWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + if (_right eq newRight) this + else if (isMutable) { + _right = newRight + this + } else new Tree(_key, _value, _left, newRight, mutableRetainingColour) + } + private[RedBlackTree] def mutableWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + if ((_left eq newLeft) && (_right eq newRight)) this + else if (isMutable) { + _left = newLeft + _right = newRight + this + } else new Tree(_key, _value, newLeft, newRight, mutableRetainingColour) + } + private[RedBlackTree] def mutableBlackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + if ((_left eq newLeft) && isBlack) this + else if (isMutable) { + _count = initialBlackCount + _left = newLeft + this + } else new Tree(_key, _value, newLeft, _right, initialBlackCount) + } + private[RedBlackTree] def mutableBlackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + if ((_right eq newRight) && isBlack) this + else if (isMutable) { + _count = initialBlackCount + _right = newRight + this + } else new Tree(_key, _value, _left, newRight, initialBlackCount) + } + + private[RedBlackTree] def black: Tree[A, B] = { + //assertNotMutable(this) + if (isBlack) this + else new Tree(_key, _value, _left, _right, _count ^ colourBit) + } + private[RedBlackTree] def red: Tree[A, B] = { + //assertNotMutable(this) + if (isRed) this + else new Tree(_key, _value, _left, _right, _count ^ colourBit) + } + private[RedBlackTree] def withKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = { + //assertNotMutable(this) + if ((newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) && + (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef])) this + else new Tree(newKey, newValue.asInstanceOf[AnyRef], _left, _right, _count) + } + private[RedBlackTree] def withV[B1 >: B](newValue: B1): Tree[A, B1] = { + //assertNotMutable(this) + if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this + else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, _count) + } + + private[RedBlackTree] def withLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + if (newLeft eq _left) this + else { + val size = sizeOf(newLeft) + sizeOf(_right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, (_count & colourBit) | size) 
+      }
+    }
+    private[RedBlackTree] def withRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newRight)
+      if (newRight eq _right) this
+      else {
+        val size = sizeOf(_left) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, (_count & colourBit) | size)
+      }
+    }
+    private[RedBlackTree] def blackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      if ((newLeft eq _left) && isBlack) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(_right) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialBlackCount | size)
+      }
+    }
+    private[RedBlackTree] def redWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      if ((newLeft eq _left) && isRed) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(_right) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialRedCount | size)
+      }
+    }
+    private[RedBlackTree] def blackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newRight)
+      if ((newRight eq _right) && isBlack) this
+      else {
+        val size = sizeOf(_left) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialBlackCount | size)
+      }
+    }
+    private[RedBlackTree] def redWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newRight)
+      if ((newRight eq _right) && isRed) this
+      else {
+        val size = sizeOf(_left) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialRedCount | size)
+      }
+    }
+    private[RedBlackTree] def withLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      //assertNotMutable(newRight)
+      if ((newLeft eq _left) && (newRight eq _right)) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, (_count & colourBit) | size)
+      }
+    }
+    private[RedBlackTree] def redWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      //assertNotMutable(newRight)
+      if ((newLeft eq _left) && (newRight eq _right) && isRed) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialRedCount | size)
+      }
+    }
+    private[RedBlackTree] def blackWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+      //assertNotMutable(this)
+      //assertNotMutable(newLeft)
+      //assertNotMutable(newRight)
+      if ((newLeft eq _left) && (newRight eq _right) && isBlack) this
+      else {
+        val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+        new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialBlackCount | size)
+      }
+    }
+  }
+  //see #Tree docs "Colour, mutability and size encoding"
+  //we make these final vals because the optimiser inlines them, without reference to the enclosing module
+  private[RedBlackTree] final val colourBit = 0x80000000
+  //really it's ~colourBit, but that doesn't get inlined
+  private[RedBlackTree] final val colourMask = colourBit - 1
+  private[RedBlackTree] final val initialBlackCount = colourBit
+  private[RedBlackTree] final val initialRedCount = 0
+
+  @`inline` private[RedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) 
= new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) + @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) + + /** create a new immutable red tree. + * left and right may be null + */ + private[immutable] def RedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { + //assertNotMutable(left) + //assertNotMutable(right) + val size = sizeOf(left) + sizeOf(right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], left, right, initialRedCount | size) + } + private[immutable] def BlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { + //assertNotMutable(left) + //assertNotMutable(right) + val size = sizeOf(left) + sizeOf(right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], left, right, initialBlackCount | size) + } + @`inline` private def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count + //immutable APIs + //assertions - uncomment decls and callers when changing functionality + // private def devTimeAssert(assertion: Boolean) = { + // //uncomment this during development of the functionality + // assert(assertion) + // } + // private def assertNotMutable(t:Tree[_,_]) = { + // devTimeAssert ((t eq null) || t.count > 0) + // } + private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(protected implicit val ordering: Ordering[A]) extends AbstractIterator[R] { + protected[this] def nextResult(tree: Tree[A, B]): R + + override def hasNext: Boolean = lookahead ne null + + @throws[NoSuchElementException] + override def next(): R = { + val tree = lookahead + if(tree ne null) { + lookahead = findLeftMostOrPopOnEmpty(goRight(tree)) + nextResult(tree) + } else Iterator.empty.next() + } + + @tailrec + protected final def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) popNext() + else if (tree.left eq null) tree + else findLeftMostOrPopOnEmpty(goLeft(tree)) + + @`inline` private[this] def pushNext(tree: Tree[A, B]): Unit = { + stackOfNexts(index) = tree + index += 1 + } + @`inline` protected final def popNext(): Tree[A, B] = if (index == 0) null else { + index -= 1 + stackOfNexts(index) + } + + protected[this] val stackOfNexts = if (root eq null) null else { + /* + * According to "Ralf Hinze. Constructing red-black trees" [https://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] + * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. + * + * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) + * + * Although we don't store the deepest nodes in the path during iteration, + * we potentially do so in `startFrom`. + */ + val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 + new Array[Tree[A, B]](maximumHeight) + } + private[this] var index = 0 + protected var lookahead: Tree[A, B] = if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) + + /** + * Find the leftmost subtree whose key is equal to the given key, or if no such thing, + * the leftmost subtree with the key that would be "next" after it according + * to the ordering. Along the way build up the iterator's path stack so that "next" + * functionality works. 
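+     * For example, in a tree holding the keys `1, 3, 5`, starting from `4` positions the
+     * iterator on `5`, the smallest key greater than or equal to `4`.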
+ */ + private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { + @tailrec def find(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) popNext() + else find( + if (ordering.lteq(key, tree.key)) goLeft(tree) + else goRight(tree) + ) + find(root) + } + + @`inline` private[this] def goLeft(tree: Tree[A, B]) = { + pushNext(tree) + tree.left + } + + @`inline` protected final def goRight(tree: Tree[A, B]) = tree.right + } + + private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B]) extends TreeIterator[A, B, Unit](tree, None) { + override def nextResult(tree: Tree[A, B]) = ??? + + def sameKeys[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = (this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || + ordering.equiv(this.lookahead.key, that.lookahead.key) + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + def sameValues[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = this.lookahead.value == that.lookahead.value + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + def sameEntries[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = ((this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || + ordering.equiv(this.lookahead.key, that.lookahead.key)) && this.lookahead.value == that.lookahead.value + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + } + private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { + override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) + } + + private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) { + override def nextResult(tree: Tree[A, B]) = tree.key + } + + private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) { + override def nextResult(tree: Tree[A, B]) = tree.value + } + + /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ + def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Tree[A, Null] = size match { + case 0 => null + case 1 => mkTree(level != maxUsedDepth || level == 1, xs.next(), 
null, null, null)
+      case n =>
+        val leftSize = (size-1)/2
+        val left = f(level+1, leftSize)
+        val x = xs.next()
+        val right = f(level+1, size-1-leftSize)
+        BlackTree(x, null, left, right)
+    }
+    f(1, size)
+  }
+
+  /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */
+  def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = {
+    val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+    def f(level: Int, size: Int): Tree[A, B] = size match {
+      case 0 => null
+      case 1 =>
+        val (k, v) = xs.next()
+        mkTree(level != maxUsedDepth || level == 1, k, v, null, null)
+      case n =>
+        val leftSize = (size-1)/2
+        val left = f(level+1, leftSize)
+        val (k, v) = xs.next()
+        val right = f(level+1, size-1-leftSize)
+        BlackTree(k, v, left, right)
+    }
+    f(1, size)
+  }
+
+  def transform[A, B, C](t: Tree[A, B], f: (A, B) => C): Tree[A, C] =
+    if(t eq null) null
+    else {
+      val k = t.key
+      val v = t.value
+      val l = t.left
+      val r = t.right
+      val l2 = transform(l, f)
+      val v2 = f(k, v)
+      val r2 = transform(r, f)
+      if((v2.asInstanceOf[AnyRef] eq v.asInstanceOf[AnyRef])
+        && (l2 eq l)
+        && (r2 eq r)) t.asInstanceOf[Tree[A, C]]
+      else mkTree(t.isBlack, k, v2, l2, r2)
+    }
+
+  def filterEntries[A, B](t: Tree[A, B], f: (A, B) => Boolean): Tree[A, B] = if(t eq null) null else {
+    def fk(t: Tree[A, B]): Tree[A, B] = {
+      val k = t.key
+      val v = t.value
+      val l = t.left
+      val r = t.right
+      val l2 = if(l eq null) null else fk(l)
+      val keep = f(k, v)
+      val r2 = if(r eq null) null else fk(r)
+      if(!keep) join2(l2, r2)
+      else if((l2 eq l) && (r2 eq r)) t
+      else join(l2, k, v, r2)
+    }
+    blacken(fk(t))
+  }
+
+  private[this] val null2 = (null, null)
+
+  def partitionEntries[A, B](t: Tree[A, B], p: (A, B) => Boolean): (Tree[A, B], Tree[A, B]) = if (t eq null) null2 else {
+    object partitioner {
+      var tmpk, tmpd = null: Tree[A, B] // shared vars to avoid returning tuples from fk
+      def fk(t: Tree[A, B]): Unit = {
+        val k = t.key
+        val v = t.value
+        val l = t.left
+        val r = t.right
+        var l2k, l2d, r2k, r2d = null: Tree[A, B]
+        if (l ne null) {
+          fk(l)
+          l2k = tmpk
+          l2d = tmpd
+        }
+        val keep = p(k, v)
+        if (r ne null) {
+          fk(r)
+          r2k = tmpk
+          r2d = tmpd
+        }
+        val jk =
+          if (!keep) join2(l2k, r2k)
+          else if ((l2k eq l) && (r2k eq r)) t
+          else join(l2k, k, v, r2k)
+        val jd =
+          if (keep) join2(l2d, r2d)
+          else if ((l2d eq l) && (r2d eq r)) t
+          else join(l2d, k, v, r2d)
+        tmpk = jk
+        tmpd = jd
+      }
+    }
+
+    partitioner.fk(t)
+    (blacken(partitioner.tmpk), blacken(partitioner.tmpd))
+  }
+
+  // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
+  // Constructing Red-Black Trees, Ralf Hinze: [[https://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]]
+  // Red-Black Trees in a Functional Setting, Chris Okasaki: [[https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf]]
+
+  private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+    val cmp = ordering.compare(k, tree.key)
+    if (cmp < 0) {
+      val newLeft = del(tree.left, k)
+      if (newLeft eq tree.left) tree
+      else if (isBlackTree(tree.left)) balLeft(tree, newLeft, tree.right)
+      else tree.redWithLeft(newLeft)
+    } else if (cmp > 0) {
+      val newRight = del(tree.right, k)
+      if (newRight eq tree.right) tree
+      else if (isBlackTree(tree.right)) balRight(tree, tree.left, newRight)
+      else tree.redWithRight(newRight)
+    } else append(tree.left, tree.right)
+  }
+
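+  // Rebalancing helpers for deletion, following the Kahrs/Okasaki scheme above:
+  // `balance` repairs a red node with a red child by recolouring or rotating,
+  // while `balLeft`/`balRight` restore the black height after a delete has
+  // shrunk the left or right subtree.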
+ private[this] def balance[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tl)) { + if (isRedTree(tr)) tree.redWithLeftRight(tl.black, tr.black) + else if (isRedTree(tl.left)) tl.withLeftRight(tl.left.black, tree.blackWithLeftRight(tl.right, tr)) + else if (isRedTree(tl.right)) tl.right.withLeftRight(tl.blackWithRight(tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) + else tree.blackWithLeftRight(tl, tr) + } else if (isRedTree(tr)) { + if (isRedTree(tr.right)) tr.withLeftRight(tree.blackWithLeftRight(tl, tr.left), tr.right.black) + else if (isRedTree(tr.left)) tr.left.withLeftRight(tree.blackWithLeftRight(tl, tr.left.left), tr.blackWithLeftRight(tr.left.right, tr.right)) + else tree.blackWithLeftRight(tl, tr) + } else tree.blackWithLeftRight(tl, tr) + + private[this] def balLeft[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tl)) tree.redWithLeftRight(tl.black, tr) + else if (isBlackTree(tr)) balance(tree, tl, tr.red) + else if (isRedTree(tr) && isBlackTree(tr.left)) + tr.left.redWithLeftRight(tree.blackWithLeftRight(tl, tr.left.left), balance(tr, tr.left.right, tr.right.red)) + else sys.error("Defect: invariance violation") + + private[this] def balRight[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tr)) tree.redWithLeftRight(tl, tr.black) + else if (isBlackTree(tl)) balance(tree, tl.red, tr) + else if (isRedTree(tl) && isBlackTree(tl.right)) + tl.right.redWithLeftRight(balance(tl, tl.left.red, tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) + else sys.error("Defect: invariance violation") + + /** `append` is similar to `join2` but requires that both subtrees have the same black height */ + private[this] def append[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = { + if (tl eq null) tr + else if (tr eq null) tl + else if (tl.isRed) { + if (tr.isRed) { + //tl is red, tr is red + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) + else tl.withRight(tr.withLeft(bc)) + } else { + //tl is red, tr is black + tl.withRight(append(tl.right, tr)) + } + } else { + if (tr.isBlack) { + //tl is black tr is black + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) + else balLeft(tl, tl.left, tr.withLeft(bc)) + } else { + //tl is black tr is red + tr.withLeft(append(tl, tr.left)) + } + } + } + + + // Bulk operations based on "Just Join for Parallel Ordered Sets" (https://www.cs.cmu.edu/~guyb/papers/BFS16.pdf) + // We don't store the black height in the tree so we pass it down into the join methods and derive the black height + // of child nodes from it. Where possible the black height is used directly instead of deriving the rank from it. + // Our trees are supposed to have a black root so we always blacken as the last step of union/intersect/difference. 
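+  // Per that paper, each bulk operation takes O(m log(n/m + 1)) time for trees of
+  // sizes m <= n: one tree is split at the other's root key and the recursive
+  // results are joined back together.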
+ + def union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_union(t1, t2)) + + def intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_intersect(t1, t2)) + + def difference[A, B](t1: Tree[A, B], t2: Tree[A, _])(implicit ordering: Ordering[A]): Tree[A, B] = + blacken(_difference(t1, t2.asInstanceOf[Tree[A, B]])) + + /** Compute the rank from a tree and its black height */ + @`inline` private[this] def rank(t: Tree[_, _], bh: Int): Int = { + if(t eq null) 0 + else if(t.isBlack) 2*(bh-1) + else 2*bh-1 + } + + private[this] def joinRight[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], bhtl: Int, rtr: Int): Tree[A, B] = { + val rtl = rank(tl, bhtl) + if(rtl == (rtr/2)*2) RedTree(k, v, tl, tr) + else { + val tlBlack = isBlackTree(tl) + val bhtlr = if(tlBlack) bhtl-1 else bhtl + val ttr = joinRight(tl.right, k, v, tr, bhtlr, rtr) + if(tlBlack && isRedTree(ttr) && isRedTree(ttr.right)) + RedTree(ttr.key, ttr.value, + BlackTree(tl.key, tl.value, tl.left, ttr.left), + ttr.right.black) + else mkTree(tlBlack, tl.key, tl.value, tl.left, ttr) + } + } + + private[this] def joinLeft[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], rtl: Int, bhtr: Int): Tree[A, B] = { + val rtr = rank(tr, bhtr) + if(rtr == (rtl/2)*2) RedTree(k, v, tl, tr) + else { + val trBlack = isBlackTree(tr) + val bhtrl = if(trBlack) bhtr-1 else bhtr + val ttl = joinLeft(tl, k, v, tr.left, rtl, bhtrl) + if(trBlack && isRedTree(ttl) && isRedTree(ttl.left)) + RedTree(ttl.key, ttl.value, + ttl.left.black, + BlackTree(tr.key, tr.value, ttl.right, tr.right)) + else mkTree(trBlack, tr.key, tr.value, ttl, tr.right) + } + } + + private[this] def join[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B]): Tree[A, B] = { + @tailrec def h(t: Tree[_, _], i: Int): Int = + if(t eq null) i+1 else h(t.left, if(t.isBlack) i+1 else i) + val bhtl = h(tl, 0) + val bhtr = h(tr, 0) + if(bhtl > bhtr) { + val tt = joinRight(tl, k, v, tr, bhtl, rank(tr, bhtr)) + if(isRedTree(tt) && isRedTree(tt.right)) tt.black + else tt + } else if(bhtr > bhtl) { + val tt = joinLeft(tl, k, v, tr, rank(tl, bhtl), bhtr) + if(isRedTree(tt) && isRedTree(tt.left)) tt.black + else tt + } else mkTree(isRedTree(tl) || isRedTree(tr), k, v, tl, tr) + } + + private[this] def split[A, B](t: Tree[A, B], k2: A)(implicit ordering: Ordering[A]): (Tree[A, B], Tree[A, B], Tree[A, B], A) = + if(t eq null) (null, null, null, k2) + else { + val cmp = ordering.compare(k2, t.key) + if(cmp == 0) (t.left, t, t.right, t.key) + else if(cmp < 0) { + val (ll, b, lr, k1) = split(t.left, k2) + (ll, b, join(lr, t.key, t.value, t.right), k1) + } else { + val (rl, b, rr, k1) = split(t.right, k2) + (join(t.left, t.key, t.value, rl), b, rr, k1) + } + } + + private[this] def splitLast[A, B](t: Tree[A, B]): (Tree[A, B], A, B) = + if(t.right eq null) (t.left, t.key, t.value) + else { + val (tt, kk, vv) = splitLast(t.right) + (join(t.left, t.key, t.value, tt), kk, vv) + } + + private[this] def join2[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if(tl eq null) tr + else if(tr eq null) tl + else { + val (ttl, k, v) = splitLast(tl) + join(ttl, k, v, tr) + } + + private[this] def _union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + if((t1 eq null) || (t1 eq t2)) t2 + else if(t2 eq null) t1 + else { + val (l1, _, r1, k1) = split(t1, t2.key) + val tl = _union(l1, t2.left) + val tr = _union(r1, t2.right) + join(tl, k1, t2.value, tr) + } + + private[this] def _intersect[A, B](t1: 
Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + if((t1 eq null) || (t2 eq null)) null + else if (t1 eq t2) t1 + else { + val (l1, b, r1, k1) = split(t1, t2.key) + val tl = _intersect(l1, t2.left) + val tr = _intersect(r1, t2.right) + if(b ne null) join(tl, k1, t2.value, tr) + else join2(tl, tr) + } + + private[this] def _difference[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + if((t1 eq null) || (t2 eq null)) t1 + else if (t1 eq t2) null + else { + val (l1, _, r1, k1) = split(t1, t2.key) + val tl = _difference(l1, t2.left) + val tr = _difference(r1, t2.right) + join2(tl, tr) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Seq.scala b/tests/pos-special/stdlib/collection/immutable/Seq.scala index 5184cadaccae..925fd648c70c 100644 --- a/tests/pos-special/stdlib/collection/immutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/immutable/Seq.scala @@ -14,8 +14,6 @@ package scala package collection package immutable -import language.experimental.captureChecking - trait Seq[+A] extends Iterable[A] with collection.Seq[A] with SeqOps[A, Seq, Seq[A]] @@ -30,7 +28,7 @@ trait Seq[+A] extends Iterable[A] * @define coll immutable sequence * @define Coll `immutable.Seq` */ -trait SeqOps[+A, +CC[_], +C] extends AnyRef with collection.SeqOps[A, CC, C] +trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C] /** * $factoryInfo @@ -39,7 +37,7 @@ trait SeqOps[+A, +CC[_], +C] extends AnyRef with collection.SeqOps[A, CC, C] */ @SerialVersionUID(3L) object Seq extends SeqFactory.Delegate[Seq](List) { - override def from[E](it: IterableOnce[E]^): Seq[E] = it match { + override def from[E](it: IterableOnce[E]): Seq[E] = it match { case s: Seq[E] => s case _ => super.from(it) } @@ -59,7 +57,7 @@ trait IndexedSeq[+A] extends Seq[A] } - override def sameElements[B >: A](o: IterableOnce[B]^): Boolean = o match { + override def sameElements[B >: A](o: IterableOnce[B]): Boolean = o match { case that: IndexedSeq[_] => (this eq that) || { val length = this.length @@ -112,7 +110,7 @@ object IndexedSeqDefaults { @SerialVersionUID(3L) object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](Vector) { - override def from[E](it: IterableOnce[E]^): IndexedSeq[E] = it match { + override def from[E](it: IterableOnce[E]): IndexedSeq[E] = it match { case is: IndexedSeq[E] => is case _ => super.from(it) } @@ -143,14 +141,14 @@ trait LinearSeq[+A] @SerialVersionUID(3L) object LinearSeq extends SeqFactory.Delegate[LinearSeq](List) { - override def from[E](it: IterableOnce[E]^): LinearSeq[E] = it match { + override def from[E](it: IterableOnce[E]): LinearSeq[E] = it match { case ls: LinearSeq[E] => ls case _ => super.from(it) } } trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] - extends AnyRef with SeqOps[A, CC, C] + extends Any with SeqOps[A, CC, C] with collection.LinearSeqOps[A, CC, C] /** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */ diff --git a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala new file mode 100644 index 000000000000..aca9e139165e --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala @@ -0,0 +1,276 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.collection.mutable.{Builder, ReusableBuilder} + +/** A base trait for ordered, immutable maps. + * + * Note that the [[equals]] method for [[SeqMap]] compares key-value pairs + * without regard to ordering. + * + * All behavior is defined in terms of the abstract methods in `SeqMap`. + * It is sufficient for concrete subclasses to implement those methods. + * Methods that return a new map, in particular [[removed]] and [[updated]], must preserve ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ + +trait SeqMap[K, +V] + extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + + +object SeqMap extends MapFactory[SeqMap] { + def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): SeqMap[K, V] = + it match { + case sm: SeqMap[K, V] => sm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl + + @SerialVersionUID(3L) + private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable { + override def size: Int = 0 + override def knownSize: Int = 0 + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + def get(key: Any): Option[Nothing] = None + override def getOrElse [V1](key: Any, default: => V1): V1 = default + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + def updated [V1] (key: Any, value: V1): SeqMap[Any, V1] = new SeqMap1(key, value) + def removed(key: Any): SeqMap[Any, Nothing] = this + } + + @SerialVersionUID(3L) + private final class SeqMap1[K, +V](key1: K, value1: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = key == key1 + def get(key: K): Option[V] = + if (key == key1) Some(value1) else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 else default + def iterator = Iterator.single((key1, value1)) + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap1(key1, value) + else new SeqMap2(key1, value1, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) SeqMap.empty else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + } + } + + @SerialVersionUID(3L) + private final class SeqMap2[K, +V](key1: K, value1: V, key2: K, value2: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if 
(key == key2) Some(value2) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else default + def iterator = ((key1, value1) :: (key2, value2) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap2(key1, value, key2, value2) + else if (key == key2) new SeqMap2(key1, value1, key2, value) + else new SeqMap3(key1, value1, key2, value2, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap1(key2, value2) + else if (key == key2) new SeqMap1(key1, value1) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + } + } + + @SerialVersionUID(3L) + private class SeqMap3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap3(key1, value, key2, value2, key3, value3) + else if (key == key2) new SeqMap3(key1, value1, key2, value, key3, value3) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key3, value) + else new SeqMap4(key1, value1, key2, value2, key3, value3, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap2(key2, value2, key3, value3) + else if (key == key2) new SeqMap2(key1, value1, key3, value3) + else if (key == key3) new SeqMap2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + } + } + + @SerialVersionUID(3L) + private final class SeqMap4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 4 + override def knownSize: Int = 4 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else if (key == key4) Some(value4) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, 
value3) :: (key4, value4) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new SeqMap4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) new SeqMap4(key1, value1, key2, value2, key3, value3, key4, value) + else { + // Directly create the elements for performance reasons + val fields = Vector(key1, key2, key3, key4, key) + val underlying: Map[K, (Int, V1)] = + HashMap( + (key1, (0, value1)), + (key2, (1, value2)), + (key3, (2, value3)), + (key4, (3, value4)), + (key, (4, value)) + ) + new VectorMap(fields, underlying) + } + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new SeqMap3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + f(key4, value4) + } + + private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type = + builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4)) + } + + private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { + private[this] var elems: SeqMap[K, V] = SeqMap.empty + private[this] var switchedToVectorMapBuilder: Boolean = false + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] = _ + + override def clear(): Unit = { + elems = SeqMap.empty + if (vectorMapBuilder != null) { + vectorMapBuilder.clear() + } + switchedToVectorMapBuilder = false + } + + override def result(): SeqMap[K, V] = + if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems + + def addOne(elem: (K, V)) = { + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem._1)) { + elems = elems + elem // will not increase the size of the map + } else { + switchedToVectorMapBuilder = true + if (vectorMapBuilder == null) { + vectorMapBuilder = new VectorMapBuilder + } + elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) + vectorMapBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala new file mode 100644 index 000000000000..f07eb66991c8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Set.scala @@ -0,0 +1,398 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import scala.collection.immutable.Set.Set4 +import scala.collection.mutable.{Builder, ReusableBuilder} + +/** Base trait for immutable set collections */ +trait Set[A] extends Iterable[A] + with collection.Set[A] + with SetOps[A, Set, Set[A]] + with IterableFactoryDefaults[A, Set] { + override def iterableFactory: IterableFactory[Set] = Set +} + +/** Base trait for immutable set operations + * + * @define coll immutable set + * @define Coll `immutable.Set` + */ +trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends collection.SetOps[A, CC, C] { + + /** Creates a new set with an additional element, unless the element is + * already present. + * + * @param elem the element to be added + * @return a new set that contains all elements of this set and that also + * contains `elem`. + */ + def incl(elem: A): C + + /** Alias for `incl` */ + override final def + (elem: A): C = incl(elem) // like in collection.Set but not deprecated + + /** Creates a new set with a given element removed from this set. + * + * @param elem the element to be removed + * @return a new set that contains all elements of this set but that does not + * contain `elem`. + */ + def excl(elem: A): C + + /** Alias for `excl` */ + @`inline` final override def - (elem: A): C = excl(elem) + + def diff(that: collection.Set[A]): C = + foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * @param that the collection containing the elements to remove. + * @return a new $coll with the given elements removed, omitting duplicates. + */ + def removedAll(that: IterableOnce[A]): C = that.iterator.foldLeft[C](coll)(_ - _) + + /** Alias for removedAll */ + override final def -- (that: IterableOnce[A]): C = removedAll(that) +} + +trait StrictOptimizedSetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with collection.StrictOptimizedSetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def concat(that: collection.IterableOnce[A]): C = { + var result: C = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} + +/** + * $factoryInfo + * @define coll immutable set + * @define Coll `immutable.Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory[Set] { + + def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]] + + def from[E](it: collection.IterableOnce[E]): Set[E] = + it match { + // We want `SortedSet` (and subclasses, such as `BitSet`) to + // rebuild themselves to avoid element type widening issues + case _: SortedSet[E] => (newBuilder[E] ++= it).result() + case _ if it.knownSize == 0 => empty[E] + case s: Set[E] => s + case _ => (newBuilder[E] ++= it).result() + } + + def newBuilder[A]: Builder[A, Set[A]] = new SetBuilderImpl[A] + + /** An optimized representation for immutable empty sets */ + @SerialVersionUID(3L) + private object EmptySet extends AbstractSet[Any] with Serializable { + override def size: Int = 0 + override def isEmpty = true + override def knownSize: Int = size + override def filter(pred: Any => Boolean): Set[Any] = this + override def filterNot(pred: Any => Boolean): Set[Any] = this + override def removedAll(that: IterableOnce[Any]): Set[Any] = this + override def diff(that: collection.Set[Any]): Set[Any] = this + override def subsetOf(that: collection.Set[Any]): Boolean = true + override def intersect(that: collection.Set[Any]): 
Set[Any] = this + override def view: View[Any] = View.empty + def contains(elem: Any): Boolean = false + def incl(elem: Any): Set[Any] = new Set1(elem) + def excl(elem: Any): Set[Any] = this + def iterator: Iterator[Any] = Iterator.empty + override def foreach[U](f: Any => U): Unit = () + } + private[collection] def emptyInstance: Set[Any] = EmptySet + + @SerialVersionUID(3L) + private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A] with Serializable { + private[this] var current = 0 + private[this] var remainder = n + override def knownSize: Int = remainder + def hasNext = remainder > 0 + def apply(i: Int): A + def next(): A = + if (hasNext) { + val r = apply(current) + current += 1 + remainder -= 1 + r + } else Iterator.empty.next() + + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + current += n + remainder = Math.max(0, remainder - n) + } + this + } + } + + /** An optimized representation for immutable sets of size 1 */ + @SerialVersionUID(3L) + final class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 1 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set2(elem1, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) Set.empty + else this + def iterator: Iterator[A] = Iterator.single(elem1) + override def foreach[U](f: A => U): Unit = f(elem1) + override def exists(p: A => Boolean): Boolean = p(elem1) + override def forall(p: A => Boolean): Boolean = p(elem1) + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = + if (pred(elem1) != isFlipped) this else Set.empty + + override def find(p: A => Boolean): Option[A] = + if (p(elem1)) Some(elem1) + else None + override def head: A = elem1 + override def tail: Set[A] = Set.empty + } + + /** An optimized representation for immutable sets of size 2 */ + @SerialVersionUID(3L) + final class Set2[A] private[collection] (elem1: A, elem2: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 2 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 || elem == elem2 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set3(elem1, elem2, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) new Set1(elem2) + else if (elem == elem2) new Set1(elem1) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => this + } + } + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else None + } + override def head: A = elem1 + override def tail: Set[A] = 
new Set1(elem2) + } + + /** An optimized representation for immutable sets of size 3 */ + @SerialVersionUID(3L) + final class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 3 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = + elem == elem1 || elem == elem2 || elem == elem3 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set4(elem1, elem2, elem3, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) new Set2(elem2, elem3) + else if (elem == elem2) new Set2(elem1, elem3) + else if (elem == elem3) new Set2(elem1, elem2) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2); f(elem3) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) || p(elem3) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) && p(elem3) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1, r2: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} + if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => new Set2(r1, r2) + case 3 => this + } + } + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else if (p(elem3)) Some(elem3) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set2(elem2, elem3) + } + + /** An optimized representation for immutable sets of size 4 */ + @SerialVersionUID(3L) + final class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 4 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = + elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else HashSet.empty[A] + elem1 + elem2 + elem3 + elem4 + elem + def excl(elem: A): Set[A] = + if (elem == elem1) new Set3(elem2, elem3, elem4) + else if (elem == elem2) new Set3(elem1, elem3, elem4) + else if (elem == elem3) new Set3(elem1, elem2, elem4) + else if (elem == elem4) new Set3(elem1, elem2, elem3) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 case 3 => elem4 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2); f(elem3); f(elem4) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) || p(elem3) || p(elem4) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) && p(elem3) && p(elem4) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1, r2, r3: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = 
elem2 else r2 = elem2; n += 1} + if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3 else r3 = elem3; n += 1} + if (pred(elem4) != isFlipped) { if (n == 0) r1 = elem4 else if (n == 1) r2 = elem4 else if (n == 2) r3 = elem4; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => new Set2(r1, r2) + case 3 => new Set3(r1, r2, r3) + case 4 => this + } + } + + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else if (p(elem3)) Some(elem3) + else if (p(elem4)) Some(elem4) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set3(elem2, elem3, elem4) + + private[immutable] def buildTo(builder: Builder[A, Set[A]]): builder.type = + builder.addOne(elem1).addOne(elem2).addOne(elem3).addOne(elem4) + } +} + +/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ +abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] + +/** Builder for Set. + * $multipleResults + */ +private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { + private[this] var elems: Set[A] = Set.empty + private[this] var switchedToHashSetBuilder: Boolean = false + private[this] var hashSetBuilder: HashSetBuilder[A] = _ + + override def clear(): Unit = { + elems = Set.empty + if (hashSetBuilder != null) { + hashSetBuilder.clear() + } + switchedToHashSetBuilder = false + } + + override def result(): Set[A] = + if (switchedToHashSetBuilder) hashSetBuilder.result() else elems + + def addOne(elem: A) = { + if (switchedToHashSetBuilder) { + hashSetBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem)) { + () // do nothing + } else { + switchedToHashSetBuilder = true + if (hashSetBuilder == null) { + hashSetBuilder = new HashSetBuilder + } + elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder) + hashSetBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[A]): this.type = + if (switchedToHashSetBuilder) { + hashSetBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala new file mode 100644 index 000000000000..666d8c55bfb0 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala @@ -0,0 +1,177 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder + +/** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys. + * + * Allows for range queries to be performed on its keys, and implementations must guarantee that traversal happens in + * sorted order, according to the map's [[scala.math.Ordering]]. 
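+ *
+ * Unlike an insertion-ordered [[SeqMap]], a sorted map's traversal order is
+ * always determined by the ordering on its keys, independently of the order
+ * in which entries were added.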
+ * + * @example {{{ + * import scala.collection.immutable.SortedMap + * + * // Make a SortedMap via the companion object factory + * val weekdays = SortedMap( + * 2 -> "Monday", + * 3 -> "Tuesday", + * 4 -> "Wednesday", + * 5 -> "Thursday", + * 6 -> "Friday" + * ) + * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) + * + * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") + * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * + * val day3 = days.get(3) // Some("Tuesday") + * + * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) + * + * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) + * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) + * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * }}} + * + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + */ +trait SortedMap[K, +V] + extends Map[K, V] + with collection.SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this + + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault[V1 >: V](d: K => V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue[V1 >: V](d: V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d) +} + +trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] with collection.SortedMapOps[K, V, CC, C] { self => + + protected def coll: C with CC[K, V] + + def unsorted: Map[K, V] + + override def keySet: SortedSet[K] = new ImmutableKeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet { + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = self.rangeImpl(from, until) + new map.ImmutableKeySortedSet + } + def incl(elem: K): SortedSet[K] = fromSpecific(this).incl(elem) + def excl(elem: K): SortedSet[K] = fromSpecific(this).excl(elem) + } + + // We override these methods to fix their return type (which would be `Map` otherwise) + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + @`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K, V1] = { + // Implementation has been copied from `MapOps` + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } + } + override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })(ordering) +} + +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with collection.StrictOptimizedSortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = { + var result: CC[K, V2] = coll + val it = xs.iterator + while (it.hasNext) result = result + it.next() + result + } +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { + + override def from[K: Ordering, V](it: IterableOnce[(K, V)]): SortedMap[K, V] = it match { + case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm + case _ => super.from(it) + } + + final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K => V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable { + + implicit def ordering: Ordering[K] = underlying.ordering + + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory + + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + + override def updated[V1 >: V](key: K, value: V1): 
WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + new WithDefault( underlying.concat(xs) , defaultValue) + + override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala new file mode 100644 index 000000000000..303e5ea9658c --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala @@ -0,0 +1,57 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +/** Base trait for sorted sets */ +trait SortedSet[A] + extends Set[A] + with collection.SortedSet[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + override def unsorted: Set[A] = this + + override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet +} + +/** + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with collection.SortedSetOps[A, CC, C] { + + def unsorted: Set[A] +} + +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with collection.StrictOptimizedSortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { +} + +/** + * $factoryInfo + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { + override def from[E: Ordering](it: IterableOnce[E]): SortedSet[E] = it match { + case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss + case _ => super.from(it) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Stream.scala b/tests/pos-special/stdlib/collection/immutable/Stream.scala new file mode 100644 index 000000000000..ae03641e97dd --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Stream.scala @@ -0,0 +1,568 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.lang.{StringBuilder => JStringBuilder} + +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.SerializeEnd +import scala.collection.mutable.{ArrayBuffer, StringBuilder} +import scala.language.implicitConversions +import Stream.cons + +@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") +@SerialVersionUID(3L) +sealed abstract class Stream[+A] extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, Stream, Stream[A]] + with IterableFactoryDefaults[A, Stream] + with Serializable { + def tail: Stream[A] + + /** Forces evaluation of the whole `Stream` and returns it. + * + * @note Often we use `Stream`s to represent an infinite set or series. If + * that's the case for your particular `Stream` then this function will never + * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. + * + * @return The fully realized `Stream`. + */ + def force: this.type + + override def iterableFactory: SeqFactory[Stream] = Stream + + override protected[this] def className: String = "Stream" + + /** Apply the given function `f` to each element of this linear sequence + * (while respecting the order of the elements). + * + * @param f The treatment to apply to each element. + * @note Overridden here as final to trigger tail-call optimization, which + * replaces 'this' with 'tail' at each iteration. This is absolutely + * necessary for allowing the GC to collect the underlying Stream as elements + * are consumed. + * @note This function will force the realization of the entire Stream + * unless the `f` throws an exception. + */ + @tailrec + override final def foreach[U](f: A => U): Unit = { + if (!this.isEmpty) { + f(head) + tail.foreach(f) + } + } + + @tailrec + override final def find(p: A => Boolean): Option[A] = { + if(isEmpty) None + else if(p(head)) Some(head) + else tail.find(p) + } + + override def take(n: Int): Stream[A] = { + if (n <= 0 || isEmpty) Stream.empty + else if (n == 1) new Stream.Cons(head, Stream.empty) + else new Stream.Cons(head, tail.take(n - 1)) + } + + /** Stream specialization of foldLeft which allows GC to collect along the + * way. + * + * @tparam B The type of value being accumulated. + * @param z The initial value seeded into the function `op`. + * @param op The operation to perform on successive elements of the `Stream`. + * @return The accumulated value from successive applications of `op`. + */ + @tailrec + override final def foldLeft[B](z: B)(op: (B, A) => B): B = { + if (this.isEmpty) z + else tail.foldLeft(op(z, head))(op) + } + + /** The stream resulting from the concatenation of this stream with the argument stream. + * @param rest The collection that gets appended to this stream + * @return The stream containing elements of this stream and the iterable object. + */ + @deprecated("The `append` operation has been renamed `lazyAppendedAll`", "2.13.0") + @inline final def append[B >: A](rest: => IterableOnce[B]): Stream[B] = lazyAppendedAll(rest) + + protected[this] def writeReplace(): AnyRef = + if(nonEmpty && tailDefined) new Stream.SerializationProxy[A](this) else this + + /** Prints elements of this stream one by one, separated by commas. 
*/ + @deprecated(message = """Use print(stream.force.mkString(", ")) instead""", since = "2.13.0") + @inline def print(): Unit = Console.print(this.force.mkString(", ")) + + /** Prints elements of this stream one by one, separated by `sep`. + * @param sep The separator string printed between consecutive elements. + */ + @deprecated(message = "Use print(stream.force.mkString(sep)) instead", since = "2.13.0") + @inline def print(sep: String): Unit = Console.print(this.force.mkString(sep)) + + /** The stream resulting from the concatenation of this stream with the argument stream. + * + * @param suffix The collection that gets appended to this stream + * @return The stream containing elements of this stream and the iterable object. + */ + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): Stream[B] = + if (isEmpty) iterableFactory.from(suffix) else cons[B](head, tail.lazyAppendedAll(suffix)) + + override def scanLeft[B](z: B)(op: (B, A) => B): Stream[B] = + if (isEmpty) z +: iterableFactory.empty + else cons(z, tail.scanLeft(op(z, head))(op)) + + /** Stream specialization of reduceLeft which allows GC to collect + * along the way. + * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `Stream`. + * @return The accumulated value from successive applications of `f`. + */ + override final def reduceLeft[B >: A](f: (B, A) => B): B = { + if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + else { + var reducedRes: B = this.head + var left: Stream[A] = this.tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail + } + reducedRes + } + } + + override def partition(p: A => Boolean): (Stream[A], Stream[A]) = (filter(p(_)), filterNot(p(_))) + + override def filter(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = false) + + override def filterNot(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = true) + + private[immutable] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = { + // optimization: drop leading prefix of elems for which f returns false + // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise + var rest: Stream[A] = coll + while (rest.nonEmpty && p(rest.head) == isFlipped) rest = rest.tail + // private utility func to avoid `this` on stack (would be needed for the lazy arg) + if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped) + else iterableFactory.empty + } + + /** A `collection.WithFilter` which allows GC of the head of stream during processing */ + override final def withFilter(p: A => Boolean): collection.WithFilter[A, Stream] = + Stream.withFilter(coll, p) + + override final def prepended[B >: A](elem: B): Stream[B] = cons(elem, coll) + + override final def map[B](f: A => B): Stream[B] = + if (isEmpty) iterableFactory.empty + else cons(f(head), tail.map(f)) + + @tailrec override final def collect[B](pf: PartialFunction[A, B]): Stream[B] = + if(isEmpty) Stream.empty + else { + var newHead: B = null.asInstanceOf[B] + val runWith = pf.runWith((b: B) => newHead = b) + if(runWith(head)) Stream.collectedTail(newHead, this, pf) + else tail.collect(pf) + } + + @tailrec override final def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if(isEmpty) None + else { + var newHead: B = null.asInstanceOf[B] + val runWith = pf.runWith((b: B) => newHead = b) + if(runWith(head)) Some(newHead) + else tail.collectFirst(pf) + } + + // optimisations are not for speed, but for 
functionality + // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) + override final def flatMap[B](f: A => IterableOnce[B]): Stream[B] = + if (isEmpty) iterableFactory.empty + else { + // establish !prefix.isEmpty || nonEmptyPrefix.isEmpty + var nonEmptyPrefix: Stream[A] = coll + var prefix = iterableFactory.from(f(nonEmptyPrefix.head)) + while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) { + nonEmptyPrefix = nonEmptyPrefix.tail + if(!nonEmptyPrefix.isEmpty) + prefix = iterableFactory.from(f(nonEmptyPrefix.head)) + } + + if (nonEmptyPrefix.isEmpty) iterableFactory.empty + else prefix.lazyAppendedAll(nonEmptyPrefix.tail.flatMap(f)) + } + + override final def zip[B](that: collection.IterableOnce[B]): Stream[(A, B)] = + if (this.isEmpty || that.isEmpty) iterableFactory.empty + else { + val thatIterable = that match { + case that: collection.Iterable[B] => that + case _ => LazyList.from(that) + } + cons[(A, B)]((this.head, thatIterable.head), this.tail.zip(thatIterable.tail)) + } + + override final def zipWithIndex: Stream[(A, Int)] = this.zip(LazyList.from(0)) + + protected def tailDefined: Boolean + + /** Appends all elements of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * Undefined elements are represented with `"_"`, an undefined tail is represented with `"<not computed>"`, + * and cycles are represented with `"<cycle>"`. + * + * @param sb the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. + */ + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + force + addStringNoForce(sb.underlying, start, sep, end) + sb + } + + private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = { + b.append(start) + if (nonEmpty) { + b.append(head) + var cursor = this + def appendCursorElement(): Unit = b.append(sep).append(cursor.head) + if (tailDefined) { // If tailDefined, also !isEmpty + var scout = tail + if (cursor ne scout) { + cursor = scout + if (scout.tailDefined) { + scout = scout.tail + // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings + while ((cursor ne scout) && scout.tailDefined) { + appendCursorElement() + cursor = cursor.tail + scout = scout.tail + if (scout.tailDefined) scout = scout.tail + } + } + } + if (!scout.tailDefined) { // Not a cycle, scout hit an end + while (cursor ne scout) { + appendCursorElement() + cursor = cursor.tail + } + if (cursor.nonEmpty) { + appendCursorElement() + } + } + else { + // Cycle. + // If we have a prefix of length P followed by a cycle of length C, + // the scout will be at position (P%C) in the cycle when the cursor + // enters it at P. They'll then collide when the scout advances another + // C - (P%C) ahead of the cursor. + // If we run the scout P farther, then it will be at the start of + // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner + // starts at the beginning of the prefix, they'll collide exactly at + // the start of the loop. 
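+ // Worked example (added for illustration): with prefix length P = 2 and
+ // cycle length C = 3, the scout sits at cycle position P%C = 2 when the
+ // cursor enters the cycle, so they collide after C - (P%C) = 1 more step,
+ // at cycle position 1. Advancing the scout P = 2 farther gives
+ // (1 + 2) % 3 == 0, the start of the cycle, which is exactly where the
+ // runner below ends up after walking the P-step prefix from the head.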
+ var runner = this + var k = 0 + while (runner ne scout) { + runner = runner.tail + scout = scout.tail + k += 1 + } + // Now runner and scout are at the beginning of the cycle. Advance + // cursor, adding to string, until it hits; then we'll have covered + // everything once. If cursor is already at beginning, we'd better + // advance one first unless runner didn't go anywhere (in which case + // we've already looped once). + if ((cursor eq scout) && (k > 0)) { + appendCursorElement() + cursor = cursor.tail + } + while (cursor ne scout) { + appendCursorElement() + cursor = cursor.tail + } + } + } + if (cursor.nonEmpty) { + // Either undefined or cyclic; we can check with tailDefined + if (!cursor.tailDefined) b.append(sep).append("<not computed>") + else b.append(sep).append("<cycle>") + } + b.append(end) + } + + /** + * @return a string representation of this collection. Undefined elements are + * represented with `"_"`, an undefined tail is represented with `"<not computed>"`, + * and cycles are represented with `"<cycle>"` + * + * Examples: + * + * - `"Stream(_, <not computed>)"`, a non-empty stream, whose head has not been + * evaluated; + * - `"Stream(_, 1, _, <not computed>)"`, a stream with at least three elements, + * the second one has been evaluated; + * - `"Stream(1, 2, 3, <cycle>)"`, an infinite stream that contains + * a cycle at the fourth element. + */ + override def toString = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString + + @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") + override def hasDefiniteSize: Boolean = isEmpty || { + if (!tailDefined) false + else { + // Two-iterator trick (2x & 1x speed) for cycle detection. + var those = this + var these = tail + while (those ne these) { + if (these.isEmpty) return true + if (!these.tailDefined) return false + these = these.tail + if (these.isEmpty) return true + if (!these.tailDefined) return false + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } + } +} + +@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") +@SerialVersionUID(3L) +object Stream extends SeqFactory[Stream] { + + /* !!! #11997 This `object cons` must be defined lexically *before* `class Cons` below. + * Otherwise it prevents Scala.js from building on Windows. + */ + /** An alternative way of building and matching Streams using Stream.cons(hd, tl). + */ + object cons { + /** A stream consisting of a given first element and remaining elements + * @param hd The first element of the result stream + * @param tl The remaining elements of the result stream + */ + def apply[A](hd: A, tl: => Stream[A]): Stream[A] = new Cons(hd, tl) + + /** Maps a stream to its head and tail */ + def unapply[A](xs: Stream[A]): Option[(A, Stream[A])] = #::.unapply(xs) + } + + //@SerialVersionUID(3L) //TODO Putting an annotation on Stream.empty causes a cyclic dependency in unpickling + object Empty extends Stream[Nothing] { + override def isEmpty: Boolean = true + override def head: Nothing = throw new NoSuchElementException("head of empty stream") + override def tail: Stream[Nothing] = throw new UnsupportedOperationException("tail of empty stream") + /** Forces evaluation of the whole `Stream` and returns it. + * + * @note Often we use `Stream`s to represent an infinite set or series.
If + * that's the case for your particular `Stream` then this function will never + * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. + * + * @return The fully realized `Stream`. + */ + def force: this.type = this + override def knownSize: Int = 0 + protected def tailDefined: Boolean = false + } + + @SerialVersionUID(3L) + final class Cons[A](override val head: A, tl: => Stream[A]) extends Stream[A] { + override def isEmpty: Boolean = false + @volatile private[this] var tlVal: Stream[A] = _ + @volatile private[this] var tlGen = () => tl + protected def tailDefined: Boolean = tlGen eq null + override def tail: Stream[A] = { + if (!tailDefined) + synchronized { + if (!tailDefined) { + tlVal = tlGen() + tlGen = null + } + } + tlVal + } + + /** Forces evaluation of the whole `Stream` and returns it. + * + * @note Often we use `Stream`s to represent an infinite set or series. If + * that's the case for your particular `Stream` then this function will never + * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. + * + * @return The fully realized `Stream`. + */ + def force: this.type = { + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those: Stream[A] = this + if (!these.isEmpty) these = these.tail + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail + } + this + } + + } + + implicit def toDeferrer[A](l: => Stream[A]): Deferrer[A] = new Deferrer[A](() => l) + + final class Deferrer[A] private[Stream] (private val l: () => Stream[A]) extends AnyVal { + /** Construct a Stream consisting of a given first element followed by elements + * from another Stream. + */ + def #:: [B >: A](elem: B): Stream[B] = new Cons(elem, l()) + /** Construct a Stream consisting of the concatenation of the given Stream and + * another Stream. + */ + def #:::[B >: A](prefix: Stream[B]): Stream[B] = prefix lazyAppendedAll l() + } + + object #:: { + def unapply[A](s: Stream[A]): Option[(A, Stream[A])] = + if (s.nonEmpty) Some((s.head, s.tail)) else None + } + + def from[A](coll: collection.IterableOnce[A]): Stream[A] = coll match { + case coll: Stream[A] => coll + case _ => fromIterator(coll.iterator) + } + + /** + * @return A `Stream[A]` that gets its elements from the given `Iterator`. 
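+ *
+ * An illustrative sketch:
+ * {{{
+ *   val s = Stream.fromIterator(Iterator(1, 2, 3))
+ *   s.head // 1; only the tail is deferred until demanded
+ * }}}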
+ * + * @param it Source iterator + * @tparam A type of elements + */ + // Note that the resulting `Stream` will be effectively iterable more than once because + // `Stream` memoizes its elements + def fromIterator[A](it: Iterator[A]): Stream[A] = + if (it.hasNext) { + new Stream.Cons(it.next(), fromIterator(it)) + } else Stream.Empty + + def empty[A]: Stream[A] = Empty + + override def newBuilder[A]: mutable.Builder[A, Stream[A]] = ArrayBuffer.newBuilder[A].mapResult(array => from(array)) + + private[immutable] def withFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean): collection.WithFilter[A, Stream] = + new WithFilter[A](l, p) + + private[this] final class WithFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean) extends collection.WithFilter[A, Stream] { + private[this] var s = l // set to null to allow GC after filtered + private[this] lazy val filtered: Stream[A] = { val f = s.filter(p); s = null.asInstanceOf[Stream[A]]; f } // don't set to null if throw during filter + def map[B](f: A => B): Stream[B] = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]): Stream[B] = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => Boolean): collection.WithFilter[A, Stream] = new WithFilter(filtered, q) + } + + /** An infinite Stream that repeatedly applies a given function to a start value. + * + * @param start the start value of the Stream + * @param f the function that's repeatedly applied + * @return the Stream returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A)(f: A => A): Stream[A] = { + cons(start, iterate(f(start))(f)) + } + + /** + * Create an infinite Stream starting at `start` and incrementing by + * step `step`. + * + * @param start the start value of the Stream + * @param step the increment value of the Stream + * @return the Stream starting at value `start`. + */ + def from(start: Int, step: Int): Stream[Int] = + cons(start, from(start + step, step)) + + /** + * Create an infinite Stream starting at `start` and incrementing by `1`. + * + * @param start the start value of the Stream + * @return the Stream starting at value `start`. + */ + def from(start: Int): Stream[Int] = from(start, 1) + + /** + * Create an infinite Stream containing the given element expression (which + * is computed for each occurrence). + * + * @param elem the element composing the resulting Stream + * @return the Stream containing an infinite number of elem + */ + def continually[A](elem: => A): Stream[A] = cons(elem, continually(elem)) + + + private[Stream] def filteredTail[A](stream: Stream[A] @uncheckedVariance, p: A => Boolean, isFlipped: Boolean) = { + cons(stream.head, stream.tail.filterImpl(p, isFlipped)) + } + + private[Stream] def collectedTail[A, B](head: B, stream: Stream[A] @uncheckedVariance, pf: PartialFunction[A, B]) = { + cons(head, stream.tail.collect(pf)) + } + + /** This serialization proxy is used for Streams which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization + * of long evaluated streams without exhausting the stack through recursive serialization of cons cells. 
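+ * Concretely, `writeObject` below emits each evaluated element in order, then
+ * the `SerializeEnd` marker, then the remaining unevaluated `Stream` as a
+ * single object; `readObject` reads the prefix back into a buffer and
+ * prepends it to the deserialized tail.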
+ */ + @SerialVersionUID(3L) + class SerializationProxy[A](@transient protected var coll: Stream[A]) extends Serializable { + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while(these.nonEmpty && these.tailDefined) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new ArrayBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[Stream[A]] + coll = (init ++: tail) + } + + protected[this] def readResolve(): Any = coll + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala new file mode 100644 index 000000000000..db5192edc36c --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala @@ -0,0 +1,80 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +/** + * Trait that overrides operations to take advantage of strict builders. + */ +trait StrictOptimizedSeqOps[+A, +CC[_], +C] + extends Any + with SeqOps[A, CC, C] + with collection.StrictOptimizedSeqOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def distinctBy[B](f: A => B): C = { + if (lengthCompare(1) <= 0) coll + else { + val builder = newSpecificBuilder + val seen = mutable.HashSet.empty[B] + val it = this.iterator + var different = false + while (it.hasNext) { + val next = it.next() + if (seen.add(f(next))) builder += next else different = true + } + if (different) builder.result() else coll + } + } + + override def updated[B >: A](index: Int, elem: B): CC[B] = { + if (index < 0) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${if (knownSize>=0) knownSize else "unknown"})") + val b = iterableFactory.newBuilder[B] + if (knownSize >= 0) { + b.sizeHint(size) + } + var i = 0 + val it = iterator + while (i < index && it.hasNext) { + b += it.next() + i += 1 + } + if (!it.hasNext) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${i-1})") + b += elem + it.next() + while (it.hasNext) b += it.next() + b.result() + } + + override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = { + val b = iterableFactory.newBuilder[B] + var i = 0 + val it = iterator + while (i < from && it.hasNext) { + b += it.next() + i += 1 + } + b ++= other + i = replaced + while (i > 0 && it.hasNext) { + it.next() + i -= 1 + } + while (it.hasNext) b += it.next() + b.result() + } + + override def sorted[B >: A](implicit ord: Ordering[B]): C = super.sorted(ord) + +} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala new file mode 100644 index 000000000000..a51c7b9e7bf6 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala @@ -0,0 +1,370 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.tailrec
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.immutable.{RedBlackTree => RB}
+import scala.collection.mutable.ReusableBuilder
+import scala.runtime.AbstractFunction2
+
+/** An immutable SortedMap whose values are stored in a red-black tree.
+ *
+ * This class is optimal when range queries will be performed,
+ * or when traversal in order of an ordering is desired.
+ * If you only need key lookups, and don't care in which order key-values
+ * are traversed, consider using [[scala.collection.immutable.HashMap]],
+ * which will generally have better performance. If you need insertion order,
+ * consider a [[scala.collection.immutable.SeqMap]], which does not need to
+ * have an ordering supplied.
+ *
+ * @example {{{
+ *   import scala.collection.immutable.TreeMap
+ *
+ *   // Make a TreeMap via the companion object factory
+ *   val weekdays = TreeMap(
+ *     2 -> "Monday",
+ *     3 -> "Tuesday",
+ *     4 -> "Wednesday",
+ *     5 -> "Thursday",
+ *     6 -> "Friday"
+ *   )
+ *   // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday)
+ *
+ *   val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday")
+ *   // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday)
+ *
+ *   val day3 = days.get(3) // Some("Tuesday")
+ *
+ *   val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday)
+ *
+ *   val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday)
+ *   val daysTo2 = days.rangeTo(2)       // TreeMap(1 -> Sunday, 2 -> Monday)
+ *   val daysAfter5 = days.rangeFrom(5)  // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday)
+ * }}}
+ *
+ * @tparam K the type of the keys contained in this tree map.
+ * @tparam V the type of the values associated with the keys.
+ * @param ordering the implicit ordering used to compare objects of type `K`.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]]
+ * section on `Red-Black Trees` for more information.
+ * + * @define Coll immutable.TreeMap + * @define coll immutable tree map + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { + + def this()(implicit ordering: Ordering[K]) = this(null)(ordering) + private[immutable] def tree0: RB.Tree[K, V] = tree + + private[this] def newMapOrSelf[V1 >: V](t: RB.Tree[K, V1]): TreeMap[K, V1] = if(t eq tree) this else new TreeMap[K, V1](t) + + override def sortedMapFactory: SortedMapFactory[TreeMap] = TreeMap + + def iterator: Iterator[(K, V)] = RB.iterator(tree) + + def keysIteratorFrom(start: K): Iterator[K] = RB.keysIterator(tree, Some(start)) + + override def keySet: TreeSet[K] = new TreeSet(tree)(ordering) + + def iteratorFrom(start: K): Iterator[(K, V)] = RB.iterator(tree, Some(start)) + + override def valuesIteratorFrom(start: K): Iterator[V] = RB.valuesIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Tree[K, V]]( + size, tree, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree, _.left, _.right, _.value.asInstanceOf[V])) + } + s.asInstanceOf[S with EfficientSplit] + } + + def get(key: K): Option[V] = RB.get(tree, key) + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val resultOrNull = RB.lookup(tree, key) + if (resultOrNull eq null) default + else resultOrNull.value + } + + def removed(key: K): TreeMap[K,V] = + newMapOrSelf(RB.delete(tree, key)) + + def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] = + newMapOrSelf(RB.update(tree, key, value, overwrite = true)) + + override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]): TreeMap[K, V1] = + newMapOrSelf(that match { + case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => + 
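+        // Same ordering: merge the two red-black trees directly instead of
+        // rebuilding the map entry by entry.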
RB.union(tree, tm.tree) + case ls: LinearSeq[(K,V1)] => + if (ls.isEmpty) tree //to avoid the creation of the adder + else { + val adder = new Adder[V1] + adder.addAll(ls) + adder.finalTree + } + case _ => + val adder = new Adder[V1] + val it = that.iterator + while (it.hasNext) { + adder.apply(it.next()) + } + adder.finalTree + }) + + override def removedAll(keys: IterableOnce[K]): TreeMap[K, V] = keys match { + case ts: TreeSet[K] if ordering == ts.ordering => + newMapOrSelf(RB.difference(tree, ts.tree)) + case _ => super.removedAll(keys) + } + + /** A new TreeMap with the entry added is returned, + * assuming that key is not in the TreeMap. + * + * @tparam V1 type of the values of the new bindings, a supertype of `V` + * @param key the key to be inserted + * @param value the value to be associated with `key` + * @return a new $coll with the inserted binding, if it wasn't present in the map + */ + @deprecated("Use `updated` instead", "2.13.0") + def insert[V1 >: V](key: K, value: V1): TreeMap[K, V1] = { + assert(!RB.contains(tree, key)) + updated(key, value) + } + + def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = newMapOrSelf(RB.rangeImpl(tree, from, until)) + + override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def range(from: K, until: K): TreeMap[K,V] = newMapOrSelf(RB.range(tree, from, until)) + + override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) + override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) + override def size: Int = RB.count(tree) + override def knownSize: Int = size + + override def isEmpty = size == 0 + + override def firstKey: K = RB.smallest(tree).key + + override def lastKey: K = RB.greatest(tree).key + + override def head: (K, V) = { + val smallest = RB.smallest(tree) + (smallest.key, smallest.value) + } + + override def last: (K, V) = { + val greatest = RB.greatest(tree) + (greatest.key, greatest.value) + } + + override def tail: TreeMap[K, V] = new TreeMap(RB.tail(tree)) + + override def init: TreeMap[K, V] = new TreeMap(RB.init(tree)) + + override def drop(n: Int): TreeMap[K, V] = { + if (n <= 0) this + else if (n >= size) empty + else new TreeMap(RB.drop(tree, n)) + } + + override def take(n: Int): TreeMap[K, V] = { + if (n <= 0) empty + else if (n >= size) this + else new TreeMap(RB.take(tree, n)) + } + + override def slice(from: Int, until: Int) = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else new TreeMap(RB.slice(tree, from, until)) + } + + override def dropRight(n: Int): TreeMap[K, V] = take(size - math.max(n, 0)) + + override def takeRight(n: Int): TreeMap[K, V] = drop(size - math.max(n, 0)) + + private[this] def countWhile(p: ((K, V)) => Boolean): Int = { + var result = 0 + val it = iterator + while (it.hasNext && p(it.next())) result += 1 + result + } + + override def dropWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = drop(countWhile(p)) + + override def takeWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = take(countWhile(p)) + + override def span(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = splitAt(countWhile(p)) + + override def filter(f: ((K, V)) => Boolean): TreeMap[K, V] = + newMapOrSelf(RB.filterEntries[K, V](tree, (k, v) => f((k, v)))) + + override def 
partition(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = { + val (l, r) = RB.partitionEntries[K, V](tree, (k, v) => p((k, v))) + (newMapOrSelf(l), newMapOrSelf(r)) + } + + override def transform[W](f: (K, V) => W): TreeMap[K, W] = { + val t2 = RB.transform[K, V, W](tree, f) + if(t2 eq tree) this.asInstanceOf[TreeMap[K, W]] + else new TreeMap(t2) + } + + private final class Adder[B1 >: V] + extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] { + private var currentMutableTree: RB.Tree[K,B1] = tree0 + def finalTree = beforePublish(currentMutableTree) + override def apply(kv: (K, B1)): Unit = { + currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) + } + @tailrec def addAll(ls: LinearSeq[(K, B1)]): Unit = { + if (!ls.isEmpty) { + val kv = ls.head + currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) + addAll(ls.tail) + } + } + } + override def equals(obj: Any): Boolean = obj match { + case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) + case _ => super.equals(obj) + } + + override protected[this] def className = "TreeMap" +} + +/** $factoryInfo + * @define Coll immutable.TreeMap + * @define coll immutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap() + + def from[K, V](it: IterableOnce[(K, V)])(implicit ordering: Ordering[K]): TreeMap[K, V] = + it match { + case tm: TreeMap[K, V] if ordering == tm.ordering => tm + case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering => + new TreeMap[K, V](RB.fromOrderedEntries(sm.iterator, sm.size)) + case _ => + var t: RB.Tree[K, V] = null + val i = it.iterator + while (i.hasNext) { + val (k, v) = i.next() + t = RB.update(t, k, v, overwrite = true) + } + new TreeMap[K, V](t) + } + + def newBuilder[K, V](implicit ordering: Ordering[K]): ReusableBuilder[(K, V), TreeMap[K, V]] = new TreeMapBuilder[K, V] + + private class TreeMapBuilder[K, V](implicit ordering: Ordering[K]) + extends RB.MapHelper[K, V] + with ReusableBuilder[(K, V), TreeMap[K, V]] { + type Tree = RB.Tree[K, V] + private var tree:Tree = null + + def addOne(elem: (K, V)): this.type = { + tree = mutableUpd(tree, elem._1, elem._2) + this + } + private object adder extends AbstractFunction2[K, V, Unit] { + // we cache tree to avoid the outer access to tree + // in the hot path (apply) + private[this] var accumulator :Tree = null + def addForEach(hasForEach: collection.Map[K, V]): Unit = { + accumulator = tree + hasForEach.foreachEntry(this) + tree = accumulator + // be friendly to GC + accumulator = null + } + + override def apply(key: K, value: V): Unit = { + accumulator = mutableUpd(accumulator, key, value) + } + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + xs match { + // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= + // for the moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeMap[K, V] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0) + case that: collection.Map[K, V] => + //add avoiding creation of tuples + adder.addForEach(that) + case _ => + super.addAll(xs) + } + this + } + + override def clear(): Unit = { + tree = null + } + + override def result(): TreeMap[K, V] = new TreeMap[K, V](beforePublish(tree)) + } +} diff --git 
a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala new file mode 100644 index 000000000000..80bafb1cf3be --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala @@ -0,0 +1,649 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec + +/** This class implements an immutable map that preserves order using + * a hash map for the key to value mapping to provide efficient lookup, + * and a tree for the ordering of the keys to provide efficient + * insertion/modification order traversal and destructuring. + * + * By default insertion order (`TreeSeqMap.OrderBy.Insertion`) + * is used, but modification order (`TreeSeqMap.OrderBy.Modification`) + * can be used instead if so specified at creation. + * + * The `orderingBy(orderBy: TreeSeqMap.OrderBy): TreeSeqMap[K, V]` method + * can be used to switch to the specified ordering for the returned map. + * + * A key can be manually refreshed (i.e. placed at the end) via the + * `refresh(key: K): TreeSeqMap[K, V]` method (regardless of the ordering in + * use). + * + * Internally, an ordinal counter is increased for each insertion/modification + * and then the current ordinal is used as key in the tree map. After 2^32^ + * insertions/modifications the entire map is copied (thus resetting the ordinal + * counter). + * + * @tparam K the type of the keys contained in this map. + * @tparam V the type of the values associated with the keys in this map. + * @define coll immutable tree seq map + * @define Coll `immutable.TreeSeqMap` + */ +final class TreeSeqMap[K, +V] private ( + private val ordering: TreeSeqMap.Ordering[K], + private val mapping: TreeSeqMap.Mapping[K, V], + private val ordinal: Int, + val orderedBy: TreeSeqMap.OrderBy) + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeSeqMap[K, V]] + with StrictOptimizedMapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with MapFactoryDefaults[K, V, TreeSeqMap, Iterable] { + + import TreeSeqMap._ + + override protected[this] def className: String = "TreeSeqMap" + + override def mapFactory: MapFactory[TreeSeqMap] = TreeSeqMap + + override val size = mapping.size + + override def knownSize: Int = size + + override def isEmpty = size == 0 + + /* + // This should have been overridden in 2.13.0 but wasn't so it will have to wait since it is not forwards compatible + // Now handled in inherited method from scala.collection.MapFactoryDefaults instead. + override def empty = TreeSeqMap.empty[K, V](orderedBy) + */ + + def orderingBy(orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == this.orderedBy) this + else if (isEmpty) TreeSeqMap.empty(orderBy) + else new TreeSeqMap(ordering, mapping, ordinal, orderBy) + } + + def updated[V1 >: V](key: K, value: V1): TreeSeqMap[K, V1] = { + mapping.get(key) match { + case e if ordinal == -1 && (orderedBy == OrderBy.Modification || e.isEmpty) => + // Reinsert into fresh instance to restart ordinal counting, expensive but only done after 2^32 updates. 
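+        // (`increment` wraps Int.MaxValue to Int.MinValue, so -1 is the last
+        // ordinal handed out before the counter would wrap back to 0 and
+        // collide with ordinals still present in the tree.)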
+ TreeSeqMap.empty[K, V1](orderedBy) ++ this + (key -> value) + case Some((o, _)) if orderedBy == OrderBy.Insertion => + new TreeSeqMap( + ordering.include(o, key), + mapping.updated[(Int, V1)](key, (o, value)), + ordinal, // Do not increment the ordinal since the key is already present, i.e. o <= ordinal. + orderedBy) + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + case None => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + } + } + + def removed(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + new TreeSeqMap( + ordering.exclude(o), + mapping.removed(key), + ordinal, + orderedBy) + case None => + this + } + } + + def refresh(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping, + o1, + orderedBy) + case None => + this + } + } + + def get(key: K): Option[V] = mapping.get(key).map(value) + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): (K, V) = binding(iter.next()) + } + + override def keysIterator: Iterator[K] = new AbstractIterator[K] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): K = iter.next() + } + + override def valuesIterator: Iterator[V] = new AbstractIterator[V] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): V = value(binding(iter.next())) + } + + override def contains(key: K): Boolean = mapping.contains(key) + + override def head: (K, V) = binding(ordering.head) + + override def headOption = ordering.headOption.map(binding) + + override def last: (K, V) = binding(ordering.last) + + override def lastOption: Option[(K, V)] = ordering.lastOption.map(binding) + + override def tail: TreeSeqMap[K, V] = { + val (head, tail) = ordering.headTail + new TreeSeqMap(tail, mapping.removed(head), ordinal, orderedBy) + } + + override def init: TreeSeqMap[K, V] = { + val (init, last) = ordering.initLast + new TreeSeqMap(init, mapping.removed(last), ordinal, orderedBy) + } + + override def slice(from: Int, until: Int): TreeSeqMap[K, V] = { + val sz = size + if (sz == 0 || from >= until) TreeSeqMap.empty[K, V](orderedBy) + else { + val sz = size + val f = if (from >= 0) from else 0 + val u = if (until <= sz) until else sz + val l = u - f + if (l <= 0) TreeSeqMap.empty[K, V](orderedBy) + else if (l > sz / 2) { + // Remove front and rear incrementally if majority of elements are to be kept + val (front, rest) = ordering.splitAt(f) + val (ong, rear) = rest.splitAt(l) + var mng = this.mapping + val frontIter = front.iterator + while (frontIter.hasNext) { + mng = mng - frontIter.next() + } + val rearIter = rear.iterator + while (rearIter.hasNext) { + mng = mng - rearIter.next() + } + new TreeSeqMap(ong, mng, ordinal, orderedBy) + } else { + // Populate with builder otherwise + val bdr = newBuilder[K, V](orderedBy) + val iter = ordering.iterator + var i = 0 + while (i < f) { + iter.next() + i += 1 + } + while (i < u) { + val k = iter.next() + bdr.addOne((k, mapping(k)._2)) + i += 1 + } + bdr.result() + } + } + } + + override def map[K2, V2](f: 
((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val (k2, v2) = f((k, v)) + bdr.addOne((k2, v2)) + } + bdr.result() + } + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val jter = f((k, v)).iterator + while (jter.hasNext) { + val (k2, v2) = jter.next() + bdr.addOne((k2, v2)) + } + } + bdr.result() + } + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + pf.runWith({ case (k2, v2) => bdr.addOne((k2, v2)) })((k, v)) + } + bdr.result() + } + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): TreeSeqMap[K, V2] = { + var ong: Ordering[K] = ordering + var mng: Mapping[K, V2] = mapping + var ord = increment(ordinal) + val iter = suffix.iterator + while (iter.hasNext) { + val (k, v2) = iter.next() + mng.get(k) match { + case Some((o, v)) => + if (orderedBy == OrderBy.Insertion && v != v2) mng = mng.updated(k, (o, v2)) + else if (orderedBy == OrderBy.Modification) { + mng = mng.updated(k, (ord, v2)) + ong = ong.exclude(o).append(ord, k) + ord = increment(ord) + } + case None => + mng = mng.updated(k, (ord, v2)) + ong = ong.append(ord, k) + ord = increment(ord) + } + } + new TreeSeqMap[K, V2](ong, mng, ord, orderedBy) + } + + @`inline` private[this] def value(p: (_, V)) = p._2 + @`inline` private[this] def binding(k: K) = mapping(k).copy(_1 = k) +} +object TreeSeqMap extends MapFactory[TreeSeqMap] { + sealed trait OrderBy + object OrderBy { + case object Insertion extends OrderBy + case object Modification extends OrderBy + } + + private val EmptyByInsertion = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Insertion) + private val EmptyByModification = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Modification) + val Empty = EmptyByInsertion + def empty[K, V]: TreeSeqMap[K, V] = empty(OrderBy.Insertion) + def empty[K, V](orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == OrderBy.Modification) EmptyByModification + else EmptyByInsertion + }.asInstanceOf[TreeSeqMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): TreeSeqMap[K, V] = + it match { + case om: TreeSeqMap[K, V] => om + case _ => (newBuilder[K, V] ++= it).result() + } + + @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1 + + def newBuilder[K, V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) + def newBuilder[K, V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) + + final class Builder[K, V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { + private[this] val bdr = new MapBuilderImpl[K, (Int, V)] + private[this] var ong = Ordering.empty[K] + private[this] var ord = 0 + private[this] var aliased: TreeSeqMap[K, V] = _ + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + bdr.getOrElse(key, null) match { + case (o, v) => + if (orderedBy == 
OrderBy.Insertion && v != value) bdr.addOne(key, (o, value))
+          else if (orderedBy == OrderBy.Modification) {
+            bdr.addOne(key, (ord, value))
+            ong = ong.exclude(o).appendInPlace(ord, key)
+            ord = increment(ord)
+          }
+        case null =>
+          bdr.addOne(key, (ord, value))
+          ong = ong.appendInPlace(ord, key)
+          ord = increment(ord)
+        }
+      }
+      this
+    }
+
+    override def clear(): Unit = {
+      ong = Ordering.empty
+      ord = 0
+      bdr.clear()
+      aliased = null
+    }
+
+    override def result(): TreeSeqMap[K, V] = {
+      if (aliased eq null) {
+        aliased = new TreeSeqMap(ong, bdr.result(), ord, orderedBy)
+      }
+      aliased
+    }
+  }
+
+  private type Mapping[K, +V] = Map[K, (Int, V)]
+  @annotation.unused
+  private val Mapping = Map
+
+  /* The ordering implementation below is an adapted version of immutable.IntMap. */
+  private[immutable] object Ordering {
+    import scala.collection.generic.BitOperations.Int._
+
+    @inline private[immutable] def toBinaryString(i: Int): String = s"$i/${i.toBinaryString}"
+
+    def empty[T]: Ordering[T] = Zero
+
+    def apply[T](elems: (Int, T)*): Ordering[T] =
+      elems.foldLeft(empty[T])((x, y) => x.include(y._1, y._2))
+
+    // Iterator over a non-empty Ordering.
+    final class Iterator[+V](it: Ordering[V]) {
+      // Basically this uses a simple stack to emulate recursion over the tree. However,
+      // because we know that Ints are at least 32 bits, we can have at most 32 Bins and
+      // one Tip sitting on the tree at any point. Therefore we know the maximum stack
+      // depth is 33.
+      private[this] var index = 0
+      private[this] val buffer = new Array[AnyRef](33)
+
+      private[this] def pop = {
+        index -= 1
+        buffer(index).asInstanceOf[Ordering[V]]
+      }
+
+      private[this] def push[V2 >: V](x: Ordering[V2]): Unit = {
+        buffer(index) = x.asInstanceOf[AnyRef]
+        index += 1
+      }
+
+      if (it != Zero) push(it)
+
+      def hasNext = index != 0
+      @tailrec
+      def next(): V =
+        pop match {
+          case Bin(_,_, Tip(_, v), right) =>
+            push(right)
+            v
+          case Bin(_, _, left, right) =>
+            push(right)
+            push(left)
+            next()
+          case Tip(_, v) => v
+          // This should never happen. We don't allow Ordering.Zero in subtrees of the Ordering
+          // and don't return an Ordering.Iterator for Ordering.Zero.
+          case Zero => throw new IllegalStateException("empty subtree not allowed")
+        }
+    }
+
+    object Iterator {
+      val Empty = new Iterator[Nothing](Ordering.empty[Nothing])
+      def empty[V]: Iterator[V] = Empty.asInstanceOf[Iterator[V]]
+    }
+
+    case object Zero extends Ordering[Nothing] {
+      // Important! Without this equals method in place, an infinite
+      // loop from Map.equals => size => pattern-match-on-Zero => equals
+      // develops. Case objects and custom equality don't mix without
+      // careful handling.
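+      // `this.type` below matches only the Zero singleton; Tip and Bin are
+      // nonempty by construction, so reference identity is decisive here.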
+      override def equals(that : Any): Boolean = that match {
+        case _: this.type => true
+        case _: Ordering[_] => false // The only empty Orderings are eq Zero
+        case _ => super.equals(that)
+      }
+      protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Ø"
+    }
+
+    final case class Tip[+T](ord: Int, value: T) extends Ordering[T] {
+      def withValue[S](s: S) =
+        if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[Tip[S]]
+        else Tip(ord, s)
+      protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n"
+    }
+
+    final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] {
+      def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = {
+        if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]]
+        else Bin[S](prefix, mask, left, right)
+      }
+      protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = {
+        sb ++= s"${prefix}Bin(${toBinaryString(this.prefix)}:${toBinaryString(mask)})\n"
+        left.format(sb, subPrefix + "├── ", subPrefix + "│   ")
+        right.format(sb, subPrefix + "└── ", subPrefix + "    ")
+      }
+    }
+
+    private def branchMask(i: Int, j: Int) = highestOneBit(i ^ j)
+
+    private def join[T](p1: Int, t1: Ordering[T], p2: Int, t2: Ordering[T]): Ordering[T] = {
+      val m = branchMask(p1, p2)
+      val p = mask(p1, m)
+      if (zero(p1, m)) Bin(p, m, t1, t2)
+      else Bin(p, m, t2, t1)
+    }
+
+    private def bin[T](prefix: Int, mask: Int, left: Ordering[T], right: Ordering[T]): Ordering[T] = (left, right) match {
+      case (l, Zero) => l
+      case (Zero, r) => r
+      case (l, r) => Bin(prefix, mask, l, r)
+    }
+  }
+
+  sealed abstract class Ordering[+T] {
+    import Ordering._
+    import scala.annotation.tailrec
+    import scala.collection.generic.BitOperations.Int._
+
+    override final def toString: String = format
+    final def format: String = {
+      val sb = new StringBuilder
+      format(sb, "", "")
+      sb.toString()
+    }
+    protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit
+
+    @tailrec
+    final def head: T = this match {
+      case Zero => throw new NoSuchElementException("head of empty map")
+      case Tip(k, v) => v
+      case Bin(_, _, l, _) => l.head
+    }
+
+    @tailrec
+    final def headOption: Option[T] = this match {
+      case Zero => None
+      case Tip(_, v) => Some(v)
+      case Bin(_, _, l, _) => l.headOption
+    }
+
+    @tailrec
+    final def last: T = this match {
+      case Zero => throw new NoSuchElementException("last of empty map")
+      case Tip(_, v) => v
+      case Bin(_, _, _, r) => r.last
+    }
+
+    @tailrec
+    final def lastOption: Option[T] = this match {
+      case Zero => None
+      case Tip(_, v) => Some(v)
+      case Bin(_, _, _, r) => r.lastOption
+    }
+
+    @tailrec
+    final def ordinal: Int = this match {
+      case Zero => 0
+      case Tip(o, _) => o
+      case Bin(_, _, _, r) => r.ordinal
+    }
+
+    final def tail: Ordering[T] = this match {
+      case Zero => throw new NoSuchElementException("tail of empty map")
+      case Tip(_, _) => Zero
+      case Bin(p, m, l, r) => bin(p, m, l.tail, r)
+    }
+
+    final def headTail: (T, Ordering[T]) = this match {
+      case Zero => throw new NoSuchElementException("headTail of empty map")
+      case Tip(_, v) => (v, Zero)
+      case Bin(p, m, l, r) =>
+        val (head, tail) = l.headTail
+        (head, bin(p, m, tail, r))
+    }
+
+    final def init: Ordering[T] = this match {
+      case Zero => throw new NoSuchElementException("init of empty map")
+      case Tip(_, _) => Zero
+      case Bin(p, m, l, r) =>
+        bin(p, m, l, r.init)
+    }
+
+    final def initLast: (Ordering[T], T) = this match {
+      case Zero => throw new NoSuchElementException("initLast of empty map")
+      case Tip(_, v) => (Zero, v)
+      case Bin(p, m, l, r) =>
+        val (init, last) = r.initLast
+        (bin(p, m, l, init), last)
+    }
+
+    final def iterator: Iterator[T] = this match {
+      case Zero => Iterator.empty
+      case _ => new Iterator(this)
+    }
+
+    final def include[S >: T](ordinal: Int, value: S): Ordering[S] = this match {
+      case Zero =>
+        Tip(ordinal, value)
+      case Tip(o, _) =>
+        if (ordinal == o) Tip(ordinal, value)
+        else join(ordinal, Tip(ordinal, value), o, this)
+      case Bin(p, m, l, r) =>
+        if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this)
+        else if (zero(ordinal, m)) Bin(p, m, l.include(ordinal, value), r)
+        else Bin(p, m, l, r.include(ordinal, value))
+    }
+
+    final def append[S >: T](ordinal: Int, value: S): Ordering[S] = this match {
+      case Zero =>
+        Tip(ordinal, value)
+      case Tip(o, _) =>
+        if (ordinal == o) Tip(ordinal, value)
+        else join(ordinal, Tip(ordinal, value), o, this)
+      case Bin(p, m, l, r) =>
+        if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this)
+        else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}")
+        else Bin(p, m, l, r.append(ordinal, value))
+    }
+
+    @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null, ordinal, value)
+    private[collection] final def appendInPlace1[S >: T](parent: Bin[S], ordinal: Int, value: S): Ordering[S] = this match {
+      case Zero =>
+        Tip(ordinal, value)
+      case Tip(o, _) if o >= ordinal =>
+        throw new IllegalArgumentException(s"Append called with ordinal out of range: $o is not greater than current max ordinal ${this.ordinal}")
+      case Tip(o, _) if parent == null =>
+        join(ordinal, Tip(ordinal, value), o, this)
+      case Tip(o, _) =>
+        parent.right = join(ordinal, Tip(ordinal, value), o, this)
+        parent
+      case b @ Bin(p, m, _, r) =>
+        if (!hasMatch(ordinal, p, m)) {
+          val b2 = join(ordinal, Tip(ordinal, value), p, this)
+          if (parent != null) {
+            parent.right = b2
+            parent
+          } else b2
+        } else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}")
+        else {
+          r.appendInPlace1(b, ordinal, value)
+          this
+        }
+    }
+
+    final def exclude(ordinal: Int): Ordering[T] = this match {
+      case Zero =>
+        Zero
+      case Tip(o, _) =>
+        if (ordinal == o) Zero
+        else this
+      case Bin(p, m, l, r) =>
+        if (!hasMatch(ordinal, p, m)) this
+        else if (zero(ordinal, m)) bin(p, m, l.exclude(ordinal), r)
+        else bin(p, m, l, r.exclude(ordinal))
+    }
+
+    final def splitAt(n: Int): (Ordering[T], Ordering[T]) = {
+      var rear = Ordering.empty[T]
+      var i = n
+      (modifyOrRemove { (o, v) =>
+        i -= 1
+        if (i >= 0) Some(v)
+        else {
+          rear = rear.appendInPlace(o, v)
+          None
+        }
+      }, rear)
+    }
+
+    /**
+     * A combined transform and filter function. Returns an `Ordering` such that
+     * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+     * the map contains no mapping for key, and if `f(key, value) == Some(x)` the
+     * map contains `(key, x)`.
+     *
+     * @tparam S  The type of the values in the resulting `Ordering`.
+     * @param f   The transforming function.
+     * @return    The modified map.
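+     *
+     * For instance (a sketch, assuming an `Ordering[Int]` named `ord`), keeping
+     * only the even values and doubling the ones that remain:
+     * {{{
+     *   ord.modifyOrRemove((_, v) => if (v % 2 == 0) Some(v * 2) else None)
+     * }}}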
+ */ + final def modifyOrRemove[S](f: (Int, T) => Option[S]): Ordering[S] = this match { + case Zero => Zero + case Tip(key, value) => + f(key, value) match { + case None => Zero + case Some(value2) => + // hack to preserve sharing + if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[Ordering[S]] + else Tip(key, value2) + } + case Bin(prefix, mask, left, right) => + val l = left.modifyOrRemove(f) + val r = right.modifyOrRemove(f) + if ((left eq l) && (right eq r)) this.asInstanceOf[Ordering[S]] + else bin(prefix, mask, l, r) + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala new file mode 100644 index 000000000000..f0be91b72acc --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala @@ -0,0 +1,296 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.collection.immutable.{RedBlackTree => RB} +import scala.runtime.AbstractFunction1 + + +/** This class implements immutable sorted sets using a tree. + * + * @tparam A the type of the elements contained in this tree set + * @param ordering the implicit ordering used to compare objects of type `A` + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] + * section on `Red-Black Trees` for more information. 
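+ *
+ * @example A small usage sketch (results shown in comments):
+ * {{{
+ *   val s = TreeSet(3, 1, 2) // TreeSet(1, 2, 3) -- iteration is ordered
+ *   s.incl(0)                // TreeSet(0, 1, 2, 3)
+ *   s.range(1, 3)            // TreeSet(1, 2) -- the `until` bound is exclusive
+ * }}}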
+ * + * @define Coll `immutable.TreeSet` + * @define coll immutable tree set + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { + + if (ordering eq null) throw new NullPointerException("ordering must not be null") + + def this()(implicit ordering: Ordering[A]) = this(null)(ordering) + + override def sortedIterableFactory = TreeSet + + private[this] def newSetOrSelf(t: RB.Tree[A, Any]) = if(t eq tree) this else new TreeSet[A](t) + + override def size: Int = RB.count(tree) + + override def isEmpty = size == 0 + + override def head: A = RB.smallest(tree).key + + override def last: A = RB.greatest(tree).key + + override def tail: TreeSet[A] = new TreeSet(RB.tail(tree)) + + override def init: TreeSet[A] = new TreeSet(RB.init(tree)) + + override def min[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + head + } else { + super.min(ord) + } + } + + override def max[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + last + } else { + super.max(ord) + } + } + + override def drop(n: Int): TreeSet[A] = { + if (n <= 0) this + else if (n >= size) empty + else new TreeSet(RB.drop(tree, n)) + } + + override def take(n: Int): TreeSet[A] = { + if (n <= 0) empty + else if (n >= size) this + else new TreeSet(RB.take(tree, n)) + } + + override def slice(from: Int, until: Int): TreeSet[A] = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else new TreeSet(RB.slice(tree, from, until)) + } + + override def dropRight(n: Int): TreeSet[A] = take(size - math.max(n, 0)) + + override def takeRight(n: Int): TreeSet[A] = drop(size - math.max(n, 0)) + + private[this] def countWhile(p: A => Boolean): Int = { + var result = 0 + val it = iterator + while (it.hasNext && p(it.next())) result += 1 + result + } + override def dropWhile(p: A => Boolean): TreeSet[A] = drop(countWhile(p)) + + override def takeWhile(p: A => Boolean): TreeSet[A] = take(countWhile(p)) + + override def span(p: A => Boolean): (TreeSet[A], TreeSet[A]) = splitAt(countWhile(p)) + + override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) + + override def minAfter(key: A): Option[A] = { + val v = RB.minAfter(tree, key) + if (v eq null) Option.empty else Some(v.key) + } + + override def maxBefore(key: A): Option[A] = { + val v = RB.maxBefore(tree, key) + if (v eq null) Option.empty else Some(v.key) + } + + def iterator: Iterator[A] = RB.keysIterator(tree) + + def iteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[A, Any] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => 
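+      // Non-primitive element types fall back to the generic AnyRef stepper.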
shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree, _.left, _.right, _.key))
+    }
+    s.asInstanceOf[S with EfficientSplit]
+  }
+
+  /** Checks if this set contains element `elem`.
+   *
+   * @param elem the element to check for membership.
+   * @return true, iff `elem` is contained in this set.
+   */
+  def contains(elem: A): Boolean = RB.contains(tree, elem)
+
+  override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until))
+
+  def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until))
+
+  /** Creates a new `TreeSet` with the entry added.
+   *
+   * @param elem a new element to add.
+   * @return a new $coll containing `elem` and all the elements of this $coll.
+   */
+  def incl(elem: A): TreeSet[A] =
+    newSetOrSelf(RB.update(tree, elem, null, overwrite = false))
+
+  /** Creates a new `TreeSet` with the entry removed.
+   *
+   * @param elem the element to remove.
+   * @return a new $coll containing all the elements of this $coll except `elem`.
+   */
+  def excl(elem: A): TreeSet[A] =
+    newSetOrSelf(RB.delete(tree, elem))
+
+  override def concat(that: collection.IterableOnce[A]): TreeSet[A] = {
+    val t = that match {
+      case ts: TreeSet[A] if ordering == ts.ordering =>
+        RB.union(tree, ts.tree)
+      case _ =>
+        val it = that.iterator
+        var t = tree
+        while (it.hasNext) t = RB.update(t, it.next(), null, overwrite = false)
+        t
+    }
+    newSetOrSelf(t)
+  }
+
+  override def removedAll(that: IterableOnce[A]): TreeSet[A] = that match {
+    case ts: TreeSet[A] if ordering == ts.ordering =>
+      newSetOrSelf(RB.difference(tree, ts.tree))
+    case _ =>
+      //TODO add an implementation of a mutable subtractor similar to TreeMap
+      //but at least this doesn't create a TreeSet for each iteration
+      object sub extends AbstractFunction1[A, Unit] {
+        var currentTree = tree
+        override def apply(k: A): Unit = {
+          currentTree = RB.delete(currentTree, k)
+        }
+      }
+      that.iterator.foreach(sub)
+      newSetOrSelf(sub.currentTree)
+  }
+
+  override def intersect(that: collection.Set[A]): TreeSet[A] = that match {
+    case ts: TreeSet[A] if ordering == ts.ordering =>
+      newSetOrSelf(RB.intersect(tree, ts.tree))
+    case _ =>
+      super.intersect(that)
+  }
+
+  override def diff(that: collection.Set[A]): TreeSet[A] = that match {
+    case ts: TreeSet[A] if ordering == ts.ordering =>
+      newSetOrSelf(RB.difference(tree, ts.tree))
+    case _ =>
+      super.diff(that)
+  }
+
+  override def filter(f: A => Boolean): TreeSet[A] = newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => f(k)}))
+
+  override def partition(p: A => Boolean): (TreeSet[A], TreeSet[A]) = {
+    val (l, r) = RB.partitionEntries(tree, {(a:A, _: Any) => p(a)})
+    (newSetOrSelf(l), newSetOrSelf(r))
+  }
+
+  override def equals(obj: Any): Boolean = obj match {
+    case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree)
+    case _ => super.equals(obj)
+  }
+
+  override protected[this] def className = "TreeSet"
+}
+
+/**
+ * $factoryInfo
+ *
+ * @define Coll `immutable.TreeSet`
+ * @define coll immutable tree set
+ */
+@SerialVersionUID(3L)
+object TreeSet extends SortedIterableFactory[TreeSet] {
+
+  def empty[A: Ordering]: TreeSet[A] = new TreeSet[A]
+
+  def from[E](it: scala.collection.IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] =
+    it match {
+      case ts: TreeSet[E] if ordering == ts.ordering => ts
+      case ss: scala.collection.SortedSet[E] if ordering == ss.ordering =>
+        new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size))
+      case r: Range if (ordering eq Ordering.Int) ||
(Ordering.Int isReverseOf ordering) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + val tree = RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size) + // The cast is needed to compile with Dotty: + // Dotty doesn't infer that E =:= Int, since instantiation of covariant GADTs is unsound + new TreeSet[E](tree) + case _ => + var t: RB.Tree[E, Null] = null + val i = it.iterator + while (i.hasNext) t = RB.update(t, i.next(), null, overwrite = false) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): ReusableBuilder[A, TreeSet[A]] = new TreeSetBuilder[A] + private class TreeSetBuilder[A](implicit ordering: Ordering[A]) + extends RB.SetHelper[A] + with ReusableBuilder[A, TreeSet[A]] { + type Tree = RB.Tree[A, Any] + private [this] var tree:RB.Tree[A, Any] = null + + override def addOne(elem: A): this.type = { + tree = mutableUpd(tree, elem) + this + } + + override def addAll(xs: IterableOnce[A]): this.type = { + xs match { + // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= + // for the moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeSet[A] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree + else tree = RB.union(beforePublish(tree), ts.tree)(ordering) + case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) + case _ => + super.addAll(xs) + } + this + } + + override def clear(): Unit = { + tree = null + } + + override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala new file mode 100644 index 000000000000..aa3fac5acd69 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Vector.scala @@ -0,0 +1,2474 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package immutable + +import java.lang.Math.{abs, max => mmax, min => mmin} +import java.util.Arrays.{copyOf, copyOfRange} +import java.util.{Arrays, Spliterator} + +import scala.annotation.switch +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.VectorInline._ +import scala.collection.immutable.VectorStatics._ +import scala.collection.mutable.ReusableBuilder + + +/** $factoryInfo + * @define Coll `Vector` + * @define coll vector + */ +@SerialVersionUID(3L) +object Vector extends StrictOptimizedSeqFactory[Vector] { + + def empty[A]: Vector[A] = Vector0 + + def from[E](it: collection.IterableOnce[E]): Vector[E] = + it match { + case v: Vector[E] => v + case _ => + val knownSize = it.knownSize + if (knownSize == 0) empty[E] + else if (knownSize > 0 && knownSize <= WIDTH) { + val a1: Arr1 = it match { + case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] => + as.unsafeArray.asInstanceOf[Arr1] + case it: Iterable[E] => + val a1 = new Arr1(knownSize) + it.copyToArray(a1.asInstanceOf[Array[Any]]) + a1 + case _ => + val a1 = new Arr1(knownSize) + it.iterator.copyToArray(a1.asInstanceOf[Array[Any]]) + a1.asInstanceOf[Arr1] + } + new Vector1[E](a1) + } else { + (newBuilder ++= it).result() + } + } + + def newBuilder[A]: ReusableBuilder[A, Vector[A]] = new VectorBuilder[A] + + /** Create a Vector with the same element at each index. + * + * Unlike `fill`, which takes a by-name argument for the value and can thereby + * compute different values for each index, this method guarantees that all + * elements are identical. This allows sparse allocation in O(log n) time and space. + */ + private[collection] def fillSparse[A](n: Int)(elem: A): Vector[A] = { + //TODO Make public; this method is private for now because it is not forward binary compatible + if(n <= 0) Vector0 + else { + val b = new VectorBuilder[A] + b.initSparse(n, elem) + b.result() + } + } + + private val defaultApplyPreferredMaxLength: Int = + try System.getProperty("scala.collection.immutable.Vector.defaultApplyPreferredMaxLength", + "250").toInt + catch { + case _: SecurityException => 250 + } + + private val emptyIterator = new NewVectorIterator(Vector0, 0, 0) +} + + +/** Vector is a general-purpose, immutable data structure. It provides random access and updates + * in O(log n) time, as well as very fast append/prepend/tail/init (amortized O(1), worst case O(log n)). + * Because vectors strike a good balance between fast random selections and fast random functional updates, + * they are currently the default implementation of immutable indexed sequences. + * + * Vectors are implemented by radix-balanced finger trees of width 32. There is a separate subclass + * for each level (0 to 6, with 0 being the empty vector and 6 a tree with a maximum width of 64 at the + * top level). + * + * Tree balancing: + * - Only the first dimension of an array may have a size < WIDTH + * - In a `data` (central) array the first dimension may be up to WIDTH-2 long, in `prefix1` and `suffix1` up + * to WIDTH, and in other `prefix` and `suffix` arrays up to WIDTH-1 + * - `prefix1` and `suffix1` are never empty + * - Balancing does not cross the main data array (i.e. prepending never touches the suffix and appending never touches + * the prefix). 
The level is increased/decreased when the affected side plus main data is already full/empty + * - All arrays are left-aligned and truncated + * + * In addition to the data slices (`prefix1`, `prefix2`, ..., `dataN`, ..., `suffix2`, `suffix1`) we store a running + * count of elements after each prefix for more efficient indexing without having to dereference all prefix arrays. + */ +sealed abstract class Vector[+A] private[immutable] (private[immutable] final val prefix1: Arr1) + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, Vector, Vector[A]] + with StrictOptimizedSeqOps[A, Vector, Vector[A]] + with IterableFactoryDefaults[A, Vector] + with DefaultSerializable { + + override def iterableFactory: SeqFactory[Vector] = Vector + + override final def length: Int = + if(this.isInstanceOf[BigVector[_]]) this.asInstanceOf[BigVector[_]].length0 + else prefix1.length + + override final def iterator: Iterator[A] = + if(this.isInstanceOf[Vector0.type]) Vector.emptyIterator + else new NewVectorIterator(this, length, vectorSliceCount) + + override final protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Vector[A] = { + var i = 0 + val len = prefix1.length + while (i != len) { + if (pred(prefix1(i).asInstanceOf[A]) == isFlipped) { + // each 1 bit indicates that index passes the filter. + // all indices < i are also assumed to pass the filter + var bitmap = 0 + var j = i + 1 + while (j < len) { + if (pred(prefix1(j).asInstanceOf[A]) != isFlipped) { + bitmap |= (1 << j) + } + j += 1 + } + val newLen = i + java.lang.Integer.bitCount(bitmap) + + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + var k = 0 + while(k < i) { + b.addOne(prefix1(k).asInstanceOf[A]) + k += 1 + } + k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + b.addOne(prefix1(k).asInstanceOf[A]) + i += 1 + } + k += 1 + } + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + return b.result() + } else { + if (newLen == 0) return Vector0 + val newData = new Array[AnyRef](newLen) + System.arraycopy(prefix1, 0, newData, 0, i) + var k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + newData(i) = prefix1(k) + i += 1 + } + k += 1 + } + return new Vector1[A](newData) + } + } + i += 1 + } + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + b.initFrom(prefix1) + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + b.result() + } else this + } + + // Dummy overrides to refine result types for binary compatibility: + override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem) + override def appended[B >: A](elem: B): Vector[B] = super.appended(elem) + override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem) + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): Vector[B] = { + val k = prefix.knownSize + if (k == 0) this + else if (k < 0) super.prependedAll(prefix) + else prependedAll0(prefix, k) + } + + override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]): Vector[B] = { + val k = suffix.knownSize + if (k == 0) this + else if (k < 0) super.appendedAll(suffix) + else appendedAll0(suffix, k) + } + + protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = { + // k >= 0, k = prefix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) { + var v: Vector[B] 
= this + val it = IndexedSeq.from(prefix).reverseIterator + while (it.hasNext) v = it.next() +: v + v + } else if (this.size < (k >>> Log2ConcatFaster) && prefix.isInstanceOf[Vector[_]]) { + var v = prefix.asInstanceOf[Vector[B]] + val it = this.iterator + while (it.hasNext) v = v :+ it.next() + v + } else if (k < this.size - AlignToFaster) { + new VectorBuilder[B].alignTo(k, this).addAll(prefix).addAll(this).result() + } else super.prependedAll(prefix) + } + + protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + // k >= 0, k = suffix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit) { + var v: Vector[B] = this + suffix match { + case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) + case _ => suffix.iterator.foreach(x => v = v.appended(x)) + } + v + } else if (this.size < (k >>> Log2ConcatFaster) && suffix.isInstanceOf[Vector[_]]) { + var v = suffix.asInstanceOf[Vector[B]] + val ri = this.reverseIterator + while (ri.hasNext) v = v.prepended(ri.next()) + v + } else if (this.size < k - AlignToFaster && suffix.isInstanceOf[Vector[_]]) { + val v = suffix.asInstanceOf[Vector[B]] + new VectorBuilder[B].alignTo(this.size, v).addAll(this).addAll(v).result() + } else new VectorBuilder[B].initFrom(this).addAll(suffix).result() + } + + override def className = "Vector" + + @inline override final def take(n: Int): Vector[A] = slice(0, n) + @inline override final def drop(n: Int): Vector[A] = slice(n, length) + @inline override final def takeRight(n: Int): Vector[A] = slice(length - mmax(n, 0), length) + @inline override final def dropRight(n: Int): Vector[A] = slice(0, length - mmax(n, 0)) + override def tail: Vector[A] = slice(1, length) + override def init: Vector[A] = slice(0, length-1) + + /** Like slice but parameters must be 0 <= lo < hi < length */ + protected[this] def slice0(lo: Int, hi: Int): Vector[A] + + /** Number of slices */ + protected[immutable] def vectorSliceCount: Int + /** Slice at index */ + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] + /** Length of all slices up to and including index */ + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) + + override def toVector: Vector[A] = this + + override protected def applyPreferredMaxLength: Int = Vector.defaultApplyPreferredMaxLength + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + val s = shape.shape match { + case StepperShape.IntShape => new IntVectorStepper(iterator.asInstanceOf[NewVectorIterator[Int]]) + case StepperShape.LongShape => new LongVectorStepper(iterator.asInstanceOf[NewVectorIterator[Long]]) + case StepperShape.DoubleShape => new DoubleVectorStepper(iterator.asInstanceOf[NewVectorIterator[Double]]) + case _ => shape.parUnbox(new AnyVectorStepper[A](iterator.asInstanceOf[NewVectorIterator[A]])) + } + s.asInstanceOf[S with EfficientSplit] + } + + protected[this] def ioob(index: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${length-1})") + + override final def head: A = + if (prefix1.length == 0) throw new NoSuchElementException("empty.head") + else prefix1(0).asInstanceOf[A] + + override final def last: A = { + if(this.isInstanceOf[BigVector[_]]) { + val suffix = this.asInstanceOf[BigVector[_]].suffix1 + if(suffix.length == 0) throw new 
NoSuchElementException("empty.last")
+      else suffix(suffix.length-1)
+    } else prefix1(prefix1.length-1)
+  }.asInstanceOf[A]
+
+  override final def foreach[U](f: A => U): Unit = {
+    val c = vectorSliceCount
+    var i = 0
+    while (i < c) {
+      foreachRec(vectorSliceDim(c, i) - 1, vectorSlice(i), f)
+      i += 1
+    }
+  }
+
+  // The following definitions are needed for binary compatibility with ParVector
+  private[collection] def startIndex: Int = 0
+  private[collection] def endIndex: Int = length
+  private[collection] def initIterator[B >: A](s: VectorIterator[B]): Unit =
+    s.it = iterator.asInstanceOf[NewVectorIterator[B]]
+}
+
+
+/** This class only exists because we cannot override `slice` in `Vector` in a binary-compatible way */
+private sealed abstract class VectorImpl[+A](_prefix1: Arr1) extends Vector[A](_prefix1) {
+
+  override final def slice(from: Int, until: Int): Vector[A] = {
+    val lo = mmax(from, 0)
+    val hi = mmin(until, length)
+    if (hi <= lo) Vector0
+    else if (hi - lo == length) this
+    else slice0(lo, hi)
+  }
+}
+
+
+/** Vector with suffix and length fields; all Vector subclasses except Vector1 extend this */
+private sealed abstract class BigVector[+A](_prefix1: Arr1, private[immutable] val suffix1: Arr1, private[immutable] val length0: Int) extends VectorImpl[A](_prefix1) {
+
+  protected[immutable] final def foreachRest[U](f: A => U): Unit = {
+    val c = vectorSliceCount
+    var i = 1
+    while(i < c) {
+      foreachRec(vectorSliceDim(c, i)-1, vectorSlice(i), f)
+      i += 1
+    }
+  }
+}
+
+
+/** Empty vector */
+private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) {
+
+  def apply(index: Int): Nothing = throw ioob(index)
+
+  override def updated[B >: Nothing](index: Int, elem: B): Vector[B] = throw ioob(index)
+
+  override def appended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem))
+
+  override def prepended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem))
+
+  override def map[B](f: Nothing => B): Vector[B] = this
+
+  override def tail: Vector[Nothing] = throw new UnsupportedOperationException("empty.tail")
+
+  override def init: Vector[Nothing] = throw new UnsupportedOperationException("empty.init")
+
+  protected[this] def slice0(lo: Int, hi: Int): Vector[Nothing] = this
+
+  protected[immutable] def vectorSliceCount: Int = 0
+  protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = null
+  protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = 0
+
+  override def equals(o: Any): Boolean = {
+    if(this eq o.asInstanceOf[AnyRef]) true
+    else o match {
+      case that: Vector[_] => false
+      case o => super.equals(o)
+    }
+  }
+
+  override protected[this] def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B], k: Int): Vector[B] =
+    Vector.from(prefix)
+
+  override protected[this] def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B], k: Int): Vector[B] =
+    Vector.from(suffix)
+
+  override protected[this] def ioob(index: Int): IndexOutOfBoundsException =
+    new IndexOutOfBoundsException(s"$index is out of bounds (empty vector)")
+}
+
+/** Flat ArraySeq-like structure */
+private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) {
+
+  @inline def apply(index: Int): A = {
+    if(index >= 0 && index < prefix1.length)
+      prefix1(index).asInstanceOf[A]
+    else throw ioob(index)
+  }
+
+  override def updated[B >: A](index: Int, elem: B): Vector[B] = {
+    if(index >= 0 && index < prefix1.length)
+      new Vector1(copyUpdate(prefix1, index, elem))
+    else throw ioob(index)
+  }
+
+  override def appended[B >: A](elem: B):
Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyAppend1(prefix1, elem)) + else new Vector2(prefix1, WIDTH, empty2, wrap1(elem), WIDTH+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyPrepend1(elem, prefix1)) + else new Vector2(wrap1(elem), 1, empty2, prefix1, len1+1) + } + + override def map[B](f: A => B): Vector[B] = new Vector1(mapElems1(prefix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = + new Vector1(copyOfRange(prefix1, lo, hi)) + + override def tail: Vector[A] = + if(prefix1.length == 1) Vector0 + else new Vector1(copyTail(prefix1)) + + override def init: Vector[A] = + if(prefix1.length == 1) Vector0 + else new Vector1(copyInit(prefix1)) + + protected[immutable] def vectorSliceCount: Int = 1 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1 + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case data1b => new Vector1(data1b) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val data1b = append1IfSpace(prefix1, suffix) + if(data1b ne null) new Vector1(data1b) + else super.appendedAll0(suffix, k) + } +} + + +/** 2-dimensional radix-balanced finger tree */ +private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val data2: Arr2, + _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + data2: Arr2 = data2, + suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector2(prefix1, len1, data2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1 + if(io >= 0) { + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) data2(i2)(i1) + else suffix1(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1) { + val io = index - len1 + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) copy(data2 = copyUpdate(data2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(data2.length < WIDTH-2) copy(data2 = copyAppend(data2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else new Vector3(prefix1, len1, data2, WIDTH*(WIDTH-2) + len1, empty3, wrap2(suffix1), wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, length0 = length0+1) + else if(data2.length < WIDTH-2) copy(wrap1(elem), 1, copyPrepend(prefix1, data2), length0 = length0+1) + else new Vector3(wrap1(elem), 1, wrap2(prefix1), len1+1, empty3, data2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), data2 = mapElems(2, data2, f), suffix1 = 
mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, data2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 3 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => data2 + case 2 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => length0 - suffix1.length + case 2 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 3-dimensional radix-balanced finger tree */ +private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val data3: Arr3, + private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + data3: Arr3 = data3, + suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector3(prefix1, len1, prefix2, len12, data3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12 + if(io >= 0) { + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i3 < data3.length) data3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12) { + val io = index - len12 + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i3 < data3.length ) copy(data3 = copyUpdate(data3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, 
suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(data3.length < WIDTH-2) copy(data3 = copyAppend(data3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector4(prefix1, len1, prefix2, len12, data3, (WIDTH-2)*WIDTH2 + len12, empty4, wrap3(copyAppend(suffix2, suffix1)), empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(prefix1 = copyPrepend1(elem, prefix1), len1 = len1+1, len12 = len12+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = copyPrepend(prefix1, prefix2), len12 = len12+1, length0 = length0+1) + else if(data3.length < WIDTH-2) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = empty2, len12 = 1, data3 = copyPrepend(copyPrepend(prefix1, prefix2), data3), length0 = length0+1) + else new Vector4(wrap1(elem), 1, empty2, 1, wrap3(copyPrepend(prefix1, prefix2)), len12+1, empty4, data3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), + data3 = mapElems(3, data3, f), + suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, data3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(prefix1 = copyTail(prefix1), len1 = len1-1, len12 = len12-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 5 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => data3 + case 3 => suffix2 + case 4 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len12 + data3.length*WIDTH2 + case 3 => length0 - suffix1.length + case 4 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 4-dimensional radix-balanced finger tree */ +private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val data4: Arr4, + private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: 
Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + data4: Arr4 = data4, + suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len123 + if(io >= 0) { + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i4 < data4.length) data4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len123) { + val io = index - len123 + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i4 < data4.length ) copy(data4 = copyUpdate(data4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data4.length < WIDTH-2) copy(data4 = copyAppend(data4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, data4, (WIDTH-2)*WIDTH3 + len123, empty5, wrap4(copyAppend(suffix3, copyAppend(suffix2, suffix1))), empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, length0 = length0+1) + else if(data4.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), data4), length0 = length0+1) + else new Vector5(wrap1(elem), 1, empty2, 1, empty3, 1, wrap4(copyPrepend(copyPrepend(prefix1, prefix2), prefix3)), len123+1, empty5, data4, suffix3, suffix2, suffix1, length0+1) + } + + 
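+  // A worked example of the digit arithmetic used by `apply` above, with illustrative
+  // numbers (BITS = 5, so each level consumes five bits of the offset `io`):
+  //   index = 70000, len123 = 2048  =>  io = index - len123 = 67952
+  //   i4 = io >>> BITS3          = 67952 / 32768       = 2
+  //   i3 = (io >>> BITS2) & MASK = (67952 / 1024) % 32 = 2
+  //   i2 = (io >>> BITS) & MASK  = (67952 / 32) % 32   = 11
+  //   i1 = io & MASK             = 67952 % 32          = 16
+  // i.e. the element would live at data4(2)(2)(11)(16), provided data4.length > 2.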
override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), + data4 = mapElems(4, data4, f), + suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, data4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 7 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => data4 + case 4 => suffix3 + case 5 => suffix2 + case 6 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len123 + data4.length*WIDTH3 + case 4 => len123 + data4.length*WIDTH3 + suffix3.length*WIDTH2 + case 5 => length0 - suffix1.length + case 6 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 5-dimensional radix-balanced finger tree */ +private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val data5: Arr5, + private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + data5: Arr5 = data5, + suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1234 + if(io >= 0) { + val i5 = io >>> BITS4 + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) 
& MASK + val i1 = io & MASK + if(i5 < data5.length) data5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1234) { + val io = index - len1234 + val i5 = io >>> BITS4 + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i5 < data5.length ) copy(data5 = copyUpdate(data5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data5.length < WIDTH-2) copy(data5 = copyAppend(data5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, (WIDTH-2)*WIDTH4 + len1234, empty6, wrap5(copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), empty4, empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, length0 = length0+1) + else 
if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, length0 = length0+1) + else if(data5.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), data5), length0 = length0+1) + else new Vector6(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, wrap5(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4)), len1234+1, empty6, data5, suffix4, suffix3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), + data5 = mapElems(5, data5, f), + suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, data5) + b.consider(4, suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 9 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => data5 + case 5 => suffix4 + case 6 => suffix3 + case 7 => suffix2 + case 8 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len1234 + data5.length*WIDTH4 + case 5 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + case 6 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 7 => length0 - suffix1.length + case 8 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 6-dimensional radix-balanced finger tree */ +private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val prefix5: Arr5, private[immutable] 
val len12345: Int, + private[immutable] val data6: Arr6, + private[immutable] val suffix5: Arr5, private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + prefix5: Arr5 = prefix5, len12345: Int = len12345, + data6: Arr6 = data6, + suffix5: Arr5 = suffix5, suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12345 + if(io >= 0) { + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i6 < data6.length) data6(i6)(i5)(i4)(i3)(i2)(i1) + else if(i5 < suffix5.length) suffix5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1234) { + val io = index - len1234 + prefix5(io >>> BITS4)((io >>> BITS3) & MASK)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12345) { + val io = index - len12345 + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i6 < data6.length ) copy(data6 = copyUpdate(data6, i6, i5, i4, i3, i2, i1, elem)) + else if(i5 < suffix5.length) copy(suffix5 = copyUpdate(suffix5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len1234) { + val io = index - len1234 + copy(prefix5 = copyUpdate(prefix5, io >>> BITS4, (io >>> BITS3) & MASK, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = 
copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1 ) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1 ) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1 ) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix5.length < WIDTH-1 ) copy(suffix5 = copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(data6 = copyAppend(data6, copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))))), suffix5 = empty5, suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12345 < WIDTH5 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), len12345+1, length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, empty5, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), data6), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), prefix5 = mapElems(5, prefix5, f), + data6 = mapElems(6, data6, f), + suffix5 = mapElems(5, suffix5, f), suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, prefix5) + b.consider(6, data6) + b.consider(5, suffix5) + b.consider(4, suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, len12345 = 
len12345-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 11 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => prefix5 + case 5 => data6 + case 6 => suffix5 + case 7 => suffix4 + case 8 => suffix3 + case 9 => suffix2 + case 10 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len12345 + case 5 => len12345 + data6.length*WIDTH5 + case 6 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + case 7 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + case 8 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 9 => length0 - suffix1.length + case 10 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + len12345 = len12345 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** Helper class for vector slicing. It is initialized with the validated start and end index, + * then the vector slices are added in succession with `consider`. No matter what the dimension + * of the originating vector is or where the cut is performed, this always results in a + * structure with the highest-dimensional data in the middle and fingers of decreasing dimension + * at both ends, which can be turned into a new vector with very little rebalancing. 
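+ *
+ * For example, `Vector2#slice0` above feeds its three slices through the builder
+ * (a sketch of that existing call pattern, shown here for illustration only):
+ * {{{
+ *   val b = new VectorSliceBuilder(lo, hi)
+ *   b.consider(1, prefix1)  // a dimension-1 slice counts prefix1.length elements
+ *   b.consider(2, data2)    // a dimension-2 slice counts data2.length * WIDTH elements
+ *   b.consider(1, suffix1)
+ *   b.result()
+ * }}}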
+ */ +private final class VectorSliceBuilder(lo: Int, hi: Int) { + //println(s"***** VectorSliceBuilder($lo, $hi)") + + private[this] val slices = new Array[Array[AnyRef]](11) + private[this] var len, pos, maxDim = 0 + + @inline private[this] def prefixIdx(n: Int) = n-1 + @inline private[this] def suffixIdx(n: Int) = 11-n + + def consider[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** consider($n, /${a.length})") + val count = a.length * (1 << (BITS*(n-1))) + val lo0 = mmax(lo-pos, 0) + val hi0 = mmin(hi-pos, count) + if(hi0 > lo0) { + addSlice(n, a, lo0, hi0) + len += (hi0 - lo0) + } + pos += count + } + + private[this] def addSlice[T <: AnyRef](n: Int, a: Array[T], lo: Int, hi: Int): Unit = { + //println(s"***** addSlice($n, /${a.length}, $lo, $hi)") + if(n == 1) { + add(1, copyOrUse(a, lo, hi)) + } else { + val bitsN = BITS * (n-1) + val widthN = 1 << bitsN + val loN = lo >>> bitsN + val hiN = hi >>> bitsN + val loRest = lo & (widthN - 1) + val hiRest = hi & (widthN - 1) + //println(s"***** bitsN=$bitsN, loN=$loN, hiN=$hiN, loRest=$loRest, hiRest=$hiRest") + if(loRest == 0) { + if(hiRest == 0) { + add(n, copyOrUse(a, loN, hiN)) + } else { + if(hiN > loN) add(n, copyOrUse(a, loN, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) + } + } else { + if(hiN == loN) { + addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, hiRest) + } else { + addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, widthN) + if(hiRest == 0) { + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) + } else { + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) + } + } + } + } + } + + private[this] def add[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** add($n, /${a.length})") + val idx = + if(n <= maxDim) suffixIdx(n) + else { maxDim = n; prefixIdx(n) } + slices(idx) = a.asInstanceOf[Array[AnyRef]] + } + + def result[A](): Vector[A] = { + //println(s"***** result: $len, $maxDim") + if(len <= 32) { + if(len == 0) Vector0 + else { + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + //println(s"***** prefix1: ${if(prefix1 == null) "null" else prefix1.mkString("[", ",", "]")}, suffix1: ${if(suffix1 == null) "null" else suffix1.mkString("[", ",", "]")}") + val a: Arr1 = + if(prefix1 ne null) { + if(suffix1 ne null) concatArrays(prefix1, suffix1) + else prefix1 + } else if(suffix1 ne null) suffix1 + else { + val prefix2 = slices(prefixIdx(2)).asInstanceOf[Arr2] + if(prefix2 ne null) prefix2(0) + else { + val suffix2 = slices(suffixIdx(2)).asInstanceOf[Arr2] + suffix2(0) + } + } + new Vector1(a) + } + } else { + balancePrefix(1) + balanceSuffix(1) + var resultDim = maxDim + if(resultDim < 6) { + val pre = slices(prefixIdx(maxDim)) + val suf = slices(suffixIdx(maxDim)) + if((pre ne null) && (suf ne null)) { + // The highest-dimensional data consists of two slices: concatenate if they fit into the main data array, + // otherwise increase the dimension + if(pre.length + suf.length <= WIDTH-2) { + slices(prefixIdx(maxDim)) = concatArrays(pre, suf) + slices(suffixIdx(maxDim)) = null + } else resultDim += 1 + } else { + // A single highest-dimensional slice could have length WIDTH-1 if it came from a prefix or suffix but we + // only allow WIDTH-2 for the main data, so increase the dimension in this case + val one = if(pre ne null) pre else suf + if(one.length > WIDTH-2) resultDim += 1 + } + } + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + val len1 = 
prefix1.length + val res = (resultDim: @switch) match { + case 2 => + val data2 = dataOr(2, empty2) + new Vector2[A](prefix1, len1, data2, suffix1, len) + case 3 => + val prefix2 = prefixOr(2, empty2) + val data3 = dataOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + new Vector3[A](prefix1, len1, prefix2, len12, data3, suffix2, suffix1, len) + case 4 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val data4 = dataOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + new Vector4[A](prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, len) + case 5 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val data5 = dataOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + new Vector5[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, len) + case 6 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val prefix5 = prefixOr(5, empty5) + val data6 = dataOr(6, empty6) + val suffix5 = suffixOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + val len12345 = len1234 + (prefix5.length * WIDTH4) + new Vector6[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, len) + } + res + } + } + + @inline private[this] def prefixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] else a + } + + @inline private[this] def suffixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } + + @inline private[this] def dataOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] + else { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } + } + + /** Ensure prefix is not empty */ + private[this] def balancePrefix(n: Int): Unit = { + if(slices(prefixIdx(n)) eq null) { + if(n == maxDim) { + slices(prefixIdx(n)) = slices(suffixIdx(n)) + slices(suffixIdx(n)) = null + } else { + balancePrefix(n+1) + val preN1 = slices(prefixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(preN1 ne null) + slices(prefixIdx(n)) = preN1(0) + if(preN1.length == 1) { + slices(prefixIdx(n+1)) = null + if((maxDim == n+1) && (slices(suffixIdx(n+1)) eq null)) maxDim = n + } else { + slices(prefixIdx(n+1)) = copyOfRange(preN1, 1, preN1.length).asInstanceOf[Array[AnyRef]] + } + } + } + } + + /** Ensure suffix is not empty */ + private[this] def balanceSuffix(n: Int): Unit = { + if(slices(suffixIdx(n)) eq null) { + if(n == maxDim) { + slices(suffixIdx(n)) = slices(prefixIdx(n)) + slices(prefixIdx(n)) = null + } else { + balanceSuffix(n+1) + val sufN1 = 
slices(suffixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(sufN1 ne null, s"n=$n, maxDim=$maxDim, slices=${slices.mkString(",")}") + slices(suffixIdx(n)) = sufN1(sufN1.length-1) + if(sufN1.length == 1) { + slices(suffixIdx(n+1)) = null + if((maxDim == n+1) && (slices(prefixIdx(n+1)) eq null)) maxDim = n + } else { + slices(suffixIdx(n+1)) = copyOfRange(sufN1, 0, sufN1.length-1).asInstanceOf[Array[AnyRef]] + } + } + } + } + + override def toString: String = + s"VectorSliceBuilder(lo=$lo, hi=$hi, len=$len, pos=$pos, maxDim=$maxDim)" + + private[immutable] def getSlices: Array[Array[AnyRef]] = slices +} + + +final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { + + private[this] var a6: Arr6 = _ + private[this] var a5: Arr5 = _ + private[this] var a4: Arr4 = _ + private[this] var a3: Arr3 = _ + private[this] var a2: Arr2 = _ + private[this] var a1: Arr1 = new Arr1(WIDTH) + private[this] var len1, lenRest, offset = 0 + private[this] var prefixIsRightAligned = false + private[this] var depth = 1 + + @inline private[this] final def setLen(i: Int): Unit = { + len1 = i & MASK + lenRest = i - len1 + } + + override def knownSize: Int = len1 + lenRest - offset + + @inline def size: Int = knownSize + @inline def isEmpty: Boolean = knownSize == 0 + @inline def nonEmpty: Boolean = knownSize != 0 + + def clear(): Unit = { + a6 = null + a5 = null + a4 = null + a3 = null + a2 = null + a1 = new Arr1(WIDTH) + len1 = 0 + lenRest = 0 + offset = 0 + prefixIsRightAligned = false + depth = 1 + } + + private[immutable] def initSparse(size: Int, elem: A): Unit = { + setLen(size) + Arrays.fill(a1, elem) + if(size > WIDTH) { + a2 = new Array(WIDTH) + Arrays.fill(a2.asInstanceOf[Array[AnyRef]], a1) + if(size > WIDTH2) { + a3 = new Array(WIDTH) + Arrays.fill(a3.asInstanceOf[Array[AnyRef]], a2) + if(size > WIDTH3) { + a4 = new Array(WIDTH) + Arrays.fill(a4.asInstanceOf[Array[AnyRef]], a3) + if(size > WIDTH4) { + a5 = new Array(WIDTH) + Arrays.fill(a5.asInstanceOf[Array[AnyRef]], a4) + if(size > WIDTH5) { + a6 = new Array(LASTWIDTH) + Arrays.fill(a6.asInstanceOf[Array[AnyRef]], a5) + depth = 6 + } else depth = 5 + } else depth = 4 + } else depth = 3 + } else depth = 2 + } else depth = 1 + } + + private[immutable] def initFrom(prefix1: Arr1): Unit = { + depth = 1 + setLen(prefix1.length) + a1 = copyOrUse(prefix1, 0, WIDTH) + if(len1 == 0 && lenRest > 0) { + // force advance() on next addition: + len1 = WIDTH + lenRest -= WIDTH + } + } + + private[immutable] def initFrom(v: Vector[_]): this.type = { + (v.vectorSliceCount: @switch) match { + case 0 => + case 1 => + val v1 = v.asInstanceOf[Vector1[_]] + depth = 1 + setLen(v1.prefix1.length) + a1 = copyOrUse(v1.prefix1, 0, WIDTH) + case 3 => + val v2 = v.asInstanceOf[Vector2[_]] + val d2 = v2.data2 + a1 = copyOrUse(v2.suffix1, 0, WIDTH) + depth = 2 + offset = WIDTH - v2.len1 + setLen(v2.length0 + offset) + a2 = new Arr2(WIDTH) + a2(0) = v2.prefix1 + System.arraycopy(d2, 0, a2, 1, d2.length) + a2(d2.length+1) = a1 + case 5 => + val v3 = v.asInstanceOf[Vector3[_]] + val d3 = v3.data3 + val s2 = v3.suffix2 + a1 = copyOrUse(v3.suffix1, 0, WIDTH) + depth = 3 + offset = WIDTH2 - v3.len12 + setLen(v3.length0 + offset) + a3 = new Arr3(WIDTH) + a3(0) = copyPrepend(v3.prefix1, v3.prefix2) + System.arraycopy(d3, 0, a3, 1, d3.length) + a2 = copyOf(s2, WIDTH) + a3(d3.length+1) = a2 + a2(s2.length) = a1 + case 7 => + val v4 = v.asInstanceOf[Vector4[_]] + val d4 = v4.data4 + val s3 = v4.suffix3 + val s2 = v4.suffix2 + a1 = copyOrUse(v4.suffix1, 0, WIDTH) + depth = 4 
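+        // Collapse the three prefixes into a4(0) and re-nest the (widened) suffix
+        // arrays under the right edge, so that subsequent appends go into a1.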
+        offset = WIDTH3 - v4.len123
+        setLen(v4.length0 + offset)
+        a4 = new Arr4(WIDTH)
+        a4(0) = copyPrepend(copyPrepend(v4.prefix1, v4.prefix2), v4.prefix3)
+        System.arraycopy(d4, 0, a4, 1, d4.length)
+        a3 = copyOf(s3, WIDTH)
+        a2 = copyOf(s2, WIDTH)
+        a4(d4.length+1) = a3
+        a3(s3.length) = a2
+        a2(s2.length) = a1
+      case 9 =>
+        val v5 = v.asInstanceOf[Vector5[_]]
+        val d5 = v5.data5
+        val s4 = v5.suffix4
+        val s3 = v5.suffix3
+        val s2 = v5.suffix2
+        a1 = copyOrUse(v5.suffix1, 0, WIDTH)
+        depth = 5
+        offset = WIDTH4 - v5.len1234
+        setLen(v5.length0 + offset)
+        a5 = new Arr5(WIDTH)
+        a5(0) = copyPrepend(copyPrepend(copyPrepend(v5.prefix1, v5.prefix2), v5.prefix3), v5.prefix4)
+        System.arraycopy(d5, 0, a5, 1, d5.length)
+        a4 = copyOf(s4, WIDTH)
+        a3 = copyOf(s3, WIDTH)
+        a2 = copyOf(s2, WIDTH)
+        a5(d5.length+1) = a4
+        a4(s4.length) = a3
+        a3(s3.length) = a2
+        a2(s2.length) = a1
+      case 11 =>
+        val v6 = v.asInstanceOf[Vector6[_]]
+        val d6 = v6.data6
+        val s5 = v6.suffix5
+        val s4 = v6.suffix4
+        val s3 = v6.suffix3
+        val s2 = v6.suffix2
+        a1 = copyOrUse(v6.suffix1, 0, WIDTH)
+        depth = 6
+        offset = WIDTH5 - v6.len12345
+        setLen(v6.length0 + offset)
+        a6 = new Arr6(LASTWIDTH)
+        a6(0) = copyPrepend(copyPrepend(copyPrepend(copyPrepend(v6.prefix1, v6.prefix2), v6.prefix3), v6.prefix4), v6.prefix5)
+        System.arraycopy(d6, 0, a6, 1, d6.length)
+        a5 = copyOf(s5, WIDTH)
+        a4 = copyOf(s4, WIDTH)
+        a3 = copyOf(s3, WIDTH)
+        a2 = copyOf(s2, WIDTH)
+        a6(d6.length+1) = a5
+        a5(s5.length) = a4
+        a4(s4.length) = a3
+        a3(s3.length) = a2
+        a2(s2.length) = a1
+    }
+    if(len1 == 0 && lenRest > 0) {
+      // force advance() on next addition:
+      len1 = WIDTH
+      lenRest -= WIDTH
+    }
+    this
+  }
+
+  //TODO Make public; this method is only private for binary compatibility
+  private[collection] def alignTo(before: Int, bigVector: Vector[A]): this.type = {
+    if (len1 != 0 || lenRest != 0)
+      throw new UnsupportedOperationException("A non-empty VectorBuilder cannot be aligned retrospectively. Please call .clear() or use a new VectorBuilder.")
+    val (prefixLength, maxPrefixLength) = bigVector match {
+      case Vector0 => (0, 1)
+      case v1: Vector1[_] => (0, 1)
+      case v2: Vector2[_] => (v2.len1, WIDTH)
+      case v3: Vector3[_] => (v3.len12, WIDTH2)
+      case v4: Vector4[_] => (v4.len123, WIDTH3)
+      case v5: Vector5[_] => (v5.len1234, WIDTH4)
+      case v6: Vector6[_] => (v6.len12345, WIDTH5)
+    }
+    if (maxPrefixLength == 1) return this // does not really make sense to align a vector of <= 32 elements
+    val overallPrefixLength = (before + prefixLength) % maxPrefixLength
+    offset = (maxPrefixLength - overallPrefixLength) % maxPrefixLength
+    // pretend there are already `offset` elements added
+    advanceN(offset & ~MASK)
+    len1 = offset & MASK
+    prefixIsRightAligned = true
+    this
+  }
+
+  /**
+   * Removes `offset` leading `null`s in the prefix.
+   * This is needed after calling `alignTo` and subsequent additions,
+   * directly before the result is used for creating a new Vector.
+   * Note that the outermost array keeps its length to keep the
+   * Builder re-usable.
+   *
+   * example:
+   * a2 = Array(null, ..., null, Array(null, .., null, 0, 1, .., x), Array(x+1, .., x+32), ...)
+   * becomes
+   * a2 = Array(Array(0, 1, .., x), Array(x+1, .., x+32), ..., ?, ..., ?)
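+   * The trailing `?` slots are stale references left behind by the in-place shift;
+   * they are no longer reachable because `lenRest` and `offset` were reduced to match.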
+ */ + private[this] def leftAlignPrefix(): Unit = { + @inline def shrinkOffsetIfToLarge(width: Int): Unit = { + val newOffset = offset % width + lenRest -= offset - newOffset + offset = newOffset + } + var a: Array[AnyRef] = null // the array we modify + var aParent: Array[AnyRef] = null // a's parent, so aParent(0) == a + if (depth >= 6) { + a = a6.asInstanceOf[Array[AnyRef]] + val i = offset >>> BITS5 + if (i > 0) System.arraycopy(a, i, a, 0, LASTWIDTH - i) + shrinkOffsetIfToLarge(WIDTH5) + if ((lenRest >>> BITS5) == 0) depth = 5 + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 5) { + if (a == null) a = a5.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS4) & MASK + if (depth == 5) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a5 = a.asInstanceOf[Arr5] + shrinkOffsetIfToLarge(WIDTH4) + if ((lenRest >>> BITS4) == 0) depth = 4 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 4) { + if (a == null) a = a4.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS3) & MASK + if (depth == 4) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a4 = a.asInstanceOf[Arr4] + shrinkOffsetIfToLarge(WIDTH3) + if ((lenRest >>> BITS3) == 0) depth = 3 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 3) { + if (a == null) a = a3.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS2) & MASK + if (depth == 3) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a3 = a.asInstanceOf[Arr3] + shrinkOffsetIfToLarge(WIDTH2) + if ((lenRest >>> BITS2) == 0) depth = 2 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 2) { + if (a == null) a = a2.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS) & MASK + if (depth == 2) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a2 = a.asInstanceOf[Arr2] + shrinkOffsetIfToLarge(WIDTH) + if ((lenRest >>> BITS) == 0) depth = 1 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 1) { + if (a == null) a = a1.asInstanceOf[Array[AnyRef]] + val i = offset & MASK + if (depth == 1) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a1 = a.asInstanceOf[Arr1] + len1 -= offset + offset = 0 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + } + prefixIsRightAligned = false + } + + def addOne(elem: A): this.type = { + if(len1 == WIDTH) advance() + a1(len1) = elem.asInstanceOf[AnyRef] + len1 += 1 + this + } + + private[this] def addArr1(data: Arr1): Unit = { + val dl = data.length + if(dl > 0) { + if(len1 == WIDTH) advance() + val copy1 = mmin(WIDTH-len1, dl) + val copy2 = dl - copy1 + System.arraycopy(data, 0, a1, len1, copy1) + len1 += copy1 + if(copy2 > 0) { + advance() + System.arraycopy(data, copy1, a1, 0, copy2) + len1 += copy2 + } + } + } + + private[this] def addArrN(slice: Array[AnyRef], dim: Int): Unit = { +// assert(dim >= 2) +// assert(lenRest % WIDTH == 0) +// assert(len1 == 0 || len1 == WIDTH) + if (slice.isEmpty) return + if (len1 == WIDTH) advance() + val sl = slice.length + (dim: @switch) match { + case 2 => + // lenRest is always a multiple of WIDTH + val copy1 = mmin(((WIDTH2 - lenRest) >>> BITS) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS) & MASK + System.arraycopy(slice, 0, a2, 
destPos, copy1) + advanceN(WIDTH * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a2, 0, copy2) + advanceN(WIDTH * copy2) + } + case 3 => + if (lenRest % WIDTH2 != 0) { + // lenRest is not multiple of WIDTH2, so this slice does not align, need to try lower dimension + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 2)) + return + } + val copy1 = mmin(((WIDTH3 - lenRest) >>> BITS2) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS2) & MASK + System.arraycopy(slice, 0, a3, destPos, copy1) + advanceN(WIDTH2 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a3, 0, copy2) + advanceN(WIDTH2 * copy2) + } + case 4 => + if (lenRest % WIDTH3 != 0) { + // lenRest is not multiple of WIDTH3, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 3)) + return + } + val copy1 = mmin(((WIDTH4 - lenRest) >>> BITS3) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS3) & MASK + System.arraycopy(slice, 0, a4, destPos, copy1) + advanceN(WIDTH3 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a4, 0, copy2) + advanceN(WIDTH3 * copy2) + } + case 5 => + if (lenRest % WIDTH4 != 0) { + // lenRest is not multiple of WIDTH4, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 4)) + return + } + val copy1 = mmin(((WIDTH5 - lenRest) >>> BITS4) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS4) & MASK + System.arraycopy(slice, 0, a5, destPos, copy1) + advanceN(WIDTH4 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a5, 0, copy2) + advanceN(WIDTH4 * copy2) + } + case 6 => // note width is now LASTWIDTH + if (lenRest % WIDTH5 != 0) { + // lenRest is not multiple of WIDTH5, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 5)) + return + } + val copy1 = sl + // there is no copy2 because there can't be another a6 to copy to + val destPos = lenRest >>> BITS5 + if (destPos + copy1 > LASTWIDTH) + throw new IllegalArgumentException("exceeding 2^31 elements") + System.arraycopy(slice, 0, a6, destPos, copy1) + advanceN(WIDTH5 * copy1) + } + } + + private[this] def addVector(xs: Vector[A]): this.type = { + val sliceCount = xs.vectorSliceCount + var sliceIdx = 0 + while(sliceIdx < sliceCount) { + val slice = xs.vectorSlice(sliceIdx) + vectorSliceDim(sliceCount, sliceIdx) match { + case 1 => addArr1(slice.asInstanceOf[Arr1]) + case n if len1 == WIDTH || len1 == 0 => + addArrN(slice.asInstanceOf[Array[AnyRef]], n) + case n => foreachRec(n-2, slice, addArr1) + } + sliceIdx += 1 + } + this + } + + override def addAll(xs: IterableOnce[A]): this.type = xs match { + case v: Vector[_] => + if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v) + else addVector(v.asInstanceOf[Vector[A]]) + case _ => + super.addAll(xs) + } + + private[this] def advance(): Unit = { + val idx = lenRest + WIDTH + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advanceN(n: Int): Unit = if (n > 0) { + // assert(n % 32 == 0) + val idx = lenRest + n + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advance1(idx: Int, xor: Int): Unit = { + if (xor <= 0) { // level = 6 or something very unexpected happened + throw new IllegalArgumentException(s"advance1($idx, $xor): a1=$a1, a2=$a2, a3=$a3, a4=$a4, a5=$a5, a6=$a6, depth=$depth") + } else if (xor 
< WIDTH2) { // level = 1 + if (depth <= 1) { a2 = new Array(WIDTH); a2(0) = a1; depth = 2 } + a1 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + } else if (xor < WIDTH3) { // level = 2 + if (depth <= 2) { a3 = new Array(WIDTH); a3(0) = a2; depth = 3 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + } else if (xor < WIDTH4) { // level = 3 + if (depth <= 3) { a4 = new Array(WIDTH); a4(0) = a3; depth = 4 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + } else if (xor < WIDTH5) { // level = 4 + if (depth <= 4) { a5 = new Array(WIDTH); a5(0) = a4; depth = 5 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + } else { // level = 5 + if (depth <= 5) { a6 = new Array(LASTWIDTH); a6(0) = a5; depth = 6 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + a5 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + a6(idx >>> BITS5) = a5 + } + } + + def result(): Vector[A] = { + if (prefixIsRightAligned) leftAlignPrefix() + val len = len1 + lenRest + val realLen = len - offset + if(realLen == 0) Vector.empty + else if(len < 0) throw new IndexOutOfBoundsException(s"Vector cannot have negative size $len") + else if(len <= WIDTH) { + new Vector1(copyIfDifferentSize(a1, realLen)) + } else if(len <= WIDTH2) { + val i1 = (len-1) & MASK + val i2 = (len-1) >>> BITS + val data = copyOfRange(a2, 1, i2) + val prefix1 = a2(0) + val suffix1 = copyIfDifferentSize(a2(i2), i1+1) + new Vector2(prefix1, WIDTH-offset, data, suffix1, realLen) + } else if(len <= WIDTH3) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) + val data = copyOfRange(a3, 1, i3) + val prefix2 = copyTail(a3(0)) + val prefix1 = a3(0)(0) + val suffix2 = copyOf(a3(i3), i2) + val suffix1 = copyIfDifferentSize(a3(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + new Vector3(prefix1, len1, prefix2, len12, data, suffix2, suffix1, realLen) + } else if(len <= WIDTH4) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) + val data = copyOfRange(a4, 1, i4) + val prefix3 = copyTail(a4(0)) + val prefix2 = copyTail(a4(0)(0)) + val prefix1 = a4(0)(0)(0) + val suffix3 = copyOf(a4(i4), i3) + val suffix2 = copyOf(a4(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a4(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data, suffix3, suffix2, suffix1, realLen) + } else if(len <= WIDTH5) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) + val data = copyOfRange(a5, 1, i5) + val prefix4 = copyTail(a5(0)) + val prefix3 = copyTail(a5(0)(0)) + val prefix2 = copyTail(a5(0)(0)(0)) + val prefix1 = a5(0)(0)(0)(0) + val suffix4 = copyOf(a5(i5), i4) + val suffix3 = copyOf(a5(i5)(i4), i3) + val suffix2 = copyOf(a5(i5)(i4)(i3), i2) + 
val suffix1 = copyIfDifferentSize(a5(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data, suffix4, suffix3, suffix2, suffix1, realLen) + } else { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) & MASK + val i6 = ((len-1) >>> BITS5) + val data = copyOfRange(a6, 1, i6) + val prefix5 = copyTail(a6(0)) + val prefix4 = copyTail(a6(0)(0)) + val prefix3 = copyTail(a6(0)(0)(0)) + val prefix2 = copyTail(a6(0)(0)(0)(0)) + val prefix1 = a6(0)(0)(0)(0)(0) + val suffix5 = copyOf(a6(i6), i5) + val suffix4 = copyOf(a6(i6)(i5), i4) + val suffix3 = copyOf(a6(i6)(i5)(i4), i3) + val suffix2 = copyOf(a6(i6)(i5)(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a6(i6)(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + val len12345 = len1234 + prefix5.length*WIDTH4 + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data, suffix5, suffix4, suffix3, suffix2, suffix1, realLen) + } + } + + override def toString: String = + s"VectorBuilder(len1=$len1, lenRest=$lenRest, offset=$offset, depth=$depth)" + + private[immutable] def getData: Array[Array[_]] = Array[Array[AnyRef]]( + a1, a2.asInstanceOf[Array[AnyRef]], a3.asInstanceOf[Array[AnyRef]], a4.asInstanceOf[Array[AnyRef]], + a5.asInstanceOf[Array[AnyRef]], a6.asInstanceOf[Array[AnyRef]] + ).asInstanceOf[Array[Array[_]]] +} + + +/** Compile-time definitions for Vector. No references to this object should appear in bytecode. 
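+  *
+  *  As a rough sketch of how these constants are used (an illustrative note,
+  *  not part of the original source): with `BITS = 5` an element index is
+  *  decomposed into 5-bit digits, one digit per trie level.
+  *  {{{
+  *  val idx = 12345
+  *  val i1 = idx & MASK            // slot in the level-1 array
+  *  val i2 = (idx >>> BITS) & MASK // slot in the level-2 array
+  *  }}}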
*/ +private[immutable] object VectorInline { + // compile-time numeric constants + final val BITS = 5 + final val WIDTH = 1 << BITS + final val MASK = WIDTH - 1 + final val BITS2 = BITS * 2 + final val WIDTH2 = 1 << BITS2 + final val BITS3 = BITS * 3 + final val WIDTH3 = 1 << BITS3 + final val BITS4 = BITS * 4 + final val WIDTH4 = 1 << BITS4 + final val BITS5 = BITS * 5 + final val WIDTH5 = 1 << BITS5 + final val LASTWIDTH = WIDTH << 1 // 1 extra bit in the last level to go up to Int.MaxValue (2^31-1) instead of 2^30: + final val Log2ConcatFaster = 5 + final val AlignToFaster = 64 + + type Arr1 = Array[AnyRef] + type Arr2 = Array[Array[AnyRef]] + type Arr3 = Array[Array[Array[AnyRef]]] + type Arr4 = Array[Array[Array[Array[AnyRef]]]] + type Arr5 = Array[Array[Array[Array[Array[AnyRef]]]]] + type Arr6 = Array[Array[Array[Array[Array[Array[AnyRef]]]]]] + + /** Dimension of the slice at index */ + @inline def vectorSliceDim(count: Int, idx: Int): Int = { + val c = count/2 + c+1-abs(idx-c) + } + + @inline def copyOrUse[T <: AnyRef](a: Array[T], start: Int, end: Int): Array[T] = + if(start == 0 && end == a.length) a else copyOfRange[T](a, start, end) + + @inline final def copyTail[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 1, a.length) + + @inline final def copyInit[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 0, a.length-1) + + @inline final def copyIfDifferentSize[T <: AnyRef](a: Array[T], len: Int): Array[T] = + if(a.length == len) a else copyOf[T](a, len) + + @inline final def wrap1(x: Any ): Arr1 = { val a = new Arr1(1); a(0) = x.asInstanceOf[AnyRef]; a } + @inline final def wrap2(x: Arr1): Arr2 = { val a = new Arr2(1); a(0) = x; a } + @inline final def wrap3(x: Arr2): Arr3 = { val a = new Arr3(1); a(0) = x; a } + @inline final def wrap4(x: Arr3): Arr4 = { val a = new Arr4(1); a(0) = x; a } + @inline final def wrap5(x: Arr4): Arr5 = { val a = new Arr5(1); a(0) = x; a } + + @inline final def copyUpdate(a1: Arr1, idx1: Int, elem: Any): Arr1 = { + val a1c = a1.clone() + a1c(idx1) = elem.asInstanceOf[AnyRef] + a1c + } + + @inline final def copyUpdate(a2: Arr2, idx2: Int, idx1: Int, elem: Any): Arr2 = { + val a2c = a2.clone() + a2c(idx2) = copyUpdate(a2c(idx2), idx1, elem) + a2c + } + + @inline final def copyUpdate(a3: Arr3, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr3 = { + val a3c = a3.clone() + a3c(idx3) = copyUpdate(a3c(idx3), idx2, idx1, elem) + a3c + } + + @inline final def copyUpdate(a4: Arr4, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr4 = { + val a4c = a4.clone() + a4c(idx4) = copyUpdate(a4c(idx4), idx3, idx2, idx1, elem) + a4c + } + + @inline final def copyUpdate(a5: Arr5, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr5 = { + val a5c = a5.clone() + a5c(idx5) = copyUpdate(a5c(idx5), idx4, idx3, idx2, idx1, elem) + a5c + } + + @inline final def copyUpdate(a6: Arr6, idx6: Int, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr6 = { + val a6c = a6.clone() + a6c(idx6) = copyUpdate(a6c(idx6), idx5, idx4, idx3, idx2, idx1, elem) + a6c + } + + @inline final def concatArrays[T <: AnyRef](a: Array[T], b: Array[T]): Array[T] = { + val dest = copyOf[T](a, a.length+b.length) + System.arraycopy(b, 0, dest, a.length, b.length) + dest + } +} + + +/** Helper methods and constants for Vector. 
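+  *
+  *  A sketch of the copy-helper contract defined below (illustrative only):
+  *  {{{
+  *  val a: Arr1 = Array[AnyRef]("a", "b")
+  *  copyAppend1(a, "c") // a new Array("a", "b", "c"); `a` is left untouched,
+  *                      // which is what keeps shared Vector slices immutable
+  *  }}}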
*/ +private object VectorStatics { + + final def copyAppend1(a: Arr1, elem: Any): Arr1 = { + val alen = a.length + val ac = new Arr1(alen+1) + System.arraycopy(a, 0, ac, 0, alen) + ac(alen) = elem.asInstanceOf[AnyRef] + ac + } + + final def copyAppend[T <: AnyRef](a: Array[T], elem: T): Array[T] = { + val ac = copyOf(a, a.length+1) + ac(ac.length-1) = elem + ac + } + + final def copyPrepend1(elem: Any, a: Arr1): Arr1 = { + val ac = new Arr1(a.length+1) + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem.asInstanceOf[AnyRef] + ac + } + + final def copyPrepend[T <: AnyRef](elem: T, a: Array[T]): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length+1).asInstanceOf[Array[T]] + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem + ac + } + + final val empty1: Arr1 = new Array(0) + final val empty2: Arr2 = new Array(0) + final val empty3: Arr3 = new Array(0) + final val empty4: Arr4 = new Array(0) + final val empty5: Arr5 = new Array(0) + final val empty6: Arr6 = new Array(0) + + final def foreachRec[T <: AnyRef, A, U](level: Int, a: Array[T], f: A => U): Unit = { + var i = 0 + val len = a.length + if(level == 0) { + while(i < len) { + f(a(i).asInstanceOf[A]) + i += 1 + } + } else { + val l = level-1 + while(i < len) { + foreachRec(l, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + } + } + + final def mapElems1[A, B](a: Arr1, f: A => B): Arr1 = { + var i = 0 + while(i < a.length) { + val v1 = a(i).asInstanceOf[AnyRef] + val v2 = f(v1.asInstanceOf[A]).asInstanceOf[AnyRef] + if(v1 ne v2) + return mapElems1Rest(a, f, i, v2) + i += 1 + } + a + } + + final def mapElems1Rest[A, B](a: Arr1, f: A => B, at: Int, v2: AnyRef): Arr1 = { + val ac = new Arr1(a.length) + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = f(a(i).asInstanceOf[A]).asInstanceOf[AnyRef] + i += 1 + } + ac + } + + final def mapElems[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B): Array[T] = { + if(n == 1) + mapElems1[A, B](a.asInstanceOf[Arr1], f).asInstanceOf[Array[T]] + else { + var i = 0 + while(i < a.length) { + val v1 = a(i) + val v2 = mapElems(n-1, v1.asInstanceOf[Array[AnyRef]], f) + if(v1 ne v2) + return mapElemsRest(n, a, f, i, v2) + i += 1 + } + a + } + } + + final def mapElemsRest[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B, at: Int, v2: AnyRef): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length).asInstanceOf[Array[AnyRef]] + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = mapElems(n-1, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + ac.asInstanceOf[Array[T]] + } + + final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + case it: Iterable[_] => + if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyPrepend(it.head.asInstanceOf[AnyRef], prefix1) + case s => + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + it.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + prefix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-prefix1.length) { + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + it.iterator.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + prefix1b + } else null + } + + final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + case it: 
Iterable[_] => + if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyAppend(suffix1, it.head.asInstanceOf[AnyRef]) + case s => + val suffix1b = copyOf(suffix1, suffix1.length + s) + it.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + suffix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-suffix1.length) { + val suffix1b = copyOf(suffix1, suffix1.length + s) + it.iterator.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + suffix1b + } else null + } +} + + +private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLength: Int, private[this] val sliceCount: Int) extends Iterator[A] with java.lang.Cloneable { + + private[this] var a1: Arr1 = v.prefix1 + private[this] var a2: Arr2 = _ + private[this] var a3: Arr3 = _ + private[this] var a4: Arr4 = _ + private[this] var a5: Arr5 = _ + private[this] var a6: Arr6 = _ + private[this] var a1len = a1.length + private[this] var i1 = 0 // current index in a1 + private[this] var oldPos = 0 + private[this] var len1 = totalLength // remaining length relative to a1 + + private[this] var sliceIdx = 0 + private[this] var sliceDim = 1 + private[this] var sliceStart = 0 // absolute position + private[this] var sliceEnd = a1len // absolute position + + //override def toString: String = + // s"NewVectorIterator(v=$v, totalLength=$totalLength, sliceCount=$sliceCount): a1len=$a1len, len1=$len1, i1=$i1, sliceEnd=$sliceEnd" + + @inline override def knownSize = len1 - i1 + + @inline def hasNext: Boolean = len1 > i1 + + def next(): A = { + if(i1 == a1len) advance() + val r = a1(i1) + i1 += 1 + r.asInstanceOf[A] + } + + private[this] def advanceSlice(): Unit = { + if(!hasNext) Iterator.empty.next() + sliceIdx += 1 + var slice: Array[_ <: AnyRef] = v.vectorSlice(sliceIdx) + while(slice.length == 0) { + sliceIdx += 1 + slice = v.vectorSlice(sliceIdx) + } + sliceStart = sliceEnd + sliceDim = vectorSliceDim(sliceCount, sliceIdx) + (sliceDim: @switch) match { + case 1 => a1 = slice.asInstanceOf[Arr1] + case 2 => a2 = slice.asInstanceOf[Arr2] + case 3 => a3 = slice.asInstanceOf[Arr3] + case 4 => a4 = slice.asInstanceOf[Arr4] + case 5 => a5 = slice.asInstanceOf[Arr5] + case 6 => a6 = slice.asInstanceOf[Arr6] + } + sliceEnd = sliceStart + slice.length * (1 << (BITS*(sliceDim-1))) + if(sliceEnd > totalLength) sliceEnd = totalLength + if(sliceDim > 1) oldPos = (1 << (BITS*sliceDim))-1 + } + + private[this] def advance(): Unit = { + val pos = i1-len1+totalLength + if(pos == sliceEnd) advanceSlice() + if(sliceDim > 1) { + val io = pos - sliceStart + val xor = oldPos ^ io + advanceA(io, xor) + oldPos = io + } + len1 -= i1 + a1len = mmin(a1.length, len1) + i1 = 0 + } + + private[this] def advanceA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2(0) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3(0) + a1 = a2(0) + } else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) + } else { + a5 = a6(io >>> BITS5) + a4 = a5(0) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) + } + } + + private[this] def setA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } 
else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else { + a5 = a6(io >>> BITS5) + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } + } + + override def drop(n: Int): Iterator[A] = { + if(n > 0) { + val oldpos = i1-len1+totalLength + val newpos = mmin(oldpos + n, totalLength) + if(newpos == totalLength) { + i1 = 0 + len1 = 0 + a1len = 0 + } else { + while(newpos >= sliceEnd) advanceSlice() + val io = newpos - sliceStart + if(sliceDim > 1) { + val xor = oldPos ^ io + setA(io, xor) + oldPos = io + } + a1len = a1.length + i1 = io & MASK + len1 = i1 + (totalLength-newpos) + if(a1len > len1) a1len = len1 + } + } + this + } + + override def take(n: Int): Iterator[A] = { + if(n < knownSize) { + val trunc = knownSize - mmax(0, n) + totalLength -= trunc + len1 -= trunc + if(len1 < a1len) a1len = len1 + if(totalLength < sliceEnd) sliceEnd = totalLength + } + this + } + + override def slice(from: Int, until: Int): Iterator[A] = { + val _until = + if(from > 0) { + drop(from) + until - from + } else until + take(_until) + } + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val xsLen = xs.length + val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len) + var copied = 0 + val isBoxed = xs.isInstanceOf[Array[AnyRef]] + while(copied < total) { + if(i1 == a1len) advance() + val count = mmin(total-copied, a1.length-i1) + if(isBoxed) System.arraycopy(a1, i1, xs, start+copied, count) + else Array.copy(a1, i1, xs, start+copied, count) + i1 += count + copied += count + } + total + } + + override def toVector: Vector[A] = + v.slice(i1-len1+totalLength, totalLength) + + protected[immutable] def split(at: Int): NewVectorIterator[A] = { + val it2 = clone().asInstanceOf[NewVectorIterator[A]] + it2.take(at) + drop(at) + it2 + } +} + + +private abstract class VectorStepperBase[A, Sub >: Null <: Stepper[A], Semi <: Sub](it: NewVectorIterator[A]) + extends Stepper[A] with EfficientSplit { + + protected[this] def build(it: NewVectorIterator[A]): Semi + + final def hasStep: Boolean = it.hasNext + + final def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED + + final def estimateSize: Long = it.knownSize + + def trySplit(): Sub = { + val len = it.knownSize + if(len > 1) build(it.split(len >>> 1)) + else null + } + + override final def iterator: Iterator[A] = it +} + +private class AnyVectorStepper[A](it: NewVectorIterator[A]) + extends VectorStepperBase[A, AnyStepper[A], AnyVectorStepper[A]](it) with AnyStepper[A] { + protected[this] def build(it: NewVectorIterator[A]) = new AnyVectorStepper(it) + def nextStep(): A = it.next() +} + +private class DoubleVectorStepper(it: NewVectorIterator[Double]) + extends VectorStepperBase[Double, DoubleStepper, DoubleVectorStepper](it) with DoubleStepper { + protected[this] def build(it: NewVectorIterator[Double]) = new DoubleVectorStepper(it) + def nextStep(): Double = it.next() +} + +private class IntVectorStepper(it: NewVectorIterator[Int]) + extends VectorStepperBase[Int, IntStepper, IntVectorStepper](it) with IntStepper { + protected[this] def build(it: NewVectorIterator[Int]) = new IntVectorStepper(it) + def nextStep(): Int = it.next() +} + +private class LongVectorStepper(it: NewVectorIterator[Long]) + extends VectorStepperBase[Long, LongStepper, LongVectorStepper](it) with LongStepper { + protected[this] 
def build(it: NewVectorIterator[Long]) = new LongVectorStepper(it) + def nextStep(): Long = it.next() +} + + +// The following definitions are needed for binary compatibility with ParVector +private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] { + private[immutable] var it: NewVectorIterator[A @uncheckedVariance] = _ + def hasNext: Boolean = it.hasNext + def next(): A = it.next() + private[collection] def remainingElementCount: Int = it.size + private[collection] def remainingVector: Vector[A] = it.toVector +} diff --git a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala new file mode 100644 index 000000000000..cd8cf06c5c68 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala @@ -0,0 +1,275 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec + +/** This class implements immutable maps using a vector/map-based data structure, which preserves insertion order. + * + * Unlike `ListMap`, `VectorMap` has amortized effectively constant lookup at the expense + * of using extra memory and generally lower performance for other operations + * + * @tparam K the type of the keys contained in this vector map. + * @tparam V the type of the values associated with the keys in this vector map. + * + * @define coll immutable vector map + * @define Coll `immutable.VectorMap` + */ +final class VectorMap[K, +V] private ( + private[immutable] val fields: Vector[Any], + private[immutable] val underlying: Map[K, (Int, V)], dropped: Int) + extends AbstractMap[K, V] + with SeqMap[K, V] + with StrictOptimizedMapOps[K, V, VectorMap, VectorMap[K, V]] + with MapFactoryDefaults[K, V, VectorMap, Iterable] { + + import VectorMap._ + + override protected[this] def className: String = "VectorMap" + + private[immutable] def this(fields: Vector[K], underlying: Map[K, (Int, V)]) = { + this(fields, underlying, 0) + } + + override val size = underlying.size + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + def updated[V1 >: V](key: K, value: V1): VectorMap[K, V1] = { + underlying.get(key) match { + case Some((slot, _)) => + new VectorMap(fields, underlying.updated[(Int, V1)](key, (slot, value)), dropped) + case None => + new VectorMap(fields :+ key, underlying.updated[(Int, V1)](key, (fields.length + dropped, value)), dropped) + } + } + + override def withDefault[V1 >: V](d: K => V1): Map[K, V1] = + new Map.WithDefault(this, d) + + override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = + new Map.WithDefault[K, V1](this, _ => d) + + def get(key: K): Option[V] = underlying.get(key) match { + case Some(v) => Some(v._2) + case None => None + } + + @tailrec + private def nextValidField(slot: Int): (Int, K) = { + if (slot >= fields.size) (-1, null.asInstanceOf[K]) + else fields(slot) match { + case Tombstone(distance) => + nextValidField(slot + distance) + case k => + (slot, k.asInstanceOf[K]) + } + } + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val fieldsLength = fields.length + private[this] var slot = -1 + private[this] var key: K = 
null.asInstanceOf[K] + + private[this] def advance(): Unit = { + val nextSlot = slot + 1 + if (nextSlot >= fieldsLength) { + slot = fieldsLength + key = null.asInstanceOf[K] + } else { + nextValidField(nextSlot) match { + case (-1, _) => + slot = fieldsLength + key = null.asInstanceOf[K] + case (s, k) => + slot = s + key = k + } + } + } + + advance() + + override def hasNext: Boolean = slot < fieldsLength + + override def next(): (K, V) = { + if (!hasNext) throw new NoSuchElementException("next called on depleted iterator") + val result = (key, underlying(key)._2) + advance() + result + } + } + + // No-Op overrides to allow for more efficient steppers in a minor release. + // Refining the return type to `S with EfficientSplit` is binary compatible. + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S = super.stepper(shape) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = super.keyStepper(shape) + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = super.valueStepper(shape) + + + def removed(key: K): VectorMap[K, V] = { + if (isEmpty) empty + else { + var fs = fields + val sz = fs.size + underlying.get(key) match { + case Some(_) if size == 1 => empty + case Some((slot, _)) => + val s = slot - dropped + + // Calculate next of kin + val next = + if (s < sz - 1) fs(s + 1) match { + case Tombstone(d) => s + d + 1 + case _ => s + 1 + } else s + 1 + + fs = fs.updated(s, Tombstone(next - s)) + + // Calculate first index of preceding tombstone sequence + val first = + if (s > 0) { + fs(s - 1) match { + case Tombstone(d) if d < 0 => if (s + d >= 0) s + d else 0 + case Tombstone(d) if d == 1 => s - 1 + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case _ => s + } + }else s + fs = fs.updated(first, Tombstone(next - first)) + + // Calculate last index of succeeding tombstone sequence + val last = next - 1 + if (last != first) { + fs = fs.updated(last, Tombstone(first - 1 - last)) + } + new VectorMap(fs, underlying - key, dropped) + case _ => + this + } + } + } + + override def mapFactory: MapFactory[VectorMap] = VectorMap + + override def contains(key: K): Boolean = underlying.contains(key) + + override def head: (K, V) = iterator.next() + + override def last: (K, V) = { + if (isEmpty) throw new UnsupportedOperationException("empty.last") + val lastSlot = fields.length - 1 + val last = fields.last match { + case Tombstone(d) if d < 0 => fields(lastSlot + d).asInstanceOf[K] + case Tombstone(d) if d == 1 => fields(lastSlot - 1).asInstanceOf[K] + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case k => k.asInstanceOf[K] + } + (last, underlying(last)._2) + } + + override def lastOption: Option[(K, V)] = { + if (isEmpty) None + else Some(last) + } + + override def tail: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.tail") + val (slot, key) = nextValidField(0) + new VectorMap(fields.drop(slot + 1), underlying - key, dropped + slot + 1) + } + + override def init: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.init") + val lastSlot = fields.size - 1 + val (slot, key) = fields.last match { + case Tombstone(d) if d < 0 => (lastSlot + d, fields(lastSlot + d).asInstanceOf[K]) + case Tombstone(d) if d == 1 => (lastSlot - 1, fields(lastSlot - 1).asInstanceOf[K]) + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong 
position: " + d) + case k => (lastSlot, k.asInstanceOf[K]) + } + new VectorMap(fields.dropRight(fields.size - slot), underlying - key, dropped) + } + + override def keys: Vector[K] = keysIterator.toVector + + override def values: Iterable[V] = new Iterable[V] with IterableFactoryDefaults[V, Iterable] { + override def iterator: Iterator[V] = keysIterator.map(underlying(_)._2) + } +} + +object VectorMap extends MapFactory[VectorMap] { + //Class to mark deleted slots in 'fields'. + //When one or more consecutive slots are deleted, the 'distance' of the first 'Tombstone' + // represents the distance to the location of the next undeleted slot (or the last slot in 'fields' +1 if it does not exist). + //When two or more consecutive slots are deleted, the 'distance' of the trailing 'Tombstone' + // represents the distance to the location of the previous undeleted slot ( or -1 if it does not exist) multiplied by -1. + //For other deleted slots, it simply indicates that they have been deleted. + private[VectorMap] final case class Tombstone(distance: Int) + + private[this] final val EmptyMap: VectorMap[Nothing, Nothing] = + new VectorMap[Nothing, Nothing](Vector.empty[Nothing], HashMap.empty[Nothing, (Int, Nothing)]) + + def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): VectorMap[K, V] = + it match { + case vm: VectorMap[K, V] => vm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: mutable.Builder[(K, V), VectorMap[K, V]] = new VectorMapBuilder[K, V] +} + +private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { + private[this] val vectorBuilder = new VectorBuilder[K] + private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] + private[this] var aliased: VectorMap[K, V] = _ + + override def clear(): Unit = { + vectorBuilder.clear() + mapBuilder.clear() + aliased = null + } + + override def result(): VectorMap[K, V] = { + if (aliased eq null) { + aliased = new VectorMap(vectorBuilder.result(), mapBuilder.result()) + } + aliased + } + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + mapBuilder.getOrElse(key, null) match { + case (slot, _) => + mapBuilder.addOne(key, (slot, value)) + case null => + val vectorSize = vectorBuilder.size + vectorBuilder.addOne(key) + mapBuilder.addOne(key, (vectorSize, value)) + } + } + this + } + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) +} diff --git a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala new file mode 100644 index 000000000000..f2fdb8e3c32e --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala @@ -0,0 +1,140 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.Predef.{wrapString => _, assert} +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.CharStringStepper +import scala.collection.mutable.{Builder, StringBuilder} + +/** + * This class serves as a wrapper augmenting `String`s with all the operations + * found in indexed sequences. 
+ * + * The difference between this class and `StringOps` is that calling transformer + * methods such as `filter` and `map` will yield an object of type `WrappedString` + * rather than a `String`. + * + * @param self a string contained within this wrapped string + * + * @define Coll `WrappedString` + * @define coll wrapped string + */ +@SerialVersionUID(3L) +final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] + with IndexedSeqOps[Char, IndexedSeq, WrappedString] + with Serializable { + + def apply(i: Int): Char = self.charAt(i) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder + override def empty: WrappedString = WrappedString.empty + + override def slice(from: Int, until: Int): WrappedString = { + val start = if (from < 0) 0 else from + if (until <= start || start >= self.length) + return WrappedString.empty + + val end = if (until > length) length else until + new WrappedString(self.substring(start, end)) + } + override def length = self.length + override def toString = self + override def view: StringView = new StringView(self) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = { + val st = new CharStringStepper(self, 0, self.length) + val r = + if (shape.shape == StepperShape.CharShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + override def startsWith[B >: Char](that: IterableOnce[B], offset: Int = 0): Boolean = + that match { + case s: WrappedString => self.startsWith(s.self, offset) + case _ => super.startsWith(that, offset) + } + + override def endsWith[B >: Char](that: collection.Iterable[B]): Boolean = + that match { + case s: WrappedString => self.endsWith(s.self) + case _ => super.endsWith(that) + } + + override def indexOf[B >: Char](elem: B, from: Int = 0): Int = elem match { + case c: Char => self.indexOf(c, from) + case _ => super.indexOf(elem, from) + } + + override def lastIndexOf[B >: Char](elem: B, end: Int = length - 1): Int = + elem match { + case c: Char => self.lastIndexOf(c, end) + case _ => super.lastIndexOf(elem, end) + } + + override def copyToArray[B >: Char](xs: Array[B], start: Int, len: Int): Int = + (xs: Any) match { + case chs: Array[Char] => + val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len) + self.getChars(0, copied, chs, start) + copied + case _ => super.copyToArray(xs, start, len) + } + + override def appendedAll[B >: Char](suffix: IterableOnce[B]): IndexedSeq[B] = + suffix match { + case s: WrappedString => new WrappedString(self concat s.self) + case _ => super.appendedAll(suffix) + } + + override def sameElements[B >: Char](o: IterableOnce[B]) = o match { + case s: WrappedString => self == s.self + case _ => super.sameElements(o) + } + + override protected[this] def className = "WrappedString" + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + override def equals(other: Any): Boolean = other match { + case that: WrappedString => + this.self == that.self + case _ => + super.equals(other) + } +} + +/** A companion object for wrapped strings. 
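+ *
+ * For instance (an illustrative sketch):
+ * {{{
+ * val ws = WrappedString.fromSpecific("ab".iterator) // wraps "ab"
+ * ws.unwrap                                          // "ab", via the UnwrapOp extension
+ * }}}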
+ */ +@SerialVersionUID(3L) +object WrappedString extends SpecificIterableFactory[Char, WrappedString] { + def fromSpecific(it: IterableOnce[Char]): WrappedString = { + val b = newBuilder + val s = it.knownSize + if(s >= 0) b.sizeHint(s) + b ++= it + b.result() + } + val empty: WrappedString = new WrappedString("") + def newBuilder: Builder[Char, WrappedString] = + new StringBuilder().mapResult(x => new WrappedString(x)) + + implicit class UnwrapOp(private val value: WrappedString) extends AnyVal { + def unwrap: String = value.self + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/package.scala b/tests/pos-special/stdlib/collection/immutable/package.scala new file mode 100644 index 000000000000..8458429727e8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/package.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + + +package object immutable { + type StringOps = scala.collection.StringOps + val StringOps = scala.collection.StringOps + type StringView = scala.collection.StringView + val StringView = scala.collection.StringView + + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + + @deprecated("Use Map instead of DefaultMap", "2.13.0") + type DefaultMap[K, +V] = scala.collection.immutable.Map[K, V] +} diff --git a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala new file mode 100644 index 000000000000..c02a10770696 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala @@ -0,0 +1,601 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions + +/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. + * + * Basic map operations on single entries, including `contains` and `get`, + * are typically significantly faster with `AnyRefMap` than [[HashMap]]. + * Note that numbers and characters are not handled specially in AnyRefMap; + * only plain `equals` and `hashCode` are used in comparisons. + * + * Methods that traverse or regenerate the map, including `foreach` and `map`, + * are not in general faster than with `HashMap`. The methods `foreachKey`, + * `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster + * than alternative ways to achieve the same functionality. + * + * Maps with open addressing may become less efficient at lookup after + * repeated addition/removal of elements. Although `AnyRefMap` makes a + * decent attempt to remain efficient regardless, calling `repack` + * on a map that will no longer have elements removed but will be + * used heavily may save both time and storage space. 
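+ *
+ * A minimal usage sketch (illustrative only):
+ * {{{
+ * val m = AnyRefMap("a" -> "x")
+ * m("b") = "y"     // `update` is the fastest way to add an entry
+ * m.getOrNull("c") // null, without allocating an Option
+ * }}}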
+ * + * This map is not intended to contain more than 2^29^ entries (approximately + * 500 million). The maximum capacity is 2^30^, but performance will degrade + * rapidly as 2^30^ is approached. + * + */ +class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean) + extends AbstractMap[K, V] + with MapOps[K, V, Map, AnyRefMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]] + with Serializable { + + import AnyRefMap._ + def this() = this(AnyRefMap.exceptionDefault, 16, true) + + /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ + def this(defaultEntry: K => V) = this(defaultEntry, 16, true) + + /** Creates a new `AnyRefMap` with an initial buffer of specified size. + * + * An `AnyRefMap` can typically contain half as many elements as its buffer size + * before it requires resizing. + */ + def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) + + /** Creates a new `AnyRefMap` with specified default values and initial buffer size. */ + def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + + private[this] var mask = 0 + private[this] var _size = 0 + private[this] var _vacant = 0 + private[this] var _hashes: Array[Int] = null + private[this] var _keys: Array[AnyRef] = null + private[this] var _values: Array[AnyRef] = null + + if (initBlank) defaultInitialize(initialBufferSize) + + private[this] def defaultInitialize(n: Int): Unit = { + mask = + if (n<0) 0x7 + else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 + _hashes = new Array[Int](mask+1) + _keys = new Array[AnyRef](mask+1) + _values = new Array[AnyRef](mask+1) + } + + private[collection] def initializeTo( + m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef] + ): Unit = { + mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz + } + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): AnyRefMap[K,V] = { + var sz = coll.knownSize + if(sz < 0) sz = 4 + val arm = new AnyRefMap[K, V](sz * 2) + coll.iterator.foreach{ case (k,v) => arm(k) = v } + if (arm.size < (sz>>3)) arm.repack() + arm + } + override protected def newSpecificBuilder: Builder[(K, V), AnyRefMap[K,V]] = new AnyRefMapBuilder + + override def size: Int = _size + override def knownSize: Int = size + override def isEmpty: Boolean = _size == 0 + override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry) + + private def imbalanced: Boolean = + (_size + _vacant) > 0.5*mask || _vacant > _size + + private def hashOf(key: K): Int = { + // Note: this method must not return 0 or Int.MinValue, as these indicate no element + if (key eq null) 0x41081989 + else { + val h = key.hashCode + // Part of the MurmurHash3 32 bit finalizer + val i = (h ^ (h >>> 16)) * 0x85EBCA6B + val j = (i ^ (i >>> 13)) & 0x7FFFFFFF + if (j==0) 0x41081989 else j + } + } + + private def seekEntry(h: Int, k: AnyRef): Int = { + var e = h & mask + var x = 0 + var g = 0 + val hashes = _hashes + val keys = _keys + while ({ g = hashes(e); g != 0}) { + if (g == h && { val q = keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + e | MissingBit + } + + @`inline` private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { + var e = h & mask + var x = 0 + var g = 0 + var o = -1 + while ({ g = _hashes(e); g != 0}) { + if (g == h 
&& { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e + else if (o == -1 && g+g == 0) o = e + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + if (o >= 0) o | MissVacant else e | MissingBit + } + + override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0 + + override def get(key: K): Option[V] = { + val i = seekEntry(hashOf(key), key) + if (i < 0) None else Some(_values(i).asInstanceOf[V]) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val i = seekEntry(hashOf(key), key) + if (i < 0) default else _values(i).asInstanceOf[V] + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val h = hashOf(key) + var i = seekEntryOrOpen(h, key) + if (i < 0) { + // It is possible that the default value computation was side-effecting + // Our hash table may have resized or even contain what we want now + // (but if it does, we'll replace it) + val value = { + val oh = _hashes + val ans = defaultValue + if (oh ne _hashes) { + i = seekEntryOrOpen(h, key) + if (i >= 0) _size -= 1 + } + ans + } + _size += 1 + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key.asInstanceOf[AnyRef] + _values(j) = value.asInstanceOf[AnyRef] + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + value + } + else _values(i).asInstanceOf[V] + } + + /** Retrieves the value associated with a key, or the default for that type if none exists + * (null for AnyRef, 0 for floats and integers). + * + * Note: this is the fastest way to retrieve a value that may or + * may not exist, if the default null/zero is acceptable. For key/value + * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. + */ + def getOrNull(key: K): V = { + val i = seekEntry(hashOf(key), key) + (if (i < 0) null else _values(i)).asInstanceOf[V] + } + + /** Retrieves the value associated with a key. + * If the key does not exist in the map, the `defaultEntry` for that key + * will be returned instead; an exception will be thrown if no + * `defaultEntry` was supplied. + */ + override def apply(key: K): V = { + val i = seekEntry(hashOf(key), key) + if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] + } + + /** Defers to defaultEntry to find a default value for the key. Throws an + * exception if no other default behavior was specified. + */ + override def default(key: K): V = defaultEntry(key) + + private def repack(newMask: Int): Unit = { + val oh = _hashes + val ok = _keys + val ov = _values + mask = newMask + _hashes = new Array[Int](mask+1) + _keys = new Array[AnyRef](mask+1) + _values = new Array[AnyRef](mask+1) + _vacant = 0 + var i = 0 + while (i < oh.length) { + val h = oh(i) + if (h+h != 0) { + var e = h & mask + var x = 0 + while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + _hashes(e) = h + _keys(e) = ok(i) + _values(e) = ov(i) + } + i += 1 + } + } + + /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup. + * + * For maps that undergo a complex creation process with both addition and + * removal of keys, and then are used heavily with no further removal of + * elements, calling `repack` after the end of the creation can result in + * improved performance. Repacking takes time proportional to the number + * of entries in the map. 
*/ + def repack(): Unit = { + var m = mask + if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask + while (m > 8 && 8*_size < m) m = m >>> 1 + repack(m) + } + + override def put(key: K, value: V): Option[V] = { + val h = hashOf(key) + val i = seekEntryOrOpen(h, key) + if (i < 0) { + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + None + } + else { + val ans = Some(_values(i).asInstanceOf[V]) + _hashes(i) = h + _values(i) = value.asInstanceOf[AnyRef] + ans + } + } + + /** Updates the map to include a new key-value pair. + * + * This is the fastest way to add an entry to an `AnyRefMap`. + */ + override def update(key: K, value: V): Unit = { + val h = hashOf(key) + val i = seekEntryOrOpen(h, key) + if (i < 0) { + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + } + else { + _hashes(i) = h + _values(i) = value.asInstanceOf[AnyRef] + } + } + + /** Adds a new key/value pair to this map and returns the map. */ + @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") + def +=(key: K, value: V): this.type = { update(key, value); this } + + /** Adds a new key/value pair to this map and returns the map. */ + @inline final def addOne(key: K, value: V): this.type = { update(key, value); this } + + @inline override final def addOne(kv: (K, V)): this.type = { update(kv._1, kv._2); this } + + def subtractOne(key: K): this.type = { + val i = seekEntry(hashOf(key), key) + if (i >= 0) { + _size -= 1 + _vacant += 1 + _hashes(i) = Int.MinValue + _keys(i) = null + _values(i) = null + } + this + } + + def iterator: Iterator[(K, V)] = new AnyRefMapIterator[(K, V)] { + protected def nextResult(k: K, v: V) = (k, v) + } + override def keysIterator: Iterator[K] = new AnyRefMapIterator[K] { + protected def nextResult(k: K, v: V) = k + } + override def valuesIterator: Iterator[V] = new AnyRefMapIterator[V] { + protected def nextResult(k: K, v: V) = v + } + + private abstract class AnyRefMapIterator[A] extends AbstractIterator[A] { + private[this] val hz = _hashes + private[this] val kz = _keys + private[this] val vz = _values + + private[this] var index = 0 + + def hasNext: Boolean = index < hz.length && { + var h = hz(index) + while (h+h == 0) { + index += 1 + if (index >= hz.length) return false + h = hz(index) + } + true + } + + def next(): A = { + if (hasNext) { + val ans = nextResult(kz(index).asInstanceOf[K], vz(index).asInstanceOf[V]) + index += 1 + ans + } + else throw new NoSuchElementException("next") + } + + protected def nextResult(k: K, v: V): A + } + + + override def foreach[U](f: ((K,V)) => U): Unit = { + var i = 0 + var e = _size + while (e > 0) { + while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 + if (i < _hashes.length) { + f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V])) + i += 1 + e -= 1 + } + else return + } + } + + override def foreachEntry[U](f: (K,V) => U): Unit = { + var i = 0 + var e = _size + while (e > 0) { + while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 + if (i < _hashes.length) { + f(_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]) + i += 1 + e -= 1 + } + else return + } + } + + override def clone(): AnyRefMap[K, V] = { + val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) + val kz = 
java.util.Arrays.copyOf(_keys, _keys.length) + val vz = java.util.Arrays.copyOf(_values, _values.length) + val arm = new AnyRefMap[K, V](defaultEntry, 1, false) + arm.initializeTo(mask, _size, _vacant, hz, kz, vz) + arm + } + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { + val m = this + elem1 + elem2 + if(elems.isEmpty) m else m.concat(elems) + } + + override def concat[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = { + val arm = clone().asInstanceOf[AnyRefMap[K, V2]] + xs.iterator.foreach(kv => arm += kv) + arm + } + + override def ++[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = concat(xs) + + @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = + clone().asInstanceOf[AnyRefMap[K, V1]].addOne(key, value) + + private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + f(elems(i).asInstanceOf[A]) + } + i += 1 + } + } + + /** Applies a function to all keys of this map. */ + def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys, f) + + /** Applies a function to all values of this map. */ + def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values, f) + + /** Creates a new `AnyRefMap` with different values. + * Unlike `mapValues`, this method generates a new + * collection immediately. + */ + def mapValuesNow[V1](f: V => V1): AnyRefMap[K, V1] = { + val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) + val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = new Array[AnyRef](_values.length) + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + arm.initializeTo(mask, _size, _vacant, hz, kz, vz) + arm + } + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. 
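+   *
+   *  For example (illustrative):
+   *  {{{
+   *  val m = AnyRefMap("a" -> 1, "b" -> 2)
+   *  m.transformValuesInPlace(_ * 10) // m now maps "a" -> 10 and "b" -> 20
+   *  }}}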
+ */ + def transformValuesInPlace(f: V => V): this.type = { + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + this + } + + // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) + def map[K2 <: AnyRef, V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.Map(this, f)) + def flatMap[K2 <: AnyRef, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.FlatMap(this, f)) + def collect[K2 <: AnyRef, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) + + override def clear(): Unit = { + import java.util.Arrays.fill + fill(_keys, null) + fill(_values, null) + fill(_hashes, 0) + _size = 0 + _vacant = 0 + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(AnyRefMap.toFactory[K, V](AnyRefMap), this) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "AnyRefMap" +} + +object AnyRefMap { + private final val IndexMask = 0x3FFFFFFF + private final val MissingBit = 0x80000000 + private final val VacantBit = 0x40000000 + private final val MissVacant = 0xC0000000 + + private class ExceptionDefault extends (Any => Nothing) with Serializable { + def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) + } + private val exceptionDefault = new ExceptionDefault + + /** A builder for instances of `AnyRefMap`. + * + * This builder can be reused to create multiple instances. + */ + final class AnyRefMapBuilder[K <: AnyRef, V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { + private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] + def addOne(entry: (K, V)): this.type = { + elems += entry + this + } + def clear(): Unit = elems = new AnyRefMap[K, V] + def result(): AnyRefMap[K, V] = elems + override def knownSize: Int = elems.knownSize + } + + /** Creates a new `AnyRefMap` with zero or more key/value pairs. */ + def apply[K <: AnyRef, V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) + + def newBuilder[K <: AnyRef, V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] + + private def buildFromIterableOnce[K <: AnyRef, V](elems: IterableOnce[(K, V)]): AnyRefMap[K, V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 + val arm = new AnyRefMap[K, V](sz * 2) + elems.iterator.foreach{ case (k,v) => arm(k) = v } + if (arm.size < (sz>>3)) arm.repack() + arm + } + + /** Creates a new empty `AnyRefMap`. */ + def empty[K <: AnyRef, V]: AnyRefMap[K, V] = new AnyRefMap[K, V] + + /** Creates a new empty `AnyRefMap` with the supplied default */ + def withDefault[K <: AnyRef, V](default: K => V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) + + /** Creates a new `AnyRefMap` from an existing source collection. A source collection + * which is already an `AnyRefMap` gets cloned. 
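+   *
+   *  For instance (an illustrative sketch):
+   *  {{{
+   *  val orig = AnyRefMap("a" -> 1)
+   *  val copy = AnyRefMap.from(orig) // a clone: updating `copy` leaves `orig` unchanged
+   *  }}}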
+ * + * @param source Source collection + * @tparam K the type of the keys + * @tparam V the type of the values + * @return a new `AnyRefMap` with the elements of `source` + */ + def from[K <: AnyRef, V](source: IterableOnce[(K, V)]): AnyRefMap[K, V] = source match { + case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]] + case _ => buildFromIterableOnce(source) + } + + /** Creates a new `AnyRefMap` from arrays of keys and values. + * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. + */ + def fromZip[K <: AnyRef, V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { + val sz = math.min(keys.length, values.length) + val arm = new AnyRefMap[K, V](sz * 2) + var i = 0 + while (i < sz) { arm(keys(i)) = values(i); i += 1 } + if (arm.size < (sz>>3)) arm.repack() + arm + } + + /** Creates a new `AnyRefMap` from keys and values. + * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. + */ + def fromZip[K <: AnyRef, V](keys: Iterable[K], values: Iterable[V]): AnyRefMap[K, V] = { + val sz = math.min(keys.size, values.size) + val arm = new AnyRefMap[K, V](sz * 2) + val ki = keys.iterator + val vi = values.iterator + while (ki.hasNext && vi.hasNext) arm(ki.next()) = vi.next() + if (arm.size < (sz >> 3)) arm.repack() + arm + } + + implicit def toFactory[K <: AnyRef, V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) + def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def toBuildFrom[K <: AnyRef, V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]) = AnyRefMap.from(it) + def newBuilder(from: Any) = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def iterableFactory[K <: AnyRef, V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) + implicit def buildFromAnyRefMap[K <: AnyRef, V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala new file mode 100644 index 000000000000..e3ddeb71ef8e --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala @@ -0,0 +1,403 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import java.util.Arrays + +import scala.annotation.nowarn +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable + +/** An implementation of the `Buffer` class using an array to + * represent the assembled sequence internally. Append, update and random + * access take constant time (amortized time). 
Prepends and removes are + * linear in the buffer size. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] + * section on `Array Buffers` for more information. + + * + * @tparam A the type of this arraybuffer's elements. + * + * @define Coll `mutable.ArrayBuffer` + * @define coll array buffer + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(-1582447879429021880L) +class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) + extends AbstractBuffer[A] + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with StrictOptimizedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with IterableFactoryDefaults[A, ArrayBuffer] + with DefaultSerializable { + + def this() = this(new Array[AnyRef](ArrayBuffer.DefaultInitialSize), 0) + + def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0) + + @transient private[this] var mutationCount: Int = 0 + + // needs to be `private[collection]` or `protected[collection]` for parallel-collections + protected[collection] var array: Array[AnyRef] = initialElements + protected var size0 = initialSize + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + shape.parUnbox(new ObjectArrayStepper(array, 0, length).asInstanceOf[AnyStepper[A] with EfficientSplit]) + } + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + /** Ensure that the internal array has at least `n` cells. */ + protected def ensureSize(n: Int): Unit = { + array = ArrayBuffer.ensureSize(array, size0, n) + } + + // TODO 3.T: should be `protected`, perhaps `protected[this]` + /** Ensure that the internal array has at least `n` additional cells more than `size0`. */ + private[mutable] def ensureAdditionalSize(n: Int): Unit = { + // `.toLong` to ensure `Long` arithmetic is used and prevent `Int` overflow + array = ArrayBuffer.ensureSize(array, size0, size0.toLong + n) + } + + def sizeHint(size: Int): Unit = + if(size > length && size >= 1) ensureSize(size) + + /** Reduce length to `n`, nulling out all dropped elements */ + private def reduceToSize(n: Int): Unit = { + mutationCount += 1 + Arrays.fill(array, n, size0, null) + size0 = n + } + + /** Trims the ArrayBuffer to an appropriate size for the current + * number of elements (rounding up to the next natural size), + * which may replace the array by a shorter one. + * This allows releasing some unused memory. + */ + def trimToSize(): Unit = { + resize(length) + } + + /** Trims the `array` buffer size down to either a power of 2 + * or Int.MaxValue while keeping first `requiredLength` elements. 
+ */ + private def resize(requiredLength: Int): Unit = + array = ArrayBuffer.downsize(array, requiredLength) + + @inline private def checkWithinBounds(lo: Int, hi: Int) = { + if (lo < 0) throw new IndexOutOfBoundsException(s"$lo is out of bounds (min 0, max ${size0 - 1})") + if (hi > size0) throw new IndexOutOfBoundsException(s"${hi - 1} is out of bounds (min 0, max ${size0 - 1})") + } + + def apply(n: Int): A = { + checkWithinBounds(n, n + 1) + array(n).asInstanceOf[A] + } + + def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { + checkWithinBounds(index, index + 1) + mutationCount += 1 + array(index) = elem.asInstanceOf[AnyRef] + } + + def length = size0 + + // TODO: return `IndexedSeqView` rather than `ArrayBufferView` + override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount) + + override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer + + /** Note: This does not actually resize the internal representation. + * See clearAndShrink if you want to also resize internally + */ + def clear(): Unit = reduceToSize(0) + + /** + * Clears this buffer and shrinks to @param size (rounding up to the next + * natural size) + * @param size + */ + def clearAndShrink(size: Int = ArrayBuffer.DefaultInitialSize): this.type = { + clear() + resize(size) + this + } + + def addOne(elem: A): this.type = { + mutationCount += 1 + ensureAdditionalSize(1) + val oldSize = size0 + size0 = oldSize + 1 + this(oldSize) = elem + this + } + + // Overridden to use array copying for efficiency where possible. + override def addAll(elems: IterableOnce[A]): this.type = { + elems match { + case elems: ArrayBuffer[_] => + val elemsLength = elems.size0 + if (elemsLength > 0) { + mutationCount += 1 + ensureAdditionalSize(elemsLength) + Array.copy(elems.array, 0, array, length, elemsLength) + size0 = length + elemsLength + } + case _ => super.addAll(elems) + } + this + } + + def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { + checkWithinBounds(index, index) + mutationCount += 1 + ensureAdditionalSize(1) + Array.copy(array, index, array, index + 1, size0 - index) + size0 += 1 + this(index) = elem + } + + def prepend(elem: A): this.type = { + insert(0, elem) + this + } + + def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]): Unit = { + checkWithinBounds(index, index) + elems match { + case elems: collection.Iterable[A] => + val elemsLength = elems.size + if (elemsLength > 0) { + mutationCount += 1 + ensureAdditionalSize(elemsLength) + val len = size0 + Array.copy(array, index, array, index + elemsLength, len - index) + // if `elems eq this`, this copy is safe because + // - `elems.array eq this.array` + // - we didn't overwrite the values being inserted after moving them in + // the previous line + // - `copyElemsToArray` will call `System.arraycopy` + // - `System.arraycopy` will effectively "read" all the values before + // overwriting any of them when two arrays are the the same reference + val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) + if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength") + size0 = len + elemsLength // update size AFTER the copy, in case we're inserting a proxy + } + case _ => insertAll(index, ArrayBuffer.from(elems)) + } + } + + /** Note: This does not actually resize the internal representation. 
+ * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int): A = { + checkWithinBounds(index, index + 1) + val res = this(index) + Array.copy(array, index + 1, array, index, size0 - (index + 1)) + reduceToSize(size0 - 1) + res + } + + /** Note: This does not actually resize the internal representation. + * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int, count: Int): Unit = + if (count > 0) { + checkWithinBounds(index, index + count) + Array.copy(array, index + count, array, index, size0 - (index + count)) + reduceToSize(size0 - count) + } else if (count < 0) { + throw new IllegalArgumentException("removing negative number of elements: " + count) + } + + @deprecated("Use 'this' instance instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def result(): this.type = this + + @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo] = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ArrayBuffer" + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + /** Sorts this $coll in place according to an Ordering. + * + * @see [[scala.collection.mutable.IndexedSeqOps.sortInPlace]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. + */ + override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) { + mutationCount += 1 + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + } + this + } + + @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = + if (start == end) z + else foldl(start + 1, end, op(z, array(start).asInstanceOf[A]), op) + + @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = + if (start == end) z + else foldr(start, end - 1, op(array(end - 1).asInstanceOf[A], z), op) + + override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op) + + override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op) + + override def reduceLeft[B >: A](op: (B, A) => B): B = if (length > 0) foldl(1, length, array(0).asInstanceOf[B], op) else super.reduceLeft(op) + + override def reduceRight[B >: A](op: (A, B) => B): B = if (length > 0) foldr(0, length - 1, array(length - 1).asInstanceOf[B], op) else super.reduceRight(op) +} + +/** + * Factory object for the `ArrayBuffer` class. 
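+ *
+ * For example (illustrative):
+ * {{{
+ * val buf  = ArrayBuffer(1, 2, 3)          // via apply
+ * val more = ArrayBuffer.from(List(4, 5))  // via from, pre-sized when the source size is known
+ * }}}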
+ * + * $factoryInfo + * + * @define coll array buffer + * @define Coll `mutable.ArrayBuffer` + */ +@SerialVersionUID(3L) +object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { + final val DefaultInitialSize = 16 + private[this] val emptyArray = new Array[AnyRef](0) + + def from[B](coll: collection.IterableOnce[B]): ArrayBuffer[B] = { + val k = coll.knownSize + if (k >= 0) { + // Avoid reallocation of buffer if length is known + val array = ensureSize(emptyArray, 0, k) // don't duplicate sizing logic, and check VM array size limit + val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + new ArrayBuffer[B](array, k) + } + else new ArrayBuffer[B] ++= coll + } + + def newBuilder[A]: Builder[A, ArrayBuffer[A]] = + new GrowableBuilder[A, ArrayBuffer[A]](empty) { + override def sizeHint(size: Int): Unit = elems.ensureSize(size) + } + + def empty[A]: ArrayBuffer[A] = new ArrayBuffer[A]() + + /** + * @param arrayLen the length of the backing array + * @param targetLen the minimum length to resize up to + * @return -1 if no resizing is needed, or the size for the new array otherwise + */ + private def resizeUp(arrayLen: Long, targetLen: Long): Int = { + if (targetLen <= arrayLen) -1 + else { + if (targetLen > Int.MaxValue) throw new Exception(s"Collections cannot have more than ${Int.MaxValue} elements") + IterableOnce.checkArraySizeWithinVMLimit(targetLen.toInt) // safe because `targetSize <= Int.MaxValue` + + val newLen = math.max(targetLen, math.max(arrayLen * 2, DefaultInitialSize)) + math.min(newLen, scala.runtime.PStatics.VM_MaxArraySize).toInt + } + } + // if necessary, copy (curSize elements of) the array to a new array of capacity n. + // Should use Array.copyOf(array, resizeEnsuring(array.length))? + private def ensureSize(array: Array[AnyRef], curSize: Int, targetSize: Long): Array[AnyRef] = { + val newLen = resizeUp(array.length, targetSize) + if (newLen < 0) array + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, curSize) + res + } + } + + /** + * @param arrayLen the length of the backing array + * @param targetLen the length to resize down to, if smaller than `arrayLen` + * @return -1 if no resizing is needed, or the size for the new array otherwise + */ + private def resizeDown(arrayLen: Int, targetLen: Int): Int = + if (targetLen >= arrayLen) -1 else math.max(targetLen, 0) + private def downsize(array: Array[AnyRef], targetSize: Int): Array[AnyRef] = { + val newLen = resizeDown(array.length, targetSize) + if (newLen < 0) array + else if (newLen == 0) emptyArray + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, targetSize) + res + } + } +} + +// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` +final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () => Int) + extends AbstractIndexedSeqView[A] { + @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") + def this(array: Array[AnyRef], length: Int) = { + // this won't actually track mutation, but it would be a pain to have the implementation + // check if we have a method to get the current mutation count or not on every method and + // change what it does based on that. hopefully no one ever calls this. 
+ this({ + val _array = array + val _length = length + new ArrayBuffer[A](0) { + this.array = _array + this.size0 = _length + } + }, () => 0) + } + + @deprecated("never intended to be public", since = "2.13.7") + def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] + + @throws[IndexOutOfBoundsException] + def apply(n: Int): A = underlying(n) + def length: Int = underlying.length + override protected[this] def className = "ArrayBufferView" + + // we could inherit all these from `CheckedIndexedSeqView`, except this class is public + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala new file mode 100644 index 000000000000..454527bcdebd --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala @@ -0,0 +1,522 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.reflect.ClassTag + +/** A builder class for arrays. + * + * @tparam T the type of the elements for the builder. 
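+ *
+ * A typical use (an illustrative sketch):
+ * {{{
+ * val b = ArrayBuilder.make[Int]
+ * b += 1
+ * b ++= Seq(2, 3)
+ * val arr: Array[Int] = b.result()   // Array(1, 2, 3)
+ * }}}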
+ */ +@SerialVersionUID(3L) +sealed abstract class ArrayBuilder[T] + extends ReusableBuilder[T, Array[T]] + with Serializable { + protected[this] var capacity: Int = 0 + protected[this] def elems: Array[T] + protected var size: Int = 0 + + def length: Int = size + + override def knownSize: Int = size + + protected[this] final def ensureSize(size: Int): Unit = { + if (capacity < size || capacity == 0) { + var newsize = if (capacity == 0) 16 else capacity * 2 + while (newsize < size) newsize *= 2 + resize(newsize) + } + } + + override final def sizeHint(size: Int): Unit = + if (capacity < size) resize(size) + + def clear(): Unit = size = 0 + + protected[this] def resize(size: Int): Unit + + /** Add all elements of an array */ + def addAll(xs: Array[_ <: T]): this.type = addAll(xs, 0, xs.length) + + /** Add a slice of an array */ + def addAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = { + ensureSize(this.size + length) + Array.copy(xs, offset, elems, this.size, length) + size += length + this + } + + override def addAll(xs: IterableOnce[T]): this.type = { + val k = xs.knownSize + if (k > 0) { + ensureSize(this.size + k) + val actual = IterableOnce.copyElemsToArray(xs, elems, this.size) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + size += k + } else if (k < 0) super.addAll(xs) + this + } +} + +/** A companion object for array builders. + */ +object ArrayBuilder { + + /** Creates a new arraybuilder of type `T`. + * + * @tparam T type of the elements for the array builder, with a `ClassTag` context bound. + * @return a new empty array builder. + */ + @inline def make[T: ClassTag]: ArrayBuilder[T] = { + val tag = implicitly[ClassTag[T]] + tag.runtimeClass match { + case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]] + case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]] + case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]] + case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]] + case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]] + case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]] + case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]] + case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]] + case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]] + case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] + } + } + + /** A class for array builders for arrays of reference types. + * + * This builder can be reused. + * + * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound. 
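+   *
+   * For example (a sketch):
+   * {{{
+   * val b = new ArrayBuilder.ofRef[String]()
+   * b += "a"
+   * b += "b"
+   * b.result()   // Array("a", "b")
+   * }}}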
+ */ + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](implicit ct: ClassTag[T]) extends ArrayBuilder[T] { + + protected var elems: Array[T] = _ + + private def mkArray(size: Int): Array[T] = { + if (capacity == size && capacity > 0) elems + else if (elems eq null) new Array[T](size) + else java.util.Arrays.copyOf[T](elems, size) + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: T): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[T] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def clear(): Unit = { + super.clear() + if(elems ne null) java.util.Arrays.fill(elems.asInstanceOf[Array[AnyRef]], null) + } + + override def equals(other: Any): Boolean = other match { + case x: ofRef[_] => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofRef" + } + + /** A class for array builders for arrays of `byte`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofByte extends ArrayBuilder[Byte] { + + protected var elems: Array[Byte] = _ + + private def mkArray(size: Int): Array[Byte] = { + val newelems = new Array[Byte](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Byte): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Byte] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofByte => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofByte" + } + + /** A class for array builders for arrays of `short`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofShort extends ArrayBuilder[Short] { + + protected var elems: Array[Short] = _ + + private def mkArray(size: Int): Array[Short] = { + val newelems = new Array[Short](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Short): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Short] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofShort => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofShort" + } + + /** A class for array builders for arrays of `char`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofChar extends ArrayBuilder[Char] { + + protected var elems: Array[Char] = _ + + private def mkArray(size: Int): Array[Char] = { + val newelems = new Array[Char](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Char): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Char] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofChar => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofChar" + } + + /** A class for array builders for arrays of `int`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofInt extends ArrayBuilder[Int] { + + protected var elems: Array[Int] = _ + + private def mkArray(size: Int): Array[Int] = { + val newelems = new Array[Int](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Int): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Int] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofInt => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofInt" + } + + /** A class for array builders for arrays of `long`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofLong extends ArrayBuilder[Long] { + + protected var elems: Array[Long] = _ + + private def mkArray(size: Int): Array[Long] = { + val newelems = new Array[Long](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Long): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Long] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofLong => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofLong" + } + + /** A class for array builders for arrays of `float`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofFloat extends ArrayBuilder[Float] { + + protected var elems: Array[Float] = _ + + private def mkArray(size: Int): Array[Float] = { + val newelems = new Array[Float](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Float): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Float] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofFloat => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofFloat" + } + + /** A class for array builders for arrays of `double`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofDouble extends ArrayBuilder[Double] { + + protected var elems: Array[Double] = _ + + private def mkArray(size: Int): Array[Double] = { + val newelems = new Array[Double](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Double): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Double] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofDouble => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofDouble" + } + + /** A class for array builders for arrays of `boolean`s. It can be reused. */ + @SerialVersionUID(3L) + class ofBoolean extends ArrayBuilder[Boolean] { + + protected var elems: Array[Boolean] = _ + + private def mkArray(size: Int): Array[Boolean] = { + val newelems = new Array[Boolean](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Boolean): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Boolean] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofBoolean => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofBoolean" + } + + /** A class for array builders for arrays of `Unit` type. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofUnit extends ArrayBuilder[Unit] { + + protected def elems: Array[Unit] = throw new UnsupportedOperationException() + + def addOne(elem: Unit): this.type = { + size += 1 + this + } + + override def addAll(xs: IterableOnce[Unit]): this.type = { + size += xs.iterator.size + this + } + + override def addAll(xs: Array[_ <: Unit], offset: Int, length: Int): this.type = { + size += length + this + } + + def result() = { + val ans = new Array[Unit](size) + var i = 0 + while (i < size) { ans(i) = (); i += 1 } + ans + } + + override def equals(other: Any): Boolean = other match { + case x: ofUnit => (size == x.size) + case _ => false + } + + protected[this] def resize(size: Int): Unit = () + + override def toString = "ArrayBuilder.ofUnit" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala new file mode 100644 index 000000000000..205e1607f824 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala @@ -0,0 +1,645 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.reflect.ClassTag + +/** An implementation of a double-ended queue that internally uses a resizable circular buffer. + * + * Append, prepend, removeHead, removeLast and random-access (indexed-lookup and indexed-replacement) + * take amortized constant time. In general, removals and insertions at i-th index are O(min(i, n-i)) + * and thus insertions and removals from end/beginning are fast. + * + * @note Subclasses ''must'' override the `ofArray` protected method to return a more specific type. + * + * @tparam A the type of this ArrayDeque's elements. + * + * @define Coll `mutable.ArrayDeque` + * @define coll array deque + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class ArrayDeque[A] protected ( + protected var array: Array[AnyRef], + private[ArrayDeque] var start: Int, + private[ArrayDeque] var end: Int +) extends AbstractBuffer[A] + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with StrictOptimizedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with IterableFactoryDefaults[A, ArrayDeque] + with ArrayDequeOps[A, ArrayDeque, ArrayDeque[A]] + with Cloneable[ArrayDeque[A]] + with DefaultSerializable { + + reset(array, start, end) + + private[this] def reset(array: Array[AnyRef], start: Int, end: Int) = { + assert((array.length & (array.length - 1)) == 0, s"Array.length must be power of 2") + requireBounds(idx = start, until = array.length) + requireBounds(idx = end, until = array.length) + this.array = array + this.start = start + this.end = end + } + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + // No-Op override to allow for more efficient stepper in a minor release. 
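+  // Illustrative: since the backing array length is kept a power of two (see
+  // the assert in reset above), logical index i always maps to physical slot
+  // (start + i) & (array.length - 1); this is what start_+ computes below.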
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = super.stepper(shape) + + def apply(idx: Int): A = { + requireBounds(idx) + _get(idx) + } + + def update(idx: Int, elem: A): Unit = { + requireBounds(idx) + _set(idx, elem) + } + + def addOne(elem: A): this.type = { + ensureSize(length + 1) + appendAssumingCapacity(elem) + } + + def prepend(elem: A): this.type = { + ensureSize(length + 1) + prependAssumingCapacity(elem) + } + + @inline private[ArrayDeque] def appendAssumingCapacity(elem: A): this.type = { + array(end) = elem.asInstanceOf[AnyRef] + end = end_+(1) + this + } + + @inline private[ArrayDeque] def prependAssumingCapacity(elem: A): this.type = { + start = start_-(1) + array(start) = elem.asInstanceOf[AnyRef] + this + } + + override def prependAll(elems: IterableOnce[A]): this.type = { + val it = elems.iterator + if (it.nonEmpty) { + val n = length + // The following code resizes the current collection at most once and traverses elems at most twice + elems.knownSize match { + // Size is too expensive to compute AND we can traverse it only once - can't do much but retry with an IndexedSeq + case srcLength if srcLength < 0 => prependAll(it.to(IndexedSeq: Factory[A, IndexedSeq[A]] /* type ascription needed by Dotty */)) + + // We know for sure we need to resize to hold everything, might as well resize and memcopy upfront + case srcLength if mustGrow(srcLength + n) => + val finalLength = srcLength + n + val array2 = ArrayDeque.alloc(finalLength) + it.copyToArray(array2.asInstanceOf[Array[A]]) + copySliceToArray(srcStart = 0, dest = array2, destStart = srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + + // Just fill up from (start - srcLength) to (start - 1) and move back start + case srcLength => + // Optimized version of `elems.zipWithIndex.foreach((elem, i) => _set(i - srcLength, elem))` + var i = 0 + while(i < srcLength) { + _set(i - srcLength, it.next()) + i += 1 + } + start = start_-(srcLength) + } + } + this + } + + override def addAll(elems: IterableOnce[A]): this.type = { + elems.knownSize match { + case srcLength if srcLength > 0 => + ensureSize(srcLength + length) + elems.iterator.foreach(appendAssumingCapacity) + case _ => elems.iterator.foreach(+=) + } + this + } + + def insert(idx: Int, elem: A): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prepend(elem) + } else if (idx == n) { + addOne(elem) + } else { + val finalLength = n + 1 + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + array2(idx) = elem.asInstanceOf[AnyRef] + copySliceToArray(srcStart = idx, dest = array2, destStart = idx + 1, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (n <= idx * 2) { + var i = n - 1 + while(i >= idx) { + _set(i + 1, _get(i)) + i -= 1 + } + end = end_+(1) + i += 1 + _set(i, elem) + } else { + var i = 0 + while(i < idx) { + _set(i - 1, _get(i)) + i += 1 + } + start = start_-(1) + _set(i, elem) + } + } + } + + def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prependAll(elems) + } else if (idx == n) { + addAll(elems) + } else { + // Get both an iterator and the length of the source (by copying the source to an IndexedSeq if needed) + val (it, srcLength) = { + val _srcLength = elems.knownSize + if (_srcLength >= 0) (elems.iterator, _srcLength) + else { + val indexed = 
IndexedSeq.from(elems) + (indexed.iterator, indexed.size) + } + } + if (it.nonEmpty) { + val finalLength = srcLength + n + // Either we resize right away or move prefix left or suffix right + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + it.copyToArray(array2.asInstanceOf[Array[A]], idx) + copySliceToArray(srcStart = idx, dest = array2, destStart = idx + srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx >= n) { // Cheaper to shift the suffix right + var i = n - 1 + while(i >= idx) { + _set(i + srcLength, _get(i)) + i -= 1 + } + end = end_+(srcLength) + while(it.hasNext) { + i += 1 + _set(i, it.next()) + } + } else { // Cheaper to shift prefix left + var i = 0 + while(i < idx) { + _set(i - srcLength, _get(i)) + i += 1 + } + start = start_-(srcLength) + while(it.hasNext) { + _set(i, it.next()) + i += 1 + } + } + } + } + } + + def remove(idx: Int, count: Int): Unit = { + if (count > 0) { + requireBounds(idx) + val n = length + val removals = Math.min(n - idx, count) + val finalLength = n - removals + val suffixStart = idx + removals + // If we know we can resize after removing, do it right away using arrayCopy + // Else, choose the shorter: either move the prefix (0 until idx) right OR the suffix (idx+removals until n) left + if (shouldShrink(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + copySliceToArray(srcStart = suffixStart, dest = array2, destStart = idx, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx <= finalLength) { // Cheaper to move the prefix right + var i = suffixStart - 1 + while(i >= removals) { + _set(i, _get(i - removals)) + i -= 1 + } + while(i >= 0) { + _set(i, null.asInstanceOf[A]) + i -= 1 + } + start = start_+(removals) + } else { // Cheaper to move the suffix left + var i = idx + while(i < finalLength) { + _set(i, _get(i + removals)) + i += 1 + } + while(i < n) { + _set(i, null.asInstanceOf[A]) + i += 1 + } + end = end_-(removals) + } + } else { + require(count == 0, s"removing negative number of elements: $count") + } + } + + def remove(idx: Int): A = { + val elem = this(idx) + remove(idx, 1) + elem + } + + override def subtractOne(elem: A): this.type = { + val idx = indexOf(elem) + if (idx >= 0) remove(idx, 1) //TODO: SeqOps should be fluent API + this + } + + /** + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @return + */ + def removeHeadOption(resizeInternalRepr: Boolean = false): Option[A] = + if (isEmpty) None else Some(removeHeadAssumingNonEmpty(resizeInternalRepr)) + + /** + * Unsafely remove the first element (throws exception when empty) + * See also removeHeadOption() + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @throws NoSuchElementException when empty + * @return + */ + def removeHead(resizeInternalRepr: Boolean = false): A = + if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeHeadAssumingNonEmpty(resizeInternalRepr) + + @inline private[this] def removeHeadAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { + val elem = array(start) + array(start) = null + start = start_+(1) + if (resizeInternalRepr) resize(length) + elem.asInstanceOf[A] + } + + /** + * + * @param resizeInternalRepr If 
this is set, resize the internal representation to reclaim space once in a while
+    * @return the removed element, or `None` if this collection is empty
+    */
+  def removeLastOption(resizeInternalRepr: Boolean = false): Option[A] =
+    if (isEmpty) None else Some(removeLastAssumingNonEmpty(resizeInternalRepr))
+
+  /**
+    * Unsafely removes the last element (throws an exception when empty).
+    * See also removeLastOption().
+    *
+    * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while
+    * @throws NoSuchElementException when empty
+    * @return the removed element
+    */
+  def removeLast(resizeInternalRepr: Boolean = false): A =
+    if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeLastAssumingNonEmpty(resizeInternalRepr)
+
+  @`inline` private[this] def removeLastAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = {
+    end = end_-(1)
+    val elem = array(end)
+    array(end) = null
+    if (resizeInternalRepr) resize(length)
+    elem.asInstanceOf[A]
+  }
+
+  /**
+    * Removes all elements from this collection and returns them, emptying this data structure.
+    * @return the removed elements, in order
+    */
+  def removeAll(): scala.collection.immutable.Seq[A] = {
+    val elems = scala.collection.immutable.Seq.newBuilder[A]
+    elems.sizeHint(length)
+    while(nonEmpty) {
+      elems += removeHeadAssumingNonEmpty()
+    }
+    elems.result()
+  }
+
+  /**
+    * Removes all elements from this collection and returns them in reverse, emptying this data structure.
+    * @return the removed elements, in reverse order
+    */
+  def removeAllReverse(): scala.collection.immutable.Seq[A] = {
+    val elems = scala.collection.immutable.Seq.newBuilder[A]
+    elems.sizeHint(length)
+    while(nonEmpty) {
+      elems += removeLastAssumingNonEmpty()
+    }
+    elems.result()
+  }
+
+  /**
+    * Returns and removes all elements from the left of this queue which satisfy the given predicate.
+    *
+    * @param f the predicate used for choosing elements
+    * @return the removed elements
+    */
+  def removeHeadWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = {
+    val elems = scala.collection.immutable.Seq.newBuilder[A]
+    while(headOption.exists(f)) {
+      elems += removeHeadAssumingNonEmpty()
+    }
+    elems.result()
+  }
+
+  /**
+    * Returns and removes all elements from the right of this queue which satisfy the given predicate.
+    *
+    * @param f the predicate used for choosing elements
+    * @return the removed elements
+    */
+  def removeLastWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = {
+    val elems = scala.collection.immutable.Seq.newBuilder[A]
+    while(lastOption.exists(f)) {
+      elems += removeLastAssumingNonEmpty()
+    }
+    elems.result()
+  }
+
+  /** Returns the first element which satisfies the given predicate after or at some start index,
+    * and removes this element from the collection.
+    *
+    * @param p the predicate used for choosing the first element
+    * @param from the start index
+    * @return the first element of the queue for which p yields true
+    */
+  def removeFirst(p: A => Boolean, from: Int = 0): Option[A] = {
+    val i = indexWhere(p, from)
+    if (i < 0) None else Some(remove(i))
+  }
+
+  /** Returns all elements in this collection which satisfy the given predicate,
+    * and removes those elements from this collection.
+    *
+    * @param p the predicate used for choosing elements
+    * @return a sequence of all elements in the queue for which
+    *         p yields true.
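+    *
+    * For example (a sketch):
+    * {{{
+    * val dq = ArrayDeque(1, 2, 3, 4)
+    * dq.removeAll(_ % 2 == 0)   // returns Seq(2, 4); dq is now ArrayDeque(1, 3)
+    * }}}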
+    */
+  def removeAll(p: A => Boolean): scala.collection.immutable.Seq[A] = {
+    val res = scala.collection.immutable.Seq.newBuilder[A]
+    var i, j = 0
+    while (i < size) {
+      if (p(this(i))) {
+        res += this(i)
+      } else {
+        if (i != j) {
+          this(j) = this(i)
+        }
+        j += 1
+      }
+      i += 1
+    }
+    if (i != j) takeInPlace(j)
+    res.result()
+  }
+
+  @inline def ensureSize(hint: Int) = if (hint > length && mustGrow(hint)) resize(hint)
+
+  def length = end_-(start)
+
+  override def isEmpty = start == end
+
+  override protected def klone(): ArrayDeque[A] = new ArrayDeque(array.clone(), start = start, end = end)
+
+  override def iterableFactory: SeqFactory[ArrayDeque] = ArrayDeque
+
+  /**
+    * Note: This does not actually resize the internal representation.
+    * See clearAndShrink if you want to also resize internally.
+    */
+  def clear(): Unit = {
+    while(nonEmpty) {
+      removeHeadAssumingNonEmpty()
+    }
+  }
+
+  /**
+    * Clears this buffer and shrinks the internal representation so it can hold at least `size` elements.
+    *
+    * @param size the capacity to shrink to
+    * @return this buffer
+    */
+  def clearAndShrink(size: Int = ArrayDeque.DefaultInitialSize): this.type = {
+    reset(array = ArrayDeque.alloc(size), start = 0, end = 0)
+    this
+  }
+
+  protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] =
+    new ArrayDeque[A](array, start = 0, end)
+
+  override def copyToArray[B >: A](dest: Array[B], destStart: Int, len: Int): Int = {
+    val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len)
+    if (copied > 0) {
+      copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len)
+    }
+    copied
+  }
+
+  override def toArray[B >: A: ClassTag]: Array[B] =
+    copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length)
+
+  /**
+    * Trims the capacity of this ArrayDeque instance to be the current size.
+    */
+  def trimToSize(): Unit = resize(length)
+
+  // Utils for common modular arithmetic:
+  @inline protected def start_+(idx: Int) = (start + idx) & (array.length - 1)
+  @inline private[this] def start_-(idx: Int) = (start - idx) & (array.length - 1)
+  @inline private[this] def end_+(idx: Int) = (end + idx) & (array.length - 1)
+  @inline private[this] def end_-(idx: Int) = (end - idx) & (array.length - 1)
+
+  // Note: here be overflow dragons! This is used for int overflow
+  // assumptions in resize(). Use caution changing.
+  @inline private[this] def mustGrow(len: Int) = {
+    len >= array.length
+  }
+
+  // Assumes that 0 <= len < array.length!
+  @inline private[this] def shouldShrink(len: Int) = {
+    // To avoid allocation churn, only shrink when array is large
+    // and less than 2/5 filled.
+    array.length > ArrayDeque.StableSize && array.length - len - (len >> 1) > len
+  }
+
+  // Assumes that 0 <= len < array.length!
+  @inline private[this] def canShrink(len: Int) = {
+    array.length > ArrayDeque.DefaultInitialSize && array.length - len > len
+  }
+
+  @inline private[this] def _get(idx: Int): A = array(start_+(idx)).asInstanceOf[A]
+
+  @inline private[this] def _set(idx: Int, elem: A) = array(start_+(idx)) = elem.asInstanceOf[AnyRef]
+
+  // Assumes that 0 <= len.
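+  // Illustrative: a deque holding 100 elements in a 256-slot array satisfies
+  // canShrink(100) (256 > 16 and 256 - 100 > 100), so trimToSize() calls
+  // resize(100), which reallocates via ArrayDeque.alloc to the next power of
+  // two above 100, i.e. 128 slots.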
+  private[this] def resize(len: Int) = if (mustGrow(len) || canShrink(len)) {
+    val n = length
+    val array2 = copySliceToArray(srcStart = 0, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = n)
+    reset(array = array2, start = 0, end = n)
+  }
+
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix = "ArrayDeque"
+}
+
+/**
+  * $factoryInfo
+  * @define coll array deque
+  * @define Coll `ArrayDeque`
+  */
+@SerialVersionUID(3L)
+object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] {
+
+  def from[B](coll: collection.IterableOnce[B]): ArrayDeque[B] = {
+    val s = coll.knownSize
+    if (s >= 0) {
+      val array = alloc(s)
+      val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]])
+      if (actual != s) throw new IllegalStateException(s"Copied $actual of $s")
+      new ArrayDeque[B](array, start = 0, end = s)
+    } else new ArrayDeque[B]() ++= coll
+  }
+
+  def newBuilder[A]: Builder[A, ArrayDeque[A]] =
+    new GrowableBuilder[A, ArrayDeque[A]](empty) {
+      override def sizeHint(size: Int): Unit = {
+        elems.ensureSize(size)
+      }
+    }
+
+  def empty[A]: ArrayDeque[A] = new ArrayDeque[A]()
+
+  final val DefaultInitialSize = 16
+
+  /**
+    * We try not to repeatedly resize arrays smaller than this.
+    */
+  private[ArrayDeque] final val StableSize = 128
+
+  /**
+    * Allocates an array whose size is the next power of 2 strictly greater than `len`.
+    * The largest possible `len` is 1<<30 - 1.
+    *
+    * @param len the number of elements the deque must be able to hold
+    * @return a fresh array of at least `DefaultInitialSize` cells
+    */
+  private[mutable] def alloc(len: Int) = {
+    require(len >= 0, s"Non-negative array size required")
+    val size = (1 << 31) >>> java.lang.Integer.numberOfLeadingZeros(len) << 1
+    require(size >= 0, s"ArrayDeque too big - cannot allocate ArrayDeque of length $len")
+    new Array[AnyRef](Math.max(size, DefaultInitialSize))
+  }
+}
+
+trait ArrayDequeOps[A, +CC[_], +C <: AnyRef] extends StrictOptimizedSeqOps[A, CC, C] {
+  protected def array: Array[AnyRef]
+
+  final override def clone(): C = klone()
+
+  protected def klone(): C
+
+  protected def ofArray(array: Array[AnyRef], end: Int): C
+
+  protected def start_+(idx: Int): Int
+
+  @inline protected final def requireBounds(idx: Int, until: Int = length): Unit =
+    if (idx < 0 || idx >= until) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${until-1})")
+
+  /**
+    * A more general version of `copyToArray`: unlike `copyToArray`, it also accepts a `srcStart`.
+    * It copies up to `maxItems` elements, starting at this collection's `srcStart`, into `dest`
+    * starting at `destStart`. If the end of either collection is reached before `maxItems`
+    * elements could be copied, copying simply stops there.
+    *
+    * @param srcStart  the index in this collection to start copying from
+    * @param dest      the destination array
+    * @param destStart the index in `dest` to start copying to
+    * @param maxItems  the maximum number of elements to copy
+    */
+  def copySliceToArray(srcStart: Int, dest: Array[_], destStart: Int, maxItems: Int): dest.type = {
+    requireBounds(destStart, dest.length+1)
+    val toCopy = Math.min(maxItems, Math.min(length - srcStart, dest.length - destStart))
+    if (toCopy > 0) {
+      requireBounds(srcStart)
+      val startIdx = start_+(srcStart)
+      val block1 = Math.min(toCopy, array.length - startIdx)
+      Array.copy(src = array, srcPos = startIdx, dest = dest, destPos = destStart, length = block1)
+      val block2 = toCopy - block1
+      if (block2 > 0) Array.copy(src = array, srcPos = 0, dest = dest, destPos = destStart + block1, length = block2)
+    }
+    dest
+  }
+
+  override def reverse: C = {
+    val n = length
+    val arr = ArrayDeque.alloc(n)
+    var i = 0
+    while(i < n) {
+      arr(i) = this(n - i - 1).asInstanceOf[AnyRef]
+      i += 1
+    }
+    ofArray(arr, n)
+  }
+
+  override def slice(from: Int,
until: Int): C = { + val n = length + val left = Math.max(0, Math.min(n, from)) + val right = Math.max(0, Math.min(n, until)) + val len = right - left + if (len <= 0) { + empty + } else if (len >= n) { + klone() + } else { + val array2 = copySliceToArray(srcStart = left, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = len) + ofArray(array2, len) + } + } + + override def sliding(window: Int, step: Int): Iterator[C] = { + require(window > 0 && step > 0, s"window=$window and step=$step, but both must be positive") + length match { + case 0 => Iterator.empty + case n if n <= window => Iterator.single(slice(0, length)) + case n => + val lag = if (window > step) window - step else 0 + Iterator.range(start = 0, end = n - lag, step = step).map(i => slice(i, i + window)) + } + } + + override def grouped(n: Int): Iterator[C] = sliding(n, n) +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala new file mode 100644 index 000000000000..74ab6b2107e5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala @@ -0,0 +1,347 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import java.util.Arrays + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 + +/** + * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same + * underlying `Array`, therefore it is not growable or shrinkable. + * + * @tparam T type of the elements in this wrapped array. + * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed abstract class ArraySeq[T] + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] + with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] + with Serializable { + + override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged + + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = { + val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] + val s = coll.knownSize + if(s > 0) b.sizeHint(s) + b ++= coll + ArraySeq.make(b.result()) + } + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(elemTag).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def elemTag: ClassTag[_] + + /** Update element at given index */ + def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit + + /** The underlying array. Its element type does not have to be equal to the element type of this ArraySeq. 
A primitive
+   * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype
+   * or subtype of the element type. */
+  def array: Array[_]
+
+  override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit
+
+  override protected[this] def className = "ArraySeq"
+
+  /** Clones this object, including the underlying Array. */
+  override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]]
+
+  override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = {
+    val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len)
+    if(copied > 0) {
+      Array.copy(array, 0, xs, start, copied)
+    }
+    copied
+  }
+
+  override def equals(other: Any): Boolean = other match {
+    case that: ArraySeq[_] if this.array.length != that.array.length =>
+      false
+    case _ =>
+      super.equals(other)
+  }
+
+  override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] =
+    ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]]
+
+  override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = {
+    if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]])
+    this
+  }
+}
+
+/** A companion object used to create instances of `ArraySeq`.
+  */
+@SerialVersionUID(3L)
+object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self =>
+  val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self)
+
+  // This is reused for all calls to empty.
+  private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0))
+  def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]]
+
+  def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it))
+
+  def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make)
+
+  /**
+   * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type
+   * without copying.
+   *
+   * Note that an array containing boxed primitives can be converted to an `ArraySeq` without
+   * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime,
+   * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast:
+   * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still
+   * boxed, and the resulting instance is an [[ArraySeq.ofRef]]. Writing
+   * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work; it throws a `ClassCastException`
+   * at runtime.
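+   *
+   * For example (an illustrative sketch):
+   * {{{
+   * val xs  = Array(1, 2, 3)
+   * val seq = ArraySeq.make(xs)   // an ArraySeq.ofInt sharing xs
+   * xs(0) = 42
+   * seq(0)                        // 42: the array was not copied
+   * }}}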
+ */ + def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { + def elemTag = ClassTag[T](array.getClass.getComponentType) + def length: Int = array.length + def apply(index: Int): T = array(index) + def update(index: Int, elem: T): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofRef[_] => + Array.equals( + this.array.asInstanceOf[Array[AnyRef]], + that.array.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(array, 0, array.length) + else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { + def elemTag = ClassTag.Byte + def length: Int = array.length + def apply(index: Int): Byte = array(index) + def update(index: Int, elem: Byte): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) + else new WidenedByteArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { + def elemTag = ClassTag.Short + def length: Int = array.length + def apply(index: Int): Short = array(index) + def update(index: Int, elem: Short): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) + else new WidenedShortArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { + def elemTag = ClassTag.Char + def length: Int = array.length + def apply(index: Int): Char = 
array(index) + def update(index: Int, elem: Char): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) + else new WidenedCharArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + val jsb = sb.underlying + if (start.length != 0) jsb.append(start) + val len = array.length + if (len != 0) { + if (sep.isEmpty) jsb.append(array) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(array(0)) + var i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(array(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + sb + } + } + + @SerialVersionUID(3L) + final class ofInt(val array: Array[Int]) extends ArraySeq[Int] { + def elemTag = ClassTag.Int + def length: Int = array.length + def apply(index: Int): Int = array(index) + def update(index: Int, elem: Int): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) + else new IntArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofLong(val array: Array[Long]) extends ArraySeq[Long] { + def elemTag = ClassTag.Long + def length: Int = array.length + def apply(index: Int): Long = array(index) + def update(index: Int, elem: Long): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(array, 0, array.length)) + else new LongArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] { + def elemTag = ClassTag.Float + def length: Int = array.length + def apply(index: Int): Float = array(index) + def update(index: Int, elem: Float): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) + override def stepper[S <: 
Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) + else new WidenedFloatArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { + def elemTag = ClassTag.Double + def length: Int = array.length + def apply(index: Int): Double = array(index) + def update(index: Int, elem: Double): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) + else new DoubleArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { + def elemTag = ClassTag.Boolean + def length: Int = array.length + def apply(index: Int): Boolean = array(index) + def update(index: Int, elem: Boolean): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { + def elemTag = ClassTag.Unit + def length: Int = array.length + def apply(index: Int): Unit = array(index) + def update(index: Int, elem: Unit): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofUnit => array.length == that.array.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S with EfficientSplit] + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala new file mode 100644 index 000000000000..69ecc122c1f9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/BitSet.scala @@ -0,0 +1,392 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +import scala.collection.immutable.Range +import BitSetOps.{LogWL, MaxSize} +import scala.annotation.implicitNotFound + +/** + * A class for mutable bitsets. + * + * $bitsetinfo + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] + * section on `Mutable Bitsets` for more information. + * + * @define Coll `BitSet` + * @define coll bitset + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class BitSet(protected[collection] final var elems: Array[Long]) + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedIterableOps[Int, Set, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { + + def this(initSize: Int) = this(new Array[Long](math.max((initSize + 63) >> 6, 1))) + + def this() = this(0) + + override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + + def bitSetFactory = BitSet + + override def unsorted: Set[Int] = this + + protected[collection] final def nwords: Int = elems.length + + protected[collection] final def word(idx: Int): Long = + if (idx < nwords) elems(idx) else 0L + + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = + if (elems.length == 0) empty + else new BitSet(elems) + + def addOne(elem: Int): this.type = { + require(elem >= 0) + if (!contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) | (1L << elem)) + } + this + } + + def subtractOne(elem: Int): this.type = { + require(elem >= 0) + if (contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) & ~(1L << elem)) + } + this + } + + def clear(): Unit = { + elems = new Array[Long](elems.length) + } + + protected final def updateWord(idx: Int, w: Long): Unit = { + ensureCapacity(idx) + elems(idx) = w + } + + protected final def ensureCapacity(idx: Int): Unit = { + require(idx < MaxSize) + if (idx >= nwords) { + var newlen = nwords + while (idx >= newlen) newlen = math.min(newlen * 2, MaxSize) + val elems1 = new Array[Long](newlen) + Array.copy(elems, 0, elems1, 0, nwords) + elems = elems1 + } + } + + def unconstrained: collection.Set[Int] = this + + /** Updates this bitset to the union with another bitset by performing a bitwise "or". + * + * @param other the bitset to form the union with. + * @return the bitset itself. + */ + def |= (other: collection.BitSet): this.type = { + ensureCapacity(other.nwords - 1) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { + elems(i) = elems(i) | other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". + * + * @param other the bitset to form the intersection with. + * @return the bitset itself. + */ + def &= (other: collection.BitSet): this.type = { + // Different from other operations: no need to ensure capacity because + // anything beyond the capacity is 0. Since we use other.word which is 0 + // off the end, we also don't need to make sure we stay in bounds there. 
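+    // For illustration (hypothetical values, sketch only): if this = {1, 200} (4 words) and
+    // other = {1} (1 word), then other.word(i) yields 0L for every i >= 1, so words 1..3 of
+    // this are cleared to 0 without ever reading past the end of other's array.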
+ var i = 0 + val thisnwords = nwords + while (i < thisnwords) { + elems(i) = elems(i) & other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". + * + * @param other the bitset to form the symmetric difference with. + * @return the bitset itself. + */ + def ^= (other: collection.BitSet): this.type = { + ensureCapacity(other.nwords - 1) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { + + elems(i) = elems(i) ^ other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". + * + * @param other the bitset to form the difference with. + * @return the bitset itself. + */ + def &~= (other: collection.BitSet): this.type = { + var i = 0 + val max = Math.min(nwords, other.nwords) + while (i < max) { + elems(i) = elems(i) & ~other.word(i) + i += 1 + } + this + } + + override def clone(): BitSet = new BitSet(java.util.Arrays.copyOf(elems, elems.length)) + + def toImmutable: immutable.BitSet = immutable.BitSet.fromBitMask(elems) + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + override def addAll(xs: IterableOnce[Int]): this.type = xs match { + case bs: collection.BitSet => + this |= bs + case range: Range => + if (range.nonEmpty) { + val start = range.min + if (start >= 0) { + val end = range.max + val endIdx = end >> LogWL + ensureCapacity(endIdx) + + if (range.step == 1 || range.step == -1) { + val startIdx = start >> LogWL + val wordStart = startIdx * BitSetOps.WordLength + val wordMask = -1L << (start - wordStart) + + if (endIdx > startIdx) { + elems(startIdx) |= wordMask + java.util.Arrays.fill(elems, startIdx + 1, endIdx, -1L) + elems(endIdx) |= -1L >>> (BitSetOps.WordLength - (end - endIdx * BitSetOps.WordLength) - 1) + } else elems(endIdx) |= (wordMask & (-1L >>> (BitSetOps.WordLength - (end - wordStart) - 1))) + } else super.addAll(range) + } else super.addAll(range) + } + this + + case sorted: collection.SortedSet[Int] => + // if `sorted` is using the regular Int ordering, ensure capacity for the largest + // element up front to avoid multiple resizing allocations + if (sorted.nonEmpty) { + val ord = sorted.ordering + if (ord eq Ordering.Int) { + ensureCapacity(sorted.lastKey >> LogWL) + } else if (ord eq Ordering.Int.reverse) { + ensureCapacity(sorted.firstKey >> LogWL) + } + val iter = sorted.iterator + while (iter.hasNext) { + addOne(iter.next()) + } + } + + this + + case other => + super.addAll(other) + } + + 
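+  // Hypothetical usage sketch of the bulk operations above (not part of the original file):
+  //   val bs = mutable.BitSet(3)
+  //   bs.addAll(0 to 127)        // Range fast path: words 0 and 1 are filled with -1L directly
+  //   bs |= mutable.BitSet(200)  // word-wise union; capacity grows to 4 words
+  //   bs.subsetOf(mutable.BitSet.fromBitMask(Array(-1L, -1L, -1L, -1L)))  // true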
+  override def subsetOf(that: collection.Set[Int]): Boolean = that match {
+    case bs: collection.BitSet =>
+      val thisnwords = this.nwords
+      val bsnwords = bs.nwords
+      val minWords = Math.min(thisnwords, bsnwords)
+
+      // if any bits are set to `1` in words out of range of `bs`, then this is not a subset. Start there
+      var i = bsnwords
+      while (i < thisnwords) {
+        if (word(i) != 0L) return false
+        i += 1
+      }
+
+      // the higher range of `this` is all `0`s, fall back to lower range
+      var j = 0
+      while (j < minWords) {
+        if ((word(j) & ~bs.word(j)) != 0L) return false
+        j += 1
+      }
+
+      true
+    case other =>
+      super.subsetOf(other)
+  }
+
+  override def subtractAll(xs: IterableOnce[Int]): this.type = xs match {
+    case bs: collection.BitSet => this &~= bs
+    case other => super.subtractAll(other)
+  }
+
+  protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this)
+
+  override def diff(that: collection.Set[Int]): BitSet = that match {
+    case bs: collection.BitSet =>
+      /*
+       * Algorithm:
+       *
+       * We iterate, word-by-word, backwards from the shorter of the two bitsets (this or bs), i.e. the one with
+       * fewer words.
+       *
+       * Array Shrinking:
+       * If `this` is not longer than `bs`, then since we must iterate through the full array of words,
+       * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new
+       * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1`
+       */
+
+      val bsnwords = bs.nwords
+      val thisnwords = nwords
+      if (bsnwords >= thisnwords) {
+        // here, we may have opportunity to shrink the size of the array
+        // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length
+        var i = thisnwords - 1
+        var currentWord = 0L
+
+        while (i >= 0 && currentWord == 0L) {
+          val oldWord = word(i)
+          currentWord = oldWord & ~bs.word(i)
+          i -= 1
+        }
+
+        if (i < 0) {
+          fromBitMaskNoCopy(Array(currentWord))
+        } else {
+          val minimumNonZeroIndex: Int = i + 1
+          val newArray = elems.take(minimumNonZeroIndex + 1)
+          newArray(i + 1) = currentWord
+          while (i >= 0) {
+            newArray(i) = word(i) & ~bs.word(i)
+            i -= 1
+          }
+          fromBitMaskNoCopy(newArray)
+        }
+      } else {
+        // here, there is no opportunity to shrink the array size, no use in tracking highest non-zero index
+        val newElems = elems.clone()
+        var i = bsnwords - 1
+        while (i >= 0) {
+          newElems(i) = word(i) & ~bs.word(i)
+          i -= 1
+        }
+        fromBitMaskNoCopy(newElems)
+      }
+    case _ => super.diff(that)
+  }
+
+  override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = {
+    // We filter the BitSet from highest to lowest, so we can determine exactly the highest non-zero word
+    // index which lets us avoid:
+    // * over-allocating -- the resulting array will be exactly the right size
+    // * multiple resizing allocations -- the array is allocated one time, not log(n) times.
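+    // Worked example (hypothetical input, sketch only): for elems = {0, 1, 64} and
+    // pred = (_ % 2 == 0) with isFlipped = false, the downward scan first keeps bit 0 of
+    // word 1 (element 64), allocating newArray with length 2, then stores the surviving
+    // bit 0 (element 0) into word 0, yielding {0, 64}.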
+    var i = nwords - 1
+    var newArray: Array[Long] = null
+    while (i >= 0) {
+      val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i)
+      if (w != 0L) {
+        if (newArray eq null) {
+          newArray = new Array(i + 1)
+        }
+        newArray(i) = w
+      }
+      i -= 1
+    }
+    if (newArray eq null) {
+      empty
+    } else {
+      fromBitMaskNoCopy(newArray)
+    }
+  }
+
+  override def filterInPlace(p: Int => Boolean): this.type = {
+    val thisnwords = nwords
+    var i = 0
+    while (i < thisnwords) {
+      elems(i) = BitSetOps.computeWordForFilter(p, isFlipped = false, elems(i), i)
+      i += 1
+    }
+    this
+  }
+
+  override def toBitMask: Array[Long] = elems.clone()
+}
+
+@SerialVersionUID(3L)
+object BitSet extends SpecificIterableFactory[Int, BitSet] {
+
+  def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = Growable.from(empty, it)
+
+  def empty: BitSet = new BitSet()
+
+  def newBuilder: Builder[Int, BitSet] = new GrowableBuilder(empty)
+
+  /** A bitset containing all the bits in an array */
+  def fromBitMask(elems: Array[Long]): BitSet = {
+    val len = elems.length
+    if (len == 0) empty
+    else {
+      val a = java.util.Arrays.copyOf(elems, len)
+      new BitSet(a)
+    }
+  }
+
+  /** A bitset containing all the bits in an array, wrapping the existing
+   * array without copying.
+   */
+  def fromBitMaskNoCopy(elems: Array[Long]): BitSet = {
+    val len = elems.length
+    if (len == 0) empty
+    else new BitSet(elems)
+  }
+
+  @SerialVersionUID(3L)
+  private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) {
+    protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems)
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala
index 0a70c75bac0c..847b924735ce 100644
--- a/tests/pos-special/stdlib/collection/mutable/Buffer.scala
+++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala
@@ -14,7 +14,6 @@ package scala.collection
 package mutable
 
 import scala.annotation.nowarn
-import language.experimental.captureChecking
 
 /** A `Buffer` is a growable and shrinkable `Seq`. */
@@ -49,19 +48,19 @@ trait Buffer[A]
   /** Appends the elements contained in an iterable object to this buffer.
    * @param xs the iterable object containing the elements to append.
    */
-  @`inline` final def appendAll(xs: IterableOnce[A]^): this.type = addAll(xs)
+  @`inline` final def appendAll(xs: IterableOnce[A]): this.type = addAll(xs)
 
   /** Alias for `prepend` */
   @`inline` final def +=: (elem: A): this.type = prepend(elem)
 
-  def prependAll(elems: IterableOnce[A]^): this.type = { insertAll(0, elems); this }
+  def prependAll(elems: IterableOnce[A]): this.type = { insertAll(0, elems); this }
 
   @deprecated("Use prependAll instead", "2.13.0")
   @`inline` final def prepend(elems: A*): this.type = prependAll(elems)
 
   /** Alias for `prependAll` */
-  @inline final def ++=:(elems: IterableOnce[A]^): this.type = prependAll(elems)
+  @inline final def ++=:(elems: IterableOnce[A]): this.type = prependAll(elems)
 
   /** Inserts a new element at a given index into this buffer.
    *
@@ -82,7 +81,7 @@
    * @throws IndexOutOfBoundsException if `idx` is out of bounds.
    */
   @throws[IndexOutOfBoundsException]
-  def insertAll(idx: Int, elems: IterableOnce[A]^): Unit
+  def insertAll(idx: Int, elems: IterableOnce[A]): Unit
 
   /** Removes the element at a given index position.
* @@ -104,7 +103,7 @@ trait Buffer[A] @throws[IndexOutOfBoundsException] @throws[IllegalArgumentException] def remove(idx: Int, count: Int): Unit - + /** Removes a single element from this buffer, at its first occurrence. * If the buffer does not contain that element, it is unchanged. * @@ -133,7 +132,7 @@ trait Buffer[A] @deprecated("use dropRightInPlace instead", since = "2.13.4") def trimEnd(n: Int): Unit = dropRightInPlace(n) - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type // +=, ++=, clear inherited from Growable // Per remark of @ichoran, we should preferably not have these: @@ -181,11 +180,11 @@ trait IndexedBuffer[A] extends IndexedSeq[A] override def iterableFactory: SeqFactory[IndexedBuffer] = IndexedBuffer - def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + def flatMapInPlace(f: A => IterableOnce[A]): this.type = { // There's scope for a better implementation which copies elements in place. var i = 0 val s = size - val newElems = new Array[IterableOnce[A]^](s) + val newElems = new Array[IterableOnce[A]](s) while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 @@ -208,7 +207,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] if (i == j) this else takeInPlace(j) } - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type = { + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { val replaced0 = math.min(math.max(replaced, 0), length) val i = math.min(math.max(from, 0), length) var j = 0 diff --git a/tests/pos-special/stdlib/collection/mutable/Builder.scala b/tests/pos-special/stdlib/collection/mutable/Builder.scala index dd57cb75da91..0ecc06dff061 100644 --- a/tests/pos-special/stdlib/collection/mutable/Builder.scala +++ b/tests/pos-special/stdlib/collection/mutable/Builder.scala @@ -12,9 +12,6 @@ package scala.collection.mutable -import language.experimental.captureChecking - - /** Base trait for collection builders. * * After calling `result()` the behavior of a Builder (which is not also a [[scala.collection.mutable.ReusableBuilder]]) @@ -23,8 +20,7 @@ import language.experimental.captureChecking * * @see [[scala.collection.mutable.ReusableBuilder]] for Builders which can be reused after calling `result()` */ -trait Builder[-A, +To] extends Growable[A] { - self: Builder[A, To]^ => +trait Builder[-A, +To] extends Growable[A] { self => /** Clears the contents of this builder. * After execution of this method the builder will contain no elements. @@ -55,7 +51,7 @@ trait Builder[-A, +To] extends Growable[A] { * @param coll the collection which serves as a hint for the result's size. * @param delta a correction to add to the `coll.size` to produce the size hint. */ - final def sizeHint(coll: scala.collection.IterableOnce[_]^, delta: Int = 0): Unit = { + final def sizeHint(coll: scala.collection.IterableOnce[_], delta: Int = 0): Unit = { val s = coll.knownSize if (s != -1) sizeHint(s + delta) } @@ -73,7 +69,7 @@ trait Builder[-A, +To] extends Growable[A] { * than collection's size are reduced. 
    */
  // should probably be `boundingColl: IterableOnce[_]`, but binary compatibility
-  final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]^): Unit = {
+  final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]): Unit = {
     val s = boundingColl.knownSize
     if (s != -1) {
       sizeHint(scala.math.min(s, size))
@@ -81,10 +77,10 @@
   }
 
   /** A builder resulting from this builder by mapping the result using `f`. */
-  def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo]^{this, f} = new Builder[A, NewTo] {
+  def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo] = new Builder[A, NewTo] {
     def addOne(x: A): this.type = { self += x; this }
     def clear(): Unit = self.clear()
-    override def addAll(xs: IterableOnce[A]^): this.type = { self ++= xs; this }
+    override def addAll(xs: IterableOnce[A]): this.type = { self ++= xs; this }
     override def sizeHint(size: Int): Unit = self.sizeHint(size)
     def result(): NewTo = f(self.result())
     override def knownSize: Int = self.knownSize
diff --git a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala
new file mode 100644
index 000000000000..b9598904375d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala
@@ -0,0 +1,117 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] {
+  protected val mutationCount: () => Int
+
+  override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount())
+  override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount())
+
+  override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount)
+  override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount)
+  override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount)
+  override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount)
+  override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount)
+  override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount)
+  override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount)
+  override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount)
+  override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount)
+  override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount)
+
+  override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount)
+  override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this,
suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} + +private[mutable] object CheckedIndexedSeqView { + import IndexedSeqView.SomeIndexedSeqOps + + @SerialVersionUID(3L) + private[mutable] class CheckedIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A)(protected val mutationCount: () => Int) + extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B)(protected val mutationCount: () => Int) + extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int)(protected val mutationCount: () => Int) + extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + 
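+    // The clamping above tolerates out-of-range arguments: on a 5-element view, a hypothetical
+    // slice(-2, 99) yields lo = 0, hi = 5, len = 5 instead of throwing.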
+    @throws[IndexOutOfBoundsException]
+    def apply(i: Int): A = underlying(lo + i)
+    def length: Int = len
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala
new file mode 100644
index 000000000000..940ecf3549ad
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala
@@ -0,0 +1,22 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+
+
+/** A trait for cloneable collections.
+ *
+ * @tparam C Type of the collection, covariant and with reference types as upper bound.
+ */
+trait Cloneable[+C <: AnyRef] extends scala.Cloneable {
+  override def clone(): C = super.clone().asInstanceOf[C]
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala
new file mode 100644
index 000000000000..8542b5b56a01
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala
@@ -0,0 +1,888 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.{unchecked => uc}
+import scala.annotation.{implicitNotFound, tailrec, unused}
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.generic.DefaultSerializationProxy
+import scala.runtime.Statics
+
+/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good
+ * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality
+ * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. Universal equality
+ * of numeric types is not supported (similar to `AnyRefMap`).
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]]
+ * section on `Hash Tables` for more information.
+ *
+ * @define Coll `mutable.CollisionProofHashMap`
+ * @define coll mutable collision-proof hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K])
+  extends AbstractMap[K, V]
+    with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //--
+    with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]]
+    with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //--
+
+  private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap
+
+  def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering)
+
+  import CollisionProofHashMap.Node
+  private[this] type RBNode = CollisionProofHashMap.RBNode[K, V]
+  private[this] type LLNode = CollisionProofHashMap.LLNode[K, V]
+
+  /** The actual hash table.
*/ + private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + @`inline` private[this] final def computeHash(o: K): Int = { + val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode + h ^ (h >>> 16) + } + + @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) + + override protected def fromSpecific(coll: IterableOnce[(K, V)] @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] + + override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + override def contains(key: K): Boolean = findNode(key) ne null + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + }) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + } + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val nd = findNode(key) + if (nd eq null) default else nd match { + case nd: LLNode @uc => nd.value + case n => n.asInstanceOf[RBNode].value + } + } + + @`inline` private[this] def findNode(elem: K): Node = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case n: LLNode @uc => n.getNode(elem, hash) + case n => n.asInstanceOf[RBNode].getNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) { + if(size == 0) reallocTable(target) + else growTable(target) + } + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + val res = table(idx) match { + case n: RBNode @uc => + insert(n, idx, key, hash, value) + case _old => + val old: LLNode = _old.asInstanceOf[LLNode] + if(old eq null) { + table(idx) = new LLNode(key, hash, value, null) + } else { + var remaining = CollisionProofHashMap.treeifyThreshold + var prev: LLNode = null + var n = old + while((n ne null) && n.hash <= hash && remaining > 0) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return (if(getOld) Some(old) else null) + } + prev = n + n = n.next + remaining -= 1 + } + if(remaining == 0) { + treeify(old, idx) + return put0(key, value, getOld, hash, idx) + } + if(prev eq null) table(idx) = new LLNode(key, hash, value, old) + else prev.next = new LLNode(key, hash, value, prev.next) + } + true + } + if(res) contentSize += 1 + if(res) 
Some(null.asInstanceOf[V]) else null //TODO + } + + private[this] def treeify(old: LLNode, idx: Int): Unit = { + table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null) + var n: LLNode = old.next + while(n ne null) { + val root = table(idx).asInstanceOf[RBNode] + insertIntoExisting(root, idx, n.key, n.hash, n.value, root) + n = n.next + } + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + val k = xs.knownSize + if(k > 0) sizeHint(contentSize + k) + super.addAll(xs) + } + + // returns the old value or Statics.pfMarker if not found + private[this] def remove0(elem: K) : Any = { + val hash = computeHash(elem) + val idx = index(hash) + table(idx) match { + case null => Statics.pfMarker + case t: RBNode @uc => + val v = delete(t, idx, elem, hash) + if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 + v + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd.value + case nd: LLNode @uc => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next.value + } + prev = next + next = next.next + } + Statics.pfMarker + } + } + + private[this] abstract class MapIterator[R] extends AbstractIterator[R] { + protected[this] def extract(node: LLNode): R + protected[this] def extract(node: RBNode): R + + private[this] var i = 0 + private[this] var node: Node = null + private[this] val len = table.length + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + n match { + case null => + case n: RBNode @uc => + node = CollisionProofHashMap.minNodeNonNull(n) + return true + case n: LLNode @uc => + node = n + return true + } + } + false + } + } + + def next(): R = + if(!hasNext) Iterator.empty.next() + else node match { + case n: RBNode @uc => + val r = extract(n) + node = CollisionProofHashMap.successor(n ) + r + case n: LLNode @uc => + val r = extract(n) + node = n.next + r + } + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapIterator[K] { + protected[this] def extract(node: LLNode) = node.key + protected[this] def extract(node: RBNode) = node.key + } + } + + override def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapIterator[(K, V)] { + protected[this] def extract(node: LLNode) = (node.key, node.value) + protected[this] def extract(node: RBNode) = (node.key, node.value) + } + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + table = java.util.Arrays.copyOf(table, newlen) + threshold = newThreshold(table.length) + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) splitBucket(old, i, i + oldlen, oldlen) + i += 1 + } + oldlen *= 2 + } + } + + @`inline` private[this] def reallocTable(newlen: Int) = { + table = new Array(newlen) + threshold = newThreshold(table.length) + } + + @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) + } + + private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, 
null.asInstanceOf[V], null) + val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + //preLow.next = null + //preHigh.next = null + var lastLow: LLNode = preLow + var lastHigh: LLNode = preHigh + var n = list + while(n ne null) { + val next = n.next + if((n.hash & mask) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(list ne preLow.next) table(lowBucket) = preLow.next + if(preHigh.next ne null) { + table(highBucket) = preHigh.next + lastHigh.next = null + } + } + + private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + var lowCount, highCount = 0 + tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) + if(highCount != 0) { + if(lowCount == 0) { + table(lowBucket) = null + table(highBucket) = tree + } else { + table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), lowCount) + table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def remove(key: K): Option[V] = { + val v = remove0(key) + if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) + } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) + } + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) + } + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) + + override protected[this] def className = "CollisionProofHashMap" + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val hash = computeHash(key) + val idx = index(hash) + table(idx) match { + case null => () + case n: LLNode @uc => + val nd = n.getNode(key, hash) + if(nd != null) return nd.value + case n => + val nd = n.asInstanceOf[RBNode].getNode(key, hash) + if(nd != null) return nd.value + } + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + + ///////////////////// Overrides code from SortedMapOps + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. 
+ * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: + + @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red + @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red + + @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { + val i = hash - node.hash + if(i != 0) i else ordering.compare(key, node.key) + } + + @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ordering.compare(key, node.key) + } + + // ---- insertion ---- + + @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { + val cmp = compare(key, hash, x) + if(cmp == 0) { + x.value = value + false + } else { + val next = if(cmp < 0) x.left else x.right + if(next eq null) { + val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) + if (cmp < 0) x.left = z else x.right = z + table(bucket) = 
fixAfterInsert(_root, z) + return true + } + else insertIntoExisting(_root, bucket, key, hash, value, next) + } + } + + private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { + if(tree eq null) { + table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) + true + } else insertIntoExisting(tree, bucket, key, hash, value, tree) + } + + private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { + var root = _root + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + root = rotateLeft(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateRight(root, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + root = rotateRight(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateLeft(root, z.parent.parent) + } + } + } + root.red = false + root + } + + // ---- deletion ---- + + // returns the old value or Statics.pfMarker if not found + private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { + var root = _root + val z = root.getNode(key, hash: Int) + if (z ne null) { + val oldValue = z.value + var y = z + var yIsRed = y.red + var x: RBNode = null + var xParent: RBNode = null + + if (z.left eq null) { + x = z.right + root = transplant(root, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + root = transplant(root, z, z.left) + xParent = z.parent + } + else { + y = CollisionProofHashMap.minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent eq z) xParent = y + else { + xParent = y.parent + root = transplant(root, y, y.right) + y.right = z.right + y.right.parent = y + } + root = transplant(root, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) root = fixAfterDelete(root, x, xParent) + if(root ne _root) table(bucket) = root + oldValue + } else Statics.pfMarker + } + + private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { + var root = _root + var x = node + var xParent = parent + while ((x ne root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateLeft(root, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + root = rotateRight(root, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + root = rotateLeft(root, xParent) + x = root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateRight(root, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + root = rotateLeft(root, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + root = rotateRight(root, xParent) + x = root + } + } + xParent = 
x.parent
+    }
+    if (x ne null) x.red = false
+    root
+  }
+
+  // ---- helpers ----
+
+  @`inline` private[this] def rotateLeft(_root: RBNode, x: RBNode): RBNode = {
+    var root = _root
+    val y = x.right
+    x.right = y.left
+
+    val xp = x.parent
+    if (y.left ne null) y.left.parent = x
+    y.parent = xp
+
+    if (xp eq null) root = y
+    else if (x eq xp.left) xp.left = y
+    else xp.right = y
+
+    y.left = x
+    x.parent = y
+    root
+  }
+
+  @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = {
+    var root = _root
+    val y = x.left
+    x.left = y.right
+
+    val xp = x.parent
+    if (y.right ne null) y.right.parent = x
+    y.parent = xp
+
+    if (xp eq null) root = y
+    else if (x eq xp.right) xp.right = y
+    else xp.left = y
+
+    y.right = x
+    x.parent = y
+    root
+  }
+
+  /**
+   * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous
+   * parent and setting `from`'s parent to `to`'s previous parent. The children of `from` are left unchanged.
+   */
+  private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = {
+    var root = _root
+    if (to.parent eq null) root = from
+    else if (to eq to.parent.left) to.parent.left = from
+    else to.parent.right = from
+    if (from ne null) from.parent = to.parent
+    root
+  }
+
+  // building
+
+  def fromNodes(xs: Iterator[Node], size: Int): RBNode = {
+    val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+    def f(level: Int, size: Int): RBNode = size match {
+      case 0 => null
+      case 1 =>
+        val nn = xs.next()
+        val (key, hash, value) = nn match {
+          case nn: LLNode @uc => (nn.key, nn.hash, nn.value)
+          case nn: RBNode @uc => (nn.key, nn.hash, nn.value)
+        }
+        new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null)
+      case n =>
+        val leftSize = (size-1)/2
+        val left = f(level+1, leftSize)
+        val nn = xs.next()
+        val right = f(level+1, size-1-leftSize)
+        val (key, hash, value) = nn match {
+          case nn: LLNode @uc => (nn.key, nn.hash, nn.value)
+          case nn: RBNode @uc => (nn.key, nn.hash, nn.value)
+        }
+        val n = new RBNode(key, hash, value, false, left, right, null)
+        if(left ne null) left.parent = n
+        right.parent = n
+        n
+    }
+    f(1, size)
+  }
+}
+
+/**
+ * $factoryInfo
+ * @define Coll `mutable.CollisionProofHashMap`
+ * @define coll mutable collision-proof hash map
+ */
+@SerialVersionUID(3L)
+object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] {
+  private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`."
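+  // The message above is attached via @implicitNotFound to the class's map/flatMap/collect
+  // overloads; it fires when a transformation produces a key type K2 with no Ordering in
+  // scope, e.g. (hypothetical): m.map { case (k, v) => (new AnyRef, v) }.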
+ + def from[K : Ordering, V](it: scala.collection.IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it + } + + def empty[K : Ordering, V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + def newBuilder[K : Ordering, V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K : Ordering, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) + } + + @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { + val i = hash - node.hash + if(i != 0) i else ord.compare(key, node.key) + } + + @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ord.compare(key, node.key) + } + + private final val treeifyThreshold = 8 + + // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. + // Keeping calls monomorphic where possible and dispatching manually where needed is faster. 
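+  // This is why call sites dispatch manually with a two-way match, e.g.:
+  //   table(idx) match { case n: LLNode @uc => ...; case n: RBNode @uc => ... }
+  // instead of declaring shared abstract members on Node.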
+ sealed abstract class Node + + /////////////////////////// Red-Black Tree Node + + final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { + val cmp = compare(k, h, this) + if (cmp < 0) { + if(left ne null) left.getNode(k, h) else null + } else if (cmp > 0) { + if(right ne null) right.getNode(k, h) else null + } else this + } + + def foreach[U](f: ((K, V)) => U): Unit = { + if(left ne null) left.foreach(f) + f((key, value)) + if(right ne null) right.foreach(f) + } + + def foreachEntry[U](f: (K, V) => U): Unit = { + if(left ne null) left.foreachEntry(f) + f(key, value) + if(right ne null) right.foreachEntry(f) + } + + def foreachNode[U](f: RBNode[K, V] => U): Unit = { + if(left ne null) left.foreachNode(f) + f(this) + if(right ne null) right.foreachNode(f) + } + } + + @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + new RBNode(key, hash, value, red, null, null, parent) + + @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = + if (node.left eq null) node else minNodeNonNull(node.left) + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): RBNode[A, B] = nextNode match { + case null => Iterator.empty.next() + case node => + nextNode = successor(node) + node + } + } + + /////////////////////////// Linked List Node + + private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + override def toString = s"LLNode($key, $value, $hash) -> $next" + + private[this] def eq(a: Any, b: Any): Boolean = + if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { + if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this + else if((next eq null) || (hash > h)) null + else next.getNode(k, h) + } + + @tailrec def foreach[U](f: ((K, V)) => U): Unit = { + f((key, value)) + if(next ne null) next.foreach(f) + } + + @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { + f(key, value) + if(next ne null) next.foreachEntry(f) + } + + @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { + f(this) + if(next ne null) next.foreachNode(f) + } + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Growable.scala b/tests/pos-special/stdlib/collection/mutable/Growable.scala index 3b5eabac37bf..914742b9013a 100644 --- a/tests/pos-special/stdlib/collection/mutable/Growable.scala +++ 
b/tests/pos-special/stdlib/collection/mutable/Growable.scala
@@ -14,8 +14,6 @@ package scala
 package collection
 package mutable
 
-import language.experimental.captureChecking
-
 /** This trait forms part of collections that can be augmented
  * using a `+=` operator and that can be cleared of all elements using
  * a `clear` method.
@@ -56,7 +54,7 @@ trait Growable[-A] extends Clearable {
   * @param xs the IterableOnce producing the elements to $add.
   * @return the $coll itself.
   */
-  def addAll(xs: IterableOnce[A]^): this.type = {
+  def addAll(xs: IterableOnce[A]): this.type = {
     if (xs.asInstanceOf[AnyRef] eq this) addAll(Buffer.from(xs)) // avoid mutating under our own iterator
     else {
       val it = xs.iterator
@@ -68,7 +66,7 @@
   }
 
   /** Alias for `addAll` */
-  @`inline` final def ++= (xs: IterableOnce[A]^): this.type = addAll(xs)
+  @`inline` final def ++= (xs: IterableOnce[A]): this.type = addAll(xs)
 
   /** @return The number of elements in the collection under construction, if it can be cheaply computed,
    * -1 otherwise. The default implementation always returns -1.
@@ -85,7 +83,7 @@ object Growable {
   * @tparam A Element type
   * @return The filled instance
   */
-  def from[A](empty: Growable[A], it: collection.IterableOnce[A]^): empty.type = empty ++= it
+  def from[A](empty: Growable[A], it: collection.IterableOnce[A]): empty.type = empty ++= it
 }
diff --git a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala
new file mode 100644
index 000000000000..7e945dffb99e
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala
@@ -0,0 +1,37 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.mutable
+
+
+/** The canonical builder for collections that are growable, i.e. that support an
+ * efficient `+=` method which adds an element to the collection.
+ *
+ * GrowableBuilders can produce only a single instance of the collection they are growing.
+ *
+ * @define Coll `GrowableBuilder`
+ * @define coll growable builder
+ */
+class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To)
+  extends Builder[Elem, To] {
+
+  def clear(): Unit = elems.clear()
+
+  def result(): To = elems
+
+  def addOne(elem: Elem): this.type = { elems += elem; this }
+
+  override def addAll(xs: IterableOnce[Elem]): this.type = { elems.addAll(xs); this }
+
+  override def knownSize: Int = elems.knownSize
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/HashMap.scala b/tests/pos-special/stdlib/collection/mutable/HashMap.scala
new file mode 100644
index 000000000000..7ad3cf3869e8
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/HashMap.scala
@@ -0,0 +1,654 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.annotation.{nowarn, tailrec}
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializationProxy
+import scala.util.hashing.MurmurHash3
+
+/** This class implements mutable maps using a hashtable.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]]
+ * section on `Hash Tables` for more information.
+ *
+ * @tparam K the type of the keys contained in this hash map.
+ * @tparam V the type of the values assigned to keys in this hash map.
+ *
+ * @define Coll `mutable.HashMap`
+ * @define coll mutable hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0")
+class HashMap[K, V](initialCapacity: Int, loadFactor: Double)
+  extends AbstractMap[K, V]
+    with MapOps[K, V, HashMap, HashMap[K, V]]
+    with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]]
+    with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]]
+    with MapFactoryDefaults[K, V, HashMap, Iterable]
+    with Serializable {
+
+  /* The HashMap class holds the following invariant:
+   * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i.
+   * - Every bucket is sorted in ascending hash order
+   * - The sum of the lengths of all buckets is equal to contentSize.
+   */
+  def this() = this(HashMap.defaultInitialCapacity, HashMap.defaultLoadFactor)
+
+  import HashMap.Node
+
+  /** The actual hash table. */
+  private[this] var table = new Array[Node[K, V]](tableSizeFor(initialCapacity))
+
+  /** The next size value at which to resize (capacity * load factor). */
+  private[this] var threshold: Int = newThreshold(table.length)
+
+  private[this] var contentSize = 0
+
+  override def size: Int = contentSize
+
+  /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash. */
+  @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash)
+
+  /** Computes the improved hash of an original (`any.##`) hash. */
+  @`inline` private[this] def improveHash(originalHash: Int): Int = {
+    // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the
+    // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement
+    // algorithm as in java.util.HashMap.
+    //
+    // This function is also its own inverse.
That is, for all ints i, improveHash(improveHash(i)) = i + // this allows us to retrieve the original hash when we need it, for instance when appending to an immutable.HashMap + // and that is why unimproveHash simply forwards to this method + originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(key: K): Boolean = findNode(key) ne null + + @`inline` private[this] def findNode(key: K): Node[K, V] = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findNode(key, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + sizeHint(xs.knownSize) + + xs match { + case hm: immutable.HashMap[K, V] => + hm.foreachWithHash((k, v, h) => put0(k, v, improveHash(h), getOld = false)) + this + case hm: mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + put0(next.key, next.value, next.hash, getOld = false) + } + this + case lhm: mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val entry = iter.next() + put0(entry.key, entry.value, entry.hash, getOld = false) + } + this + case thatMap: Map[K, V] => + thatMap.foreachEntry { (key: K, value: V) => + put0(key, value, improveHash(key.##), getOld = false) + } + this + case _ => + super.addAll(xs) + } + } + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... 
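+      // (illustrative, hypothetical subclass, not part of this file: e.g.
+      //   class DefaultingMap extends HashMap[String, Int] {
+      //     override def get(key: String) = super.get(key).orElse(Some(0))
+      //   }
+      // would disagree with the raw table lookup done below, so any subtype
+      // takes the generic, override-respecting path instead.)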
+ super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundNode: Node[K, V] = null + var previousNode: Node[K, V] = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findNode(prev: Node[K, V], nd: Node[K, V], k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousNode = prev + foundNode = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findNode(nd, nd.next, k, h) + } + + findNode(null, nd, key, hash) + } + + val previousValue = foundNode match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousNode != null) previousNode.next = foundNode.next + else table(indexedHash) = foundNode.next + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundNode.value = newValue + } + nextValue + } + } + + override def subtractAll(xs: IterableOnce[K]): this.type = { + if (size == 0) { + return this + } + + xs match { + case hs: immutable.HashSet[K] => + hs.foreachWithHashWhile { (k, h) => + remove0(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[K] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + remove0(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: mutable.LinkedHashSet[K] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove0(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } + } + + /** Adds a key-value pair to this map + * + * @param key the key to add + * @param value the value to add + * @param hash the **improved** hashcode of `key` (see computeHash) + * @param getOld if true, then the previous value for `key` will be returned, otherwise, false + */ + private[this] def put0(key: K, value: V, hash: Int, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = new Node[K, V](key, hash, value, null) + case old => + var prev: Node[K, V] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if(getOld) Some(old) else null + } + prev = n + n = n.next + } + if(prev eq null) table(idx) = new Node(key, hash, value, old) + else prev.next = new Node(key, hash, value, prev.next) + } + contentSize += 1 + null + } + + private def remove0(elem: K) : Node[K, V] = remove0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def remove0(elem: K, 
hash: Int) : Node[K, V] = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + private[this] abstract class HashMapIterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + private[this] var node: Node[K, V] = null + private[this] val len = table.length + + protected[this] def extract(nd: Node[K, V]): A + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } + } + + def next(): A = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } + } + + override def iterator: Iterator[(K, V)] = + if(size == 0) Iterator.empty + else new HashMapIterator[(K, V)] { + protected[this] def extract(nd: Node[K, V]) = (nd.key, nd.value) + } + + override def keysIterator: Iterator[K] = + if(size == 0) Iterator.empty + else new HashMapIterator[K] { + protected[this] def extract(nd: Node[K, V]) = nd.key + } + + override def valuesIterator: Iterator[V] = + if(size == 0) Iterator.empty + else new HashMapIterator[V] { + protected[this] def extract(nd: Node[K, V]) = nd.value + } + + + /** Returns an iterator over the nodes stored in this HashMap */ + private[collection] def nodeIterator: Iterator[Node[K, V]] = + if(size == 0) Iterator.empty + else new HashMapIterator[Node[K, V]] { + protected[this] def extract(nd: Node[K, V]) = nd + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. + parUnbox(new convert.impl.AnyTableStepper[(K, V), Node[K, V]](size, table, _.next, node => (node.key, node.value), 0, table.length)). 
+ asInstanceOf[S with EfficientSplit] + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[K, Node[K, V]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.value.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[V, Node[K, V]](size, table, _.next, _.value, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + if (newlen < 0) + throw new RuntimeException(s"new HashMap table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[K, V] = preLow + var lastHigh: Node[K, V] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd.value) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd.value + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. 
but in the common case, we can avoid the Option boxing. + val nd = findNode(key) + if (nd eq null) default else nd.value + } + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... + super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findNode(key, hash) + } + if(nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + } + } + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + override def remove(key: K): Option[V] = remove0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreachEntry(f) + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new mutable.HashMap.DeserializationFactory[K, V](table.length, loadFactor), this) + + override def filterInPlace(p: (K, V) => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key, head.value)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + if (p(next.key, next.value)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + bucket += 1 + } + } + this + } + + // TODO: rename to `mapValuesInPlace` and override the base version (not binary compatible) + private[mutable] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val len = table.length + var i = 0 + while (i < len) { + var n = table(i) + while (n ne null) { + n.value = f(n.key, n.value) + n = n.next + } + i += 1 + } + this + } + + override def mapFactory: MapFactory[HashMap] = HashMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "HashMap" + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new HashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[K, V]): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } +} + +/** + * $factoryInfo + * @define Coll `mutable.HashMap` + * @define coll mutable hash map + */ 
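+// (worked example, illustrative: in `from` below, k = 96 known elements give
+// cap = ((96 + 1).toDouble / 0.75).toInt = 129, which tableSizeFor rounds up
+// to the power of two 256; the resulting threshold 256 * 0.75 = 192 exceeds
+// 96, so the map is filled without any intermediate resize.)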
+@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + def empty[K, V]: HashMap[K, V] = new HashMap[K, V] + + def from[K, V](it: collection.IterableOnce[(K, V)]): HashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashMap[K, V](cap, defaultLoadFactor).addAll(it) + } + + def newBuilder[K, V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = + new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) + def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor) + } + + private[collection] final class Node[K, V](_key: K, _hash: Int, private[this] var _value: V, private[this] var _next: Node[K, V]) { + def key: K = _key + def hash: Int = _hash + def value: V = _value + def value_= (v: V): Unit = _value = v + def next: Node[K, V] = _next + def next_= (n: Node[K, V]): Unit = _next = n + + @tailrec + def findNode(k: K, h: Int): Node[K, V] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: ((K, V)) => U): Unit = { + f((_key, _value)) + if(_next ne null) _next.foreach(f) + } + + @tailrec + def foreachEntry[U](f: (K, V) => U): Unit = { + f(_key, _value) + if(_next ne null) _next.foreachEntry(f) + } + + override def toString = s"Node($key, $value, $hash) -> $next" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/HashSet.scala b/tests/pos-special/stdlib/collection/mutable/HashSet.scala new file mode 100644 index 000000000000..425721a41626 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashSet.scala @@ -0,0 +1,456 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 + +/** This class implements mutable sets using a hashtable. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. 
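+ *
+ * A small usage sketch (illustrative, not from the original doc):
+ * {{{
+ *   val s = mutable.HashSet.empty[Int]
+ *   s += 1; s += 2; s += 1
+ *   s.size          // 2: duplicates are ignored
+ *   s.remove(1)     // true
+ *   s.contains(1)   // false
+ * }}}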
+ * + * @define Coll `mutable.HashSet` + * @define coll mutable hash set + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class HashSet[A](initialCapacity: Int, loadFactor: Double) + extends AbstractSet[A] + with SetOps[A, HashSet, HashSet[A]] + with StrictOptimizedIterableOps[A, HashSet, HashSet[A]] + with IterableFactoryDefaults[A, HashSet] + with Serializable { + + def this() = this(HashSet.defaultInitialCapacity, HashSet.defaultLoadFactor) + + import HashSet.Node + + /* The Hashset class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i. + * - Every bucket is sorted in ascendent hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + /** The actual hash table. */ + private[this] var table = new Array[Node[A]](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of an original (`any.##`) hash. */ + private[this] def improveHash(originalHash: Int): Int = { + // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the + // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement + // algorithm as in java.util.HashMap. + originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this element */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(elem: A): Boolean = findNode(elem) ne null + + @`inline` private[this] def findNode(elem: A): Node[A] = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case nd => nd.findNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + + override def add(elem: A) : Boolean = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + addElem(elem, computeHash(elem)) + } + + override def addAll(xs: IterableOnce[A]): this.type = { + sizeHint(xs.knownSize) + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHash((k, h) => addElem(k, improveHash(h))) + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case lhs: mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case _ => super.addAll(xs) + } + } + + override def subtractAll(xs: IterableOnce[A]): this.type = { + if (size == 0) { + return this + } + + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHashWhile { (k, h) => + remove(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: 
mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } + } + + /** Adds an element to this set + * @param elem element to add + * @param hash the **improved** hash of `elem` (see computeHash) + */ + private[this] def addElem(elem: A, hash: Int) : Boolean = { + val idx = index(hash) + table(idx) match { + case null => + table(idx) = new Node(elem, hash, null) + case old => + var prev: Node[A] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + if(prev eq null) + table(idx) = new Node(elem, hash, old) + else + prev.next = new Node(elem, hash, prev.next) + } + contentSize += 1 + true + } + + private[this] def remove(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } + + override def remove(elem: A) : Boolean = remove(elem, computeHash(elem)) + + private[this] abstract class HashSetIterator[B] extends AbstractIterator[B] { + private[this] var i = 0 + private[this] var node: Node[A] = null + private[this] val len = table.length + + protected[this] def extract(nd: Node[A]): B + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } + } + + def next(): B = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } + } + + override def iterator: Iterator[A] = new HashSetIterator[A] { + override protected[this] def extract(nd: Node[A]): A = nd.key + } + + /** Returns an iterator over the nodes stored in this HashSet */ + private[collection] def nodeIterator: Iterator[Node[A]] = new HashSetIterator[Node[A]] { + override protected[this] def extract(nd: Node[A]): Node[A] = nd + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[A]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[A, Node[A]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[A] = new Node(null.asInstanceOf[A], 0, null) + val preHigh: Node[A] = new Node(null.asInstanceOf[A], 0, null) + // Split buckets until the new length has been reached. 
This could be done more + // efficiently when growing an already filled table to more than double the size. + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[A] = preLow + var lastHigh: Node[A] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def filterInPlace(p: A => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + if (p(next.key)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + bucket += 1 + } + } + this + } + + /* + private[mutable] def checkTable(): Unit = { + var i = 0 + var count = 0 + var prev: Node[A] = null + while(i < table.length) { + var n = table(i) + prev = null + while(n != null) { + count += 1 + assert(index(n.hash) == i) + if(prev ne null) assert(prev.hash <= n.hash) + prev = n + n = n.next + } + i += 1 + } + assert(contentSize == count) + } + */ + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def iterableFactory: IterableFactory[HashSet] = HashSet + + @`inline` def addOne(elem: A): this.type = { add(elem); this } + + @`inline` def subtractOne(elem: A): this.type = { remove(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: A => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new HashSet.DeserializationFactory[A](table.length, loadFactor), this) + + override protected[this] def className = "HashSet" + + override def hashCode: Int = { + val setIterator = this.iterator + val hashIterator: Iterator[Any] = + if (setIterator.isEmpty) setIterator + else new HashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[A]): Any = { + hash = unimproveHash(nd.hash) + this + } + } + MurmurHash3.unorderedHash(hashIterator, MurmurHash3.setSeed) + } +} + +/** + * $factoryInfo + * @define Coll `mutable.HashSet` + * @define coll mutable hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + def from[B](it: scala.collection.IterableOnce[B]): HashSet[B] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashSet[B](cap, defaultLoadFactor) ++= it + } + + def empty[A]: HashSet[A] = new HashSet[A] + + def newBuilder[A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, 
defaultLoadFactor) + + def newBuilder[A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = + new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it + def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor) + } + + private[collection] final class Node[K](_key: K, _hash: Int, private[this] var _next: Node[K]) { + def key: K = _key + def hash: Int = _hash + def next: Node[K] = _next + def next_= (n: Node[K]): Unit = _next = n + + @tailrec + def findNode(k: K, h: Int): Node[K] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: K => U): Unit = { + f(_key) + if(_next ne null) _next.foreach(f) + } + + override def toString = s"Node($key, $hash) -> $next" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/HashTable.scala b/tests/pos-special/stdlib/collection/mutable/HashTable.scala new file mode 100644 index 000000000000..4153bd532163 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashTable.scala @@ -0,0 +1,417 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import collection.{AbstractIterator, Iterator} + +import java.lang.Integer.{numberOfLeadingZeros, rotateRight} +import scala.util.hashing.byteswap32 + +import java.lang.Integer + +/** This class can be used to construct data structures that are based + * on hashtables. Class `HashTable[A]` implements a hashtable + * that maps keys of type `A` to values of the fully abstract + * member type `Entry`. Classes that make use of `HashTable` + * have to provide an implementation for `Entry`. + * + * There are mainly two parameters that affect the performance of a hashtable: + * the initial size and the load factor. The size + * refers to the number of buckets in the hashtable, and the load + * factor is a measure of how full the hashtable is allowed to get before + * its size is automatically doubled. Both parameters may be changed by + * overriding the corresponding values in class `HashTable`. + * + * @tparam A type of the elements contained in this hash table. + */ +// Not used in the standard library, but used in scala-parallel-collections +private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { + // Replacing Entry type parameter by abstract type member here allows to not expose to public + // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. + // However, I'm afraid it's too late now for such breaking change. + import HashTable._ + + protected var _loadFactor = defaultLoadFactor + + /** The actual hash table. 
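+   * Entries are prepended on insertion, so each bucket is an unordered singly
+   * linked list. For example (illustrative): with the default `_loadFactor`
+   * of 750 (75% expressed in 0.001 steps), a table of length 16 is doubled
+   * once more than newThreshold(750, 16) = (16 * 750) / 1000 = 12 entries
+   * are stored.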
+ */ + protected[collection] var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity) + + /** The number of mappings contained in this hash table. + */ + protected[collection] var tableSize: Int = 0 + + final def size: Int = tableSize + + /** The next size value at which to resize (capacity * load factor). + */ + protected[collection] var threshold: Int = initialThreshold(_loadFactor) + + /** The array keeping track of the number of elements in 32 element blocks. + */ + protected var sizemap: Array[Int] = null + + protected var seedvalue: Int = tableSizeSeed + + protected def tableSizeSeed = Integer.bitCount(table.length - 1) + + /** The initial size of the hash table. + */ + protected def initialSize: Int = 16 + + /** The initial threshold. + */ + private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity) + + private def initialCapacity = capacity(initialSize) + + private def lastPopulatedIndex = { + var idx = table.length - 1 + while (table(idx) == null && idx > 0) + idx -= 1 + + idx + } + + /** + * Initializes the collection from the input stream. `readEntry` will be called for each + * entry to be read from the input stream. + */ + private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry): Unit = { + _loadFactor = in.readInt() + assert(_loadFactor > 0) + + val size = in.readInt() + tableSize = 0 + assert(size >= 0) + + seedvalue = in.readInt() + + val smDefined = in.readBoolean() + + table = new Array(capacity(sizeForThreshold(_loadFactor, size))) + threshold = newThreshold(_loadFactor, table.length) + + if (smDefined) sizeMapInit(table.length) else sizemap = null + + var index = 0 + while (index < size) { + addEntry(readEntry) + index += 1 + } + } + + /** + * Serializes the collection to the output stream by saving the load factor, collection + * size and collection entries. `writeEntry` is responsible for writing an entry to the stream. + * + * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To + * deserialize, `init` should be used. + */ + private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit): Unit = { + out.writeInt(_loadFactor) + out.writeInt(tableSize) + out.writeInt(seedvalue) + out.writeBoolean(isSizeMapDefined) + + foreachEntry(writeEntry) + } + + /** Find entry with given key in table, null if not found. + */ + final def findEntry(key: A): Entry = + findEntry0(key, index(elemHashCode(key))) + + protected[collection] final def findEntry0(key: A, h: Int): Entry = { + var e = table(h).asInstanceOf[Entry] + while (e != null && !elemEquals(e.key, key)) e = e.next + e + } + + /** Add entry to table + * pre: no entry with same key exists + */ + protected[collection] final def addEntry(e: Entry): Unit = { + addEntry0(e, index(elemHashCode(e.key))) + } + + protected[collection] final def addEntry0(e: Entry, h: Int): Unit = { + e.next = table(h).asInstanceOf[Entry] + table(h) = e + tableSize = tableSize + 1 + nnSizeMapAdd(h) + if (tableSize > threshold) + resize(2 * table.length) + } + + /** Find entry with given key in table, or add new one if not found. + * May be somewhat faster then `findEntry`/`addEntry` pair as it + * computes entry's hash index only once. + * Returns entry found in table or null. + * New entries are created by calling `createNewEntry` method. 
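+   * For example (illustrative, assuming the concrete `Entry` exposes a
+   * `value` field): a `getOrElseUpdate` can be layered on top of it as
+   * {{{
+   *   val e = findOrAddEntry(key, default)
+   *   if (e eq null) default else e.value
+   * }}}
+   * since a `null` result means the entry was just created via
+   * `createNewEntry(key, default)`.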
+ */ + def findOrAddEntry(key: A, value: B): Entry = { + val h = index(elemHashCode(key)) + val e = findEntry0(key, h) + if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null } + } + + /** Creates new entry to be immediately inserted into the hashtable. + * This method is guaranteed to be called only once and in case that the entry + * will be added. In other words, an implementation may be side-effecting. + */ + def createNewEntry(key: A, value: B): Entry + + /** Remove entry from table if present. + */ + final def removeEntry(key: A) : Entry = { + removeEntry0(key, index(elemHashCode(key))) + } + /** Remove entry from table if present. + */ + private[collection] final def removeEntry0(key: A, h: Int) : Entry = { + var e = table(h).asInstanceOf[Entry] + if (e != null) { + if (elemEquals(e.key, key)) { + table(h) = e.next + tableSize = tableSize - 1 + nnSizeMapRemove(h) + e.next = null + return e + } else { + var e1 = e.next + while (e1 != null && !elemEquals(e1.key, key)) { + e = e1 + e1 = e1.next + } + if (e1 != null) { + e.next = e1.next + tableSize = tableSize - 1 + nnSizeMapRemove(h) + e1.next = null + return e1 + } + } + } + null + } + + /** An iterator returning all entries. + */ + def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] { + val iterTable = table + var idx = lastPopulatedIndex + var es = iterTable(idx) + + def hasNext = es != null + def next() = { + val res = es + es = es.next + while (es == null && idx > 0) { + idx = idx - 1 + es = iterTable(idx) + } + res.asInstanceOf[Entry] + } + } + + /** Avoid iterator for a 2x faster traversal. */ + def foreachEntry[U](f: Entry => U): Unit = { + val iterTable = table + var idx = lastPopulatedIndex + var es = iterTable(idx) + + while (es != null) { + val next = es.next // Cache next in case f removes es. + f(es.asInstanceOf[Entry]) + es = next + + while (es == null && idx > 0) { + idx -= 1 + es = iterTable(idx) + } + } + } + + /** Remove all entries from table + */ + def clearTable(): Unit = { + var i = table.length - 1 + while (i >= 0) { table(i) = null; i = i - 1 } + tableSize = 0 + nnSizeMapReset(0) + } + + private def resize(newSize: Int): Unit = { + val oldTable = table + table = new Array(newSize) + nnSizeMapReset(table.length) + var i = oldTable.length - 1 + while (i >= 0) { + var e = oldTable(i) + while (e != null) { + val h = index(elemHashCode(e.key)) + val e1 = e.next + e.next = table(h).asInstanceOf[Entry] + table(h) = e + e = e1 + nnSizeMapAdd(h) + } + i = i - 1 + } + threshold = newThreshold(_loadFactor, newSize) + } + + /* Size map handling code */ + + /* + * The following three sizeMap* functions (Add, Remove, Reset) + * are used to update the size map of the hash table. + * + * The size map logically divides the hash table into `sizeMapBucketSize` element buckets + * by keeping an integer entry for each such bucket. Each integer entry simply denotes + * the number of elements in the corresponding bucket. + * Best understood through an example, see: + * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) + * sizemap = [ 2 | 3 ] (2 entries) + * where sizeMapBucketSize == 4. + * + * By default the size map is not initialized, so these methods don't do anything, thus, + * their impact on hash table performance is negligible. However, if the hash table + * is converted into a parallel hash table, the size map is initialized, as it will be needed + * there. 
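+   * Updates stay O(1): with sizeMapBucketBitSize = 5, an insertion into hash
+   * bucket h simply runs sizemap(h >> 5) += 1, i.e. one counter is kept per
+   * 32 hash buckets (see nnSizeMapAdd below).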
+ */ + protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) { + sizemap(h >> sizeMapBucketBitSize) += 1 + } + + protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) { + sizemap(h >> sizeMapBucketBitSize) -= 1 + } + + protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { + val nsize = calcSizeMapSize(tableLength) + if (sizemap.length != nsize) sizemap = new Array[Int](nsize) + else java.util.Arrays.fill(sizemap, 0) + } + + private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize + + protected final def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 + + // discards the previous sizemap and only allocates a new one + protected def sizeMapInit(tableLength: Int): Unit = { + sizemap = new Array[Int](calcSizeMapSize(tableLength)) + } + + // discards the previous sizemap and populates the new one + protected final def sizeMapInitAndRebuild() = { + sizeMapInit(table.length) + + // go through the buckets, count elements + var tableidx = 0 + var bucketidx = 0 + val tbl = table + var tableuntil = 0 + if (tbl.length < sizeMapBucketSize) tableuntil = tbl.length else tableuntil = sizeMapBucketSize + val totalbuckets = totalSizeMapBuckets + while (bucketidx < totalbuckets) { + var currbucketsize = 0 + while (tableidx < tableuntil) { + var e = tbl(tableidx) + while (e ne null) { + currbucketsize += 1 + e = e.next + } + tableidx += 1 + } + sizemap(bucketidx) = currbucketsize + tableuntil += sizeMapBucketSize + bucketidx += 1 + } + } + + private[collection] def printSizeMap() = { + println(sizemap.to(collection.immutable.List)) + } + + protected final def sizeMapDisable() = sizemap = null + + protected final def isSizeMapDefined = sizemap ne null + + // override to automatically initialize the size map + protected def alwaysInitSizeMap = false + + /* End of size map handling code */ + + protected def elemEquals(key1: A, key2: A): Boolean = (key1 == key2) + + /** + * Note: we take the most significant bits of the hashcode, not the lower ones + * this is of crucial importance when populating the table in parallel + */ + protected[collection] final def index(hcode: Int): Int = { + val ones = table.length - 1 + val exponent = Integer.numberOfLeadingZeros(ones) + (improve(hcode, seedvalue) >>> exponent) & ones + } +} + +private[collection] object HashTable { + /** The load factor for the hash table (in 0.001 step). + */ + private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75% + private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible + + private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt + + private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt + + private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize) + + trait HashUtils[KeyType] { + protected final def sizeMapBucketBitSize = 5 + // so that: + protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize + + protected[collection] def elemHashCode(key: KeyType) = key.## + + /** + * Defer to a high-quality hash in [[scala.util.hashing]]. + * The goal is to distribute across bins as well as possible even if a hash code has low entropy at some bits. + *
+   *
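+   * The current strategy, restated for illustration (it mirrors the one-line
+   * implementation below):
+   * {{{
+   *   import java.lang.Integer.rotateRight
+   *   import scala.util.hashing.byteswap32
+   *   def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed)
+   * }}}
+   * `byteswap32` mixes the bits with odd-constant multiplications around a
+   * byte reversal, and rotating by the per-table `seed` decorrelates tables
+   * of equal size; `index` then consumes the most significant bits.
+   *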
+ * OLD VERSION - quick, but bad for sequence 0-10000 - little entropy in higher bits - since 2003 + * {{{ + * var h: Int = hcode + ~(hcode << 9) + * h = h ^ (h >>> 14) + * h = h + (h << 4) + * h ^ (h >>> 10) + * }}} + * the rest of the computation is due to SI-5293 + */ + protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed) + } + + /** + * Returns a power of two >= `target`. + */ + private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) +} + +/** Class used internally. + */ +private[collection] trait HashEntry[A, E <: HashEntry[A, E]] { + val key: A + var next: E = _ +} diff --git a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala new file mode 100644 index 000000000000..c801f073fb0d --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala @@ -0,0 +1,31 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + + +/** + * Reusable builder for immutable collections + */ +abstract class ImmutableBuilder[-A, C <: IterableOnce[_]](empty: C) + extends ReusableBuilder[A, C] { + + protected var elems: C = empty + + def clear(): Unit = { elems = empty } + + def result(): C = elems + + override def knownSize: Int = elems.knownSize +} diff --git a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala new file mode 100644 index 000000000000..24d54905de22 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala @@ -0,0 +1,83 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +trait IndexedSeq[T] extends Seq[T] + with scala.collection.IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] { + + override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq +} + +@SerialVersionUID(3L) +object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](ArrayBuffer) + +trait IndexedSeqOps[A, +CC[_], +C <: AnyRef] + extends scala.collection.IndexedSeqOps[A, CC, C] + with SeqOps[A, CC, C] { + + /** Modifies this $coll by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return this $coll modified by replacing all elements with the + * result of applying the given function `f` to each element + * of this $coll. + */ + def mapInPlace(f: A => A): this.type = { + var i = 0 + val siz = size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + /** Sorts this $coll in place according to an Ordering. + * + * @see [[scala.collection.SeqOps.sorted]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. 
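+   * For example (illustrative):
+   * {{{
+   *   val buf = ArrayBuffer(3, 1, 2)
+   *   buf.sortInPlace()   // buf is now ArrayBuffer(1, 2, 3), the same instance
+   * }}}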
+ */ + def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + val len = this.length + if (len > 1) { + val arr = new Array[AnyRef](len) + var i = 0 + for (x <- this) { + arr(i) = x.asInstanceOf[AnyRef] + i += 1 + } + java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) + i = 0 + while (i < arr.length) { + update(i, arr(i).asInstanceOf[A]) + i += 1 + } + } + this + } + + /** Sorts this $coll in place according to a comparison function. + * + * @see [[scala.collection.SeqOps.sortWith]] + */ + def sortInPlaceWith(lt: (A, A) => Boolean): this.type = sortInPlace()(Ordering.fromLessThan(lt)) + + /** Sorts this $coll in place according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.collection.SeqOps.sortBy]] + */ + def sortInPlaceBy[B](f: A => B)(implicit ord: Ordering[B]): this.type = sortInPlace()(ord on f) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/Iterable.scala b/tests/pos-special/stdlib/collection/mutable/Iterable.scala index bf286157b376..d05aeed88044 100644 --- a/tests/pos-special/stdlib/collection/mutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Iterable.scala @@ -13,13 +13,11 @@ package scala.collection.mutable import scala.collection.{IterableFactory, IterableFactoryDefaults} -import language.experimental.captureChecking trait Iterable[A] extends collection.Iterable[A] with collection.IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { - this: Iterable[A]^ => override def iterableFactory: IterableFactory[Iterable] = Iterable } @@ -33,5 +31,4 @@ trait Iterable[A] object Iterable extends IterableFactory.Delegate[Iterable](ArrayBuffer) /** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ -abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A]: - this: AbstractIterable[A]^ => +abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A] diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala new file mode 100644 index 000000000000..bc663f1d37d8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala @@ -0,0 +1,509 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 + + +/** This class implements mutable maps using a hashtable. + * The iterator and all traversal methods of this class visit elements in the order they were inserted. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values assigned to keys in this hash map. 
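+ *
+ * For example (illustrative):
+ * {{{
+ *   val m = mutable.LinkedHashMap("b" -> 2, "a" -> 1)
+ *   m += ("c" -> 3)
+ *   m.keys.toList   // List("b", "a", "c"): insertion order, not key order
+ * }}}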
+ * + * @define Coll `LinkedHashMap` + * @define coll linked hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11") +class LinkedHashMap[K, V] + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]] + with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with MapFactoryDefaults[K, V, LinkedHashMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap + + // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper + // would not return the elements in insertion order + + private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] + + private[collection] def _firstEntry: Entry = firstEntry + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null + + /* Uses the same implementation as mutable.HashMap. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize)) + + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def last: (K, V) = + if (size > 0) (lastEntry.key, lastEntry.value) + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") + + override def lastOption: Option[(K, V)] = + if (size > 0) Some((lastEntry.key, lastEntry.value)) + else None + + override def head: (K, V) = + if (size > 0) (firstEntry.key, firstEntry.value) + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") + + override def headOption: Option[(K, V)] = + if (size > 0) Some((firstEntry.key, firstEntry.value)) + else None + + override def size = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + + def get(key: K): Option[V] = { + val e = findEntry(key) + if (e == null) None + else Some(e.value) + } + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashMap.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } + + override def contains(key: K): Boolean = { + if (getClass eq classOf[LinkedHashMap[_, _]]) + findEntry(key) != null + else + super.contains(key) // A subclass might override `get`, use the default implementation `contains`. + } + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + override def remove(key: K): Option[V] = removeEntry0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. but in the common case, we can avoid the Option boxing. 
+ val nd = findEntry(key) + if (nd eq null) default else nd.value + } + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findEntry(key, hash) + } + if (nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if (contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + } + } + + private[this] def removeEntry0(elem: K): Entry = removeEntry0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def removeEntry0(elem: K, hash: Int): Entry = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + /** Computes the improved hash of an original (`any.##`) hash. 
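+   * For example (illustrative): improveHash(0x12340000) = 0x12341234, and
+   * improving that again yields 0x12340000; the function is its own inverse,
+   * which is what `unimproveHash` relies on.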
*/ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findEntry(key: K): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) + } + } + + def addOne(kv: (K, V)): this.type = { + put(kv._1, kv._2) + this + } + + def subtractOne(key: K): this.type = { + remove(key) + this + } + + private[this] abstract class LinkedHashMapIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } + else Iterator.empty.next() + } + + def iterator: Iterator[(K, V)] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[(K, V)] { + def extract(nd: Entry): (K, V) = (nd.key, nd.value) + } + + protected class LinkedKeySet extends KeySet { + override def iterableFactory: IterableFactory[collection.Set] = LinkedHashSet + } + + override def keySet: collection.Set[K] = new LinkedKeySet + + override def keysIterator: Iterator[K] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[K] { + def extract(nd: Entry): K = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[Entry] { + def extract(nd: Entry): Entry = nd + } + + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... 
+ super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundEntry: Entry = null + var previousEntry: Entry = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findEntry(prev: Entry, nd: Entry, k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousEntry = prev + foundEntry = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findEntry(nd, nd.next, k, h) + } + + findEntry(null, nd, key, hash) + } + + val previousValue = foundEntry match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousEntry != null) previousEntry.next = foundEntry.next + else table(indexedHash) = foundEntry.next + deleteEntry(foundEntry) + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundEntry.value = newValue + } + nextValue + } + } + + override def valuesIterator: Iterator[V] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[V] { + def extract(nd: Entry): V = nd.value + } + + + override def foreach[U](f: ((K, V)) => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f((cur.key, cur.value)) + cur = cur.later + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f(cur.key, cur.value) + cur = cur.later + } + } + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt + + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. 
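+   * Together with deleteEntry below this maintains an intrusive doubly
+   * linked list threaded through the entries, e.g. (illustrative)
+   *   firstEntry <-> e1 <-> e2 <-> lastEntry   (via earlier/later)
+   * so iteration follows `later` in insertion order while bucket lookups
+   * still follow `next`.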
+ * */ + private[this] def createNewEntry(key: K, hash: Int, value: V): Entry = { + val e = new Entry(key, hash, value) + if (firstEntry eq null) firstEntry = e + else { + lastEntry.later = e + e.earlier = lastEntry + } + lastEntry = e + e + } + + /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null + } + + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = createNewEntry(key, hash, value) + case old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if (getOld) Some(old) else null + } + prev = n + n = n.next + } + val nnode = createNewEntry(key, hash, value) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + null + } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + val preHigh = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
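+ // Each old bucket i splits in two: entries whose (improved) hash has the
+ // `oldlen` bit clear stay at index i, the others move to index i + oldlen.
+ // For example, growing 8 -> 16: hash 0b0101 stays in bucket 5 while
+ // hash 0b1101 moves to bucket 13.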
+ while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new LinkedHashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashMap" +} + +/** $factoryInfo + * @define Coll `LinkedHashMap` + * @define coll linked hash map + */ +@SerialVersionUID(3L) +object LinkedHashMap extends MapFactory[LinkedHashMap] { + + def empty[K, V] = new LinkedHashMap[K, V] + + def from[K, V](it: collection.IterableOnce[(K, V)]) = { + val newlhm = empty[K, V] + newlhm.sizeHint(it.knownSize) + newlhm.addAll(it) + newlhm + } + + def newBuilder[K, V] = new GrowableBuilder(empty[K, V]) + + /** Class for the linked hash map entry, used internally. + */ + private[mutable] final class LinkedEntry[K, V](val key: K, val hash: Int, var value: V) { + var earlier: LinkedEntry[K, V] = null + var later: LinkedEntry[K, V] = null + var next: LinkedEntry[K, V] = null + + @tailrec + final def findEntry(k: K, h: Int): LinkedEntry[K, V] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) + } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 +} diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala new file mode 100644 index 000000000000..0c01f8ea79ea --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala @@ -0,0 +1,348 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 + +/** This class implements mutable sets using a hashtable. + * The iterator and all traversal methods of this class visit elements in the order they were inserted. + * + * @tparam A the type of the elements contained in this set. 
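+ *
+ * @example A brief usage sketch (illustrative, not part of the original source):
+ * {{{
+ *   val s = LinkedHashSet(3, 1, 2)
+ *   s += 4
+ *   s.toList // List(3, 1, 2, 4): iteration follows insertion order
+ * }}}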
+ * + * @define Coll `LinkedHashSet` + * @define coll linked hash set + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecatedInheritance("LinkedHashSet will be made final", "2.13.11") +class LinkedHashSet[A] + extends AbstractSet[A] + with SetOps[A, LinkedHashSet, LinkedHashSet[A]] + with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] + with IterableFactoryDefaults[A, LinkedHashSet] + with DefaultSerializable { + + override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet + + // stepper is not overridden to use XTableStepper because that stepper would not return the + // elements in insertion order + + /*private*/ type Entry = LinkedHashSet.Entry[A] + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null + + /* Uses the same implementation as mutable.HashSet. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashSet.defaultinitialSize)) + + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def last: A = + if (size > 0) lastEntry.key + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") + + override def lastOption: Option[A] = + if (size > 0) Some(lastEntry.key) + else None + + override def head: A = + if (size > 0) firstEntry.key + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") + + override def headOption: Option[A] = + if (size > 0) Some(firstEntry.key) + else None + + override def size: Int = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + + def contains(elem: A): Boolean = findEntry(elem) ne null + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashSet.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } + + override def add(elem: A): Boolean = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(elem) + put0(elem, hash, index(hash)) + } + + def addOne(elem: A): this.type = { + add(elem) + this + } + + def subtractOne(elem: A): this.type = { + remove(elem) + this + } + + override def remove(elem: A): Boolean = remove0(elem, computeHash(elem)) + + private[this] abstract class LinkedHashSetIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } + else Iterator.empty.next() + } + + def iterator: Iterator[A] = new LinkedHashSetIterator[A] { + override def extract(nd: Entry): A = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = new LinkedHashSetIterator[Entry] { + override def extract(nd: Entry): Entry = nd + } + + override def foreach[U](f: A => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f(cur.key) + cur = cur.later + } + } + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def tableSizeFor(capacity: Int) = + 
(Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashSet.defaultLoadFactor).toInt + + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findEntry(key: A): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) + } + } + + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. + * */ + private[this] def createNewEntry(key: A, hash: Int): Entry = { + val e = new Entry(key, hash) + if (firstEntry eq null) firstEntry = e + else { + lastEntry.later = e + e.earlier = lastEntry + } + lastEntry = e + e + } + + /** Delete the entry from the LinkedHashSet, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null + } + + private[this] def put0(elem: A, hash: Int, idx: Int): Boolean = { + table(idx) match { + case null => + table(idx) = createNewEntry(elem, hash) + case old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + val nnode = createNewEntry(elem, hash) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + true + } + + private[this] def remove0(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[A], 0) + val preHigh = new Entry(null.asInstanceOf[A], 0) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
+ while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + val setHashIterator = + if (isEmpty) this.iterator + else { + new LinkedHashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = unimproveHash(nd.hash) + this + } + } + } + MurmurHash3.unorderedHash(setHashIterator, MurmurHash3.setSeed) + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashSet" +} + +/** $factoryInfo + * @define Coll `LinkedHashSet` + * @define coll linked hash set + */ +@SerialVersionUID(3L) +object LinkedHashSet extends IterableFactory[LinkedHashSet] { + + override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] + + def from[E](it: collection.IterableOnce[E]) = { + val newlhs = empty[E] + newlhs.sizeHint(it.knownSize) + newlhs.addAll(it) + newlhs + } + + def newBuilder[A] = new GrowableBuilder(empty[A]) + + /** Class for the linked hash set entry, used internally. + */ + private[mutable] final class Entry[A](val key: A, val hash: Int) { + var earlier: Entry[A] = null + var later: Entry[A] = null + var next: Entry[A] = null + + @tailrec + final def findEntry(k: A, h: Int): Entry[A] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) + } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 +} + diff --git a/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala index 4f607c770130..d66525763163 100644 --- a/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala @@ -19,8 +19,6 @@ import java.lang.{IllegalArgumentException, IndexOutOfBoundsException} import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence -import scala.annotation.unchecked.uncheckedCaptures -import language.experimental.captureChecking /** A `Buffer` implementation backed by a list. It provides constant time * prepend and append. Most other operations are linear. 
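The `ListBuffer` hunks in this diff only strip the experimental capture-checking
annotations (`sealed` type parameters, `^` capture annotations, `@uncheckedCaptures`)
and their imports; the data structure itself is unchanged. For context, a minimal
usage sketch of the class (illustrative, not part of the patch):

    import scala.collection.mutable.ListBuffer
    val buf = ListBuffer(1, 2) // backed by a mutable cons list with a tail pointer
    buf += 3                   // constant-time append
    buf.prepend(0)             // constant-time prepend
    buf.toList                 // List(0, 1, 2, 3); O(1), later mutations copy first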
@@ -38,7 +36,7 @@ import language.experimental.captureChecking * @define willNotTerminateInf */ @SerialVersionUID(-8428291952499836345L) -class ListBuffer[sealed A] +class ListBuffer[A] extends AbstractBuffer[A] with SeqOps[A, ListBuffer, ListBuffer[A]] with StrictOptimizedSeqOps[A, ListBuffer, ListBuffer[A]] @@ -123,7 +121,7 @@ class ListBuffer[sealed A] } // MUST only be called on fresh instances - private def freshFrom(xs: IterableOnce[A]^): this.type = { + private def freshFrom(xs: IterableOnce[A]): this.type = { val it = xs.iterator if (it.hasNext) { var len = 1 @@ -142,7 +140,7 @@ class ListBuffer[sealed A] this } - override final def addAll(xs: IterableOnce[A]^): this.type = { + override final def addAll(xs: IterableOnce[A]): this.type = { val it = xs.iterator if (it.hasNext) { val fresh = new ListBuffer[A].freshFrom(it) @@ -250,7 +248,7 @@ class ListBuffer[sealed A] } } - def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { + def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { if (idx < 0 || idx > len) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${len-1})") val it = elems.iterator if (it.hasNext) { @@ -307,7 +305,7 @@ class ListBuffer[sealed A] this } - def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + def flatMapInPlace(f: A => IterableOnce[A]): this.type = { mutationCount += 1 var src = first var dst: List[A] = null @@ -347,7 +345,7 @@ class ListBuffer[sealed A] this } - def patchInPlace(from: Int, patch: collection.IterableOnce[A]^, replaced: Int): this.type = { + def patchInPlace(from: Int, patch: collection.IterableOnce[A], replaced: Int): this.type = { val _len = len val _from = math.max(from, 0) // normalized val _replaced = math.max(replaced, 0) // normalized @@ -397,9 +395,9 @@ class ListBuffer[sealed A] @SerialVersionUID(3L) object ListBuffer extends StrictOptimizedSeqFactory[ListBuffer] { - def from[sealed A](coll: collection.IterableOnce[A]^): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) + def from[A](coll: collection.IterableOnce[A]): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) - def newBuilder[sealed A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) + def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) - def empty[A]: ListBuffer[A] = new ListBuffer[A @uncheckedCaptures] + def empty[A]: ListBuffer[A] = new ListBuffer[A] } diff --git a/tests/pos-special/stdlib/collection/mutable/ListMap.scala b/tests/pos-special/stdlib/collection/mutable/ListMap.scala new file mode 100644 index 000000000000..7cc5aa227757 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ListMap.scala @@ -0,0 +1,82 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.List + +/** A simple mutable map backed by a list, so it preserves insertion order. + * + * @tparam K the type of the keys contained in this list map. + * @tparam V the type of the values assigned to keys in this list map. 
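+ *
+ * @example A brief usage sketch (illustrative, not part of the original source):
+ * {{{
+ *   val m = ListMap(1 -> "a")
+ *   m(2) = "b" // updates rebuild part of the backing list: O(n)
+ *   m.get(2)   // Some("b"), found by linear search
+ * }}}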
+ * + * @define Coll `mutable.ListMap` + * @define coll mutable list map + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +class ListMap[K, V] + extends AbstractMap[K, V] + with MapOps[K, V, ListMap, ListMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]] + with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] + with MapFactoryDefaults[K, V, ListMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[ListMap] = ListMap + + private[this] var elems: List[(K, V)] = List() + private[this] var siz: Int = 0 + + def get(key: K): Option[V] = elems find (_._1 == key) map (_._2) + def iterator: Iterator[(K, V)] = elems.iterator + + final override def addOne(kv: (K, V)) = { + val (e, key0) = remove(kv._1, elems, List()) + elems = (key0, kv._2) :: e + siz += 1; this + } + + final override def subtractOne(key: K) = { elems = remove(key, elems, List())._1; this } + + @tailrec + private def remove(key: K, elems: List[(K, V)], acc: List[(K, V)]): (List[(K, V)], K) = { + if (elems.isEmpty) (acc, key) + else if (elems.head._1 == key) { siz -= 1; (acc ::: elems.tail, elems.head._1) } + else remove(key, elems.tail, elems.head :: acc) + } + + final override def clear(): Unit = { elems = List(); siz = 0 } + + final override def size: Int = siz + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + override protected[this] def stringPrefix = "ListMap" +} + +/** $factoryInfo + * @define Coll `mutable.ListMap` + * @define coll mutable list map + */ +@SerialVersionUID(3L) +@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +object ListMap extends MapFactory[ListMap] { + def empty[K, V]: ListMap[K, V] = new ListMap[K, V] + def from[K, V](it: IterableOnce[(K, V)]): ListMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[K, V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V]) +} diff --git a/tests/pos-special/stdlib/collection/mutable/LongMap.scala b/tests/pos-special/stdlib/collection/mutable/LongMap.scala new file mode 100644 index 000000000000..af34ca4ab8c9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LongMap.scala @@ -0,0 +1,673 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions + +/** This class implements mutable maps with `Long` keys based on a hash table with open addressing. + * + * Basic map operations on single entries, including `contains` and `get`, + * are typically substantially faster with `LongMap` than [[HashMap]]. Methods + * that act on the whole map, including `foreach` and `map` are not in + * general expected to be faster than with a generic map, save for those + * that take particular advantage of the internal structure of the map: + * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. + * + * Maps with open addressing may become less efficient at lookup after + * repeated addition/removal of elements. 
Although `LongMap` makes a + * decent attempt to remain efficient regardless, calling `repack` + * on a map that will no longer have elements removed but will be + * used heavily may save both time and storage space. + * + * This map is not intended to contain more than 2^29 entries (approximately + * 500 million). The maximum capacity is 2^30, but performance will degrade + * rapidly as 2^30 is approached. + * + */ +final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBufferSize: Int, initBlank: Boolean) + extends AbstractMap[Long, V] + with MapOps[Long, V, Map, LongMap[V]] + with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]] + with Serializable { + import LongMap._ + + def this() = this(LongMap.exceptionDefault, 16, true) + + // TODO: override clear() with an optimization more tailored for efficiency. + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]): LongMap[V] = { + //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? + val b = newSpecificBuilder + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V]) + + /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */ + def this(defaultEntry: Long => V) = this(defaultEntry, 16, true) + + /** Creates a new `LongMap` with an initial buffer of specified size. + * + * A LongMap can typically contain half as many elements as its buffer size + * before it requires resizing. + */ + def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) + + /** Creates a new `LongMap` with specified default values and initial buffer size. 
*/ + def this(defaultEntry: Long => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + + private[this] var mask = 0 + private[this] var extraKeys: Int = 0 + private[this] var zeroValue: AnyRef = null + private[this] var minValue: AnyRef = null + private[this] var _size = 0 + private[this] var _vacant = 0 + private[this] var _keys: Array[Long] = null + private[this] var _values: Array[AnyRef] = null + + if (initBlank) defaultInitialize(initialBufferSize) + + private[this] def defaultInitialize(n: Int) = { + mask = + if (n<0) 0x7 + else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 + _keys = new Array[Long](mask+1) + _values = new Array[AnyRef](mask+1) + } + + private[collection] def initializeTo( + m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] + ): Unit = { + mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz + } + + override def size: Int = _size + (extraKeys+1)/2 + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + override def empty: LongMap[V] = new LongMap() + + private def imbalanced: Boolean = + (_size + _vacant) > 0.5*mask || _vacant > _size + + private def toIndex(k: Long): Int = { + // Part of the MurmurHash3 32 bit finalizer + val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt + val x = (h ^ (h >>> 16)) * 0x85EBCA6B + (x ^ (x >>> 13)) & mask + } + + private def seekEmpty(k: Long): Int = { + var e = toIndex(k) + var x = 0 + while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + e + } + + private def seekEntry(k: Long): Int = { + var e = toIndex(k) + var x = 0 + var q = 0L + while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + e | MissingBit + } + + private def seekEntryOrOpen(k: Long): Int = { + var e = toIndex(k) + var x = 0 + var q = 0L + while ({ q = _keys(e); if (q==k) return e; q+q != 0}) { + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + if (q == 0) return e | MissingBit + val o = e | MissVacant + while ({ q = _keys(e); if (q==k) return e; q != 0}) { + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + o + } + + override def contains(key: Long): Boolean = { + if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0 + else seekEntry(key) >= 0 + } + + override def get(key: Long): Option[V] = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) None + else if (key == 0) Some(zeroValue.asInstanceOf[V]) + else Some(minValue.asInstanceOf[V]) + } + else { + val i = seekEntry(key) + if (i < 0) None else Some(_values(i).asInstanceOf[V]) + } + } + + override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) default + else if (key == 0) zeroValue.asInstanceOf[V1] + else minValue.asInstanceOf[V1] + } + else { + val i = seekEntry(key) + if (i < 0) default else _values(i).asInstanceOf[V1] + } + } + + override def getOrElseUpdate(key: Long, defaultValue: => V): V = { + if (key == -key) { + val kbits = (key>>>63).toInt + 1 + if ((kbits & extraKeys) == 0) { + val value = defaultValue + extraKeys |= kbits + if (key == 0) zeroValue = value.asInstanceOf[AnyRef] + else minValue = value.asInstanceOf[AnyRef] + value + } + else if (key == 0) zeroValue.asInstanceOf[V] + else minValue.asInstanceOf[V] + } + else { + var i = seekEntryOrOpen(key) + if (i < 0) { + // It is possible that the default value computation was side-effecting + // Our hash table may have resized or 
even contain what we want now
+ // (but if it does, we'll replace it)
+ val value = {
+ val ok = _keys
+ val ans = defaultValue
+ if (ok ne _keys) {
+ i = seekEntryOrOpen(key)
+ if (i >= 0) _size -= 1
+ }
+ ans
+ }
+ _size += 1
+ val j = i & IndexMask
+ _keys(j) = key
+ _values(j) = value.asInstanceOf[AnyRef]
+ if ((i & VacantBit) != 0) _vacant -= 1
+ else if (imbalanced) repack()
+ value
+ }
+ else _values(i).asInstanceOf[V]
+ }
+ }
+
+ /** Retrieves the value associated with a key, or the default for that type if none exists
+ * (null for AnyRef, 0 for floats and integers).
+ *
+ * Note: this is the fastest way to retrieve a value that may or
+ * may not exist, if the default null/zero is acceptable. For key/value
+ * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast.
+ */
+ def getOrNull(key: Long): V = {
+ if (key == -key) {
+ if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V]
+ else if (key == 0) zeroValue.asInstanceOf[V]
+ else minValue.asInstanceOf[V]
+ }
+ else {
+ val i = seekEntry(key)
+ if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V]
+ }
+ }
+
+ /** Retrieves the value associated with a key.
+ * If the key does not exist in the map, the `defaultEntry` for that key
+ * will be returned instead.
+ */
+ override def apply(key: Long): V = {
+ if (key == -key) {
+ if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key)
+ else if (key == 0) zeroValue.asInstanceOf[V]
+ else minValue.asInstanceOf[V]
+ }
+ else {
+ val i = seekEntry(key)
+ if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
+ }
+ }
+
+ /** The user-supplied default value for the key. Throws an exception
+ * if no other default behavior was specified.
+ */
+ override def default(key: Long) = defaultEntry(key)
+
+ private def repack(newMask: Int): Unit = {
+ val ok = _keys
+ val ov = _values
+ mask = newMask
+ _keys = new Array[Long](mask+1)
+ _values = new Array[AnyRef](mask+1)
+ _vacant = 0
+ var i = 0
+ while (i < ok.length) {
+ val k = ok(i)
+ if (k != -k) {
+ val j = seekEmpty(k)
+ _keys(j) = k
+ _values(j) = ov(i)
+ }
+ i += 1
+ }
+ }
+
+ /** Repacks the contents of this `LongMap` for maximum efficiency of lookup.
+ *
+ * For maps that undergo a complex creation process with both addition and
+ * removal of keys, and then are used heavily with no further removal of
+ * elements, calling `repack` after the end of the creation can result in
+ * improved performance. Repacking takes time proportional to the number
+ * of entries in the map.
+ */
+ def repack(): Unit = {
+ var m = mask
+ if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask
+ while (m > 8 && 8*_size < m) m = m >>> 1
+ repack(m)
+ }
+
+ override def put(key: Long, value: V): Option[V] = {
+ if (key == -key) {
+ if (key == 0) {
+ val ans = if ((extraKeys&1) == 1) Some(zeroValue.asInstanceOf[V]) else None
+ zeroValue = value.asInstanceOf[AnyRef]
+ extraKeys |= 1
+ ans
+ }
+ else {
+ val ans = if ((extraKeys&2) == 2) Some(minValue.asInstanceOf[V]) else None
+ minValue = value.asInstanceOf[AnyRef]
+ extraKeys |= 2
+ ans
+ }
+ }
+ else {
+ val i = seekEntryOrOpen(key)
+ if (i < 0) {
+ val j = i & IndexMask
+ _keys(j) = key
+ _values(j) = value.asInstanceOf[AnyRef]
+ _size += 1
+ if ((i & VacantBit) != 0) _vacant -= 1
+ else if (imbalanced) repack()
+ None
+ }
+ else {
+ val ans = Some(_values(i).asInstanceOf[V])
+ _keys(i) = key
+ _values(i) = value.asInstanceOf[AnyRef]
+ ans
+ }
+ }
+ }
+
+ /** Updates the map to include a new key-value pair. 
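+ * Unlike `put`, it does not report whether the key was previously bound.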
+ * + * This is the fastest way to add an entry to a `LongMap`. + */ + override def update(key: Long, value: V): Unit = { + if (key == -key) { + if (key == 0) { + zeroValue = value.asInstanceOf[AnyRef] + extraKeys |= 1 + } + else { + minValue = value.asInstanceOf[AnyRef] + extraKeys |= 2 + } + } + else { + val i = seekEntryOrOpen(key) + if (i < 0) { + val j = i & IndexMask + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + } + else { + _keys(i) = key + _values(i) = value.asInstanceOf[AnyRef] + } + } + } + + /** Adds a new key/value pair to this map and returns the map. */ + @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") + def +=(key: Long, value: V): this.type = { update(key, value); this } + + /** Adds a new key/value pair to this map and returns the map. */ + @inline final def addOne(key: Long, value: V): this.type = { update(key, value); this } + + @inline override final def addOne(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } + + def subtractOne(key: Long): this.type = { + if (key == -key) { + if (key == 0L) { + extraKeys &= 0x2 + zeroValue = null + } + else { + extraKeys &= 0x1 + minValue = null + } + } + else { + val i = seekEntry(key) + if (i >= 0) { + _size -= 1 + _vacant += 1 + _keys(i) = Long.MinValue + _values(i) = null + } + } + this + } + + def iterator: Iterator[(Long, V)] = new AbstractIterator[(Long, V)] { + private[this] val kz = _keys + private[this] val vz = _values + + private[this] var nextPair: (Long, V) = + if (extraKeys==0) null + else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V]) + else (Long.MinValue, minValue.asInstanceOf[V]) + + private[this] var anotherPair: (Long, V) = + if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V]) + else null + + private[this] var index = 0 + + def hasNext: Boolean = nextPair != null || (index < kz.length && { + var q = kz(index) + while (q == -q) { + index += 1 + if (index >= kz.length) return false + q = kz(index) + } + nextPair = (kz(index), vz(index).asInstanceOf[V]) + index += 1 + true + }) + def next() = { + if (nextPair == null && !hasNext) throw new NoSuchElementException("next") + val ans = nextPair + if (anotherPair != null) { + nextPair = anotherPair + anotherPair = null + } + else nextPair = null + ans + } + } + + // TODO PERF override these for efficiency. See immutable.LongMap for how to organize the code. 
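+ // Note: 0L and Long.MinValue live outside the hash table (in `zeroValue` and
+ // `minValue`, flagged by `extraKeys`), which is why `iterator` above yields
+ // them before any table-resident entries.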
+ override def keysIterator: Iterator[Long] = super.keysIterator + override def valuesIterator: Iterator[V] = super.valuesIterator + + override def foreach[U](f: ((Long,V)) => U): Unit = { + if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) + if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f((k, _values(i).asInstanceOf[V])) + } + i += 1 + } + } + + override def foreachEntry[U](f: (Long,V) => U): Unit = { + if ((extraKeys & 1) == 1) f(0L, zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(Long.MinValue, minValue.asInstanceOf[V]) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(k, _values(i).asInstanceOf[V]) + } + i += 1 + } + } + + override def clone(): LongMap[V] = { + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = java.util.Arrays.copyOf(_values, _values.length) + val lm = new LongMap[V](defaultEntry, 1, false) + lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) + lm + } + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + lm += kv + lm + } + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { + val m = this + elem1 + elem2 + if(elems.isEmpty) m else m.concat(elems) + } + + override def concat[V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + xs.iterator.foreach(kv => lm += kv) + lm + } + + override def ++ [V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(xs) + + @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = + clone().asInstanceOf[LongMap[V1]].addOne(key, value) + + /** Applies a function to all keys of this map. */ + def foreachKey[A](f: Long => A): Unit = { + if ((extraKeys & 1) == 1) f(0L) + if ((extraKeys & 2) == 2) f(Long.MinValue) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(k) + } + i += 1 + } + } + + /** Applies a function to all values of this map. */ + def foreachValue[A](f: V => A): Unit = { + if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(_values(i).asInstanceOf[V]) + } + i += 1 + } + } + + /** Creates a new `LongMap` with different values. + * Unlike `mapValues`, this method generates a new + * collection immediately. 
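+ *
+ * For instance (an illustrative sketch, not from the original sources):
+ * {{{
+ *   LongMap(1L -> 1, 2L -> 2).mapValuesNow(_ * 10) // the values become 10 and 20
+ * }}}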
+ */ + def mapValuesNow[V1](f: V => V1): LongMap[V1] = { + val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null + val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null + val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = new Array[AnyRef](_values.length) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz) + lm + } + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + def transformValuesInPlace(f: V => V): this.type = { + if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] + if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + this + } + + def map[V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + + def flatMap[V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + def collect[V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this) + + override protected[this] def className = "LongMap" +} + +object LongMap { + private final val IndexMask = 0x3FFFFFFF + private final val MissingBit = 0x80000000 + private final val VacantBit = 0x40000000 + private final val MissVacant = 0xC0000000 + + private val exceptionDefault: Long => Nothing = (k: Long) => throw new NoSuchElementException(k.toString) + + /** A builder for instances of `LongMap`. + * + * This builder can be reused to create multiple instances. + */ + final class LongMapBuilder[V] extends ReusableBuilder[(Long, V), LongMap[V]] { + private[collection] var elems: LongMap[V] = new LongMap[V] + override def addOne(entry: (Long, V)): this.type = { + elems += entry + this + } + def clear(): Unit = elems = new LongMap[V] + def result(): LongMap[V] = elems + override def knownSize: Int = elems.knownSize + } + + /** Creates a new `LongMap` with zero or more key/value pairs. */ + def apply[V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) + + private def buildFromIterableOnce[V](elems: IterableOnce[(Long, V)]): LongMap[V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 + val lm = new LongMap[V](sz * 2) + elems.iterator.foreach{ case (k,v) => lm(k) = v } + if (lm.size < (sz>>3)) lm.repack() + lm + } + + /** Creates a new empty `LongMap`. */ + def empty[V]: LongMap[V] = new LongMap[V] + + /** Creates a new empty `LongMap` with the supplied default */ + def withDefault[V](default: Long => V): LongMap[V] = new LongMap[V](default) + + /** Creates a new `LongMap` from an existing source collection. 
A source collection + * which is already a `LongMap` gets cloned. + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new `LongMap` with the elements of `source` + */ + def from[V](source: IterableOnce[(Long, V)]): LongMap[V] = source match { + case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]] + case _ => buildFromIterableOnce(source) + } + + def newBuilder[V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] + + /** Creates a new `LongMap` from arrays of keys and values. + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. + */ + def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = { + val sz = math.min(keys.length, values.length) + val lm = new LongMap[V](sz * 2) + var i = 0 + while (i < sz) { lm(keys(i)) = values(i); i += 1 } + if (lm.size < (sz>>3)) lm.repack() + lm + } + + /** Creates a new `LongMap` from keys and values. + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. + */ + def fromZip[V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { + val sz = math.min(keys.size, values.size) + val lm = new LongMap[V](sz * 2) + val ki = keys.iterator + val vi = values.iterator + while (ki.hasNext && vi.hasNext) lm(ki.next()) = vi.next() + if (lm.size < (sz >> 3)) lm.repack() + lm + } + + implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] + private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it) + def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/Map.scala b/tests/pos-special/stdlib/collection/mutable/Map.scala new file mode 100644 index 000000000000..610dc01029cc --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Map.scala @@ -0,0 +1,268 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +/** Base type of mutable Maps */ +trait Map[K, V] + extends Iterable[(K, V)] + with collection.Map[K, V] + with MapOps[K, V, Map, Map[K, V]] + with Growable[(K, V)] + with Shrinkable[K] + with MapFactoryDefaults[K, V, Map, Iterable] { + + override def mapFactory: scala.collection.MapFactory[Map] = Map + + /* + //TODO consider keeping `remove` because it returns the removed entry + @deprecated("Use subtract or -= instead of remove", "2.13.0") + def remove(key: K): Option[V] = { + val old = get(key) + if(old.isDefined) subtract(key) + old + } + */ + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault(d: K => V): Map[K, V] = new Map.WithDefault[K, V](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue(d: V): Map[K, V] = new Map.WithDefault[K, V](this, x => d) +} + +/** + * @define coll mutable map + * @define Coll `mutable.Map` + */ +trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends IterableOps[(K, V), Iterable, C] + with collection.MapOps[K, V, CC, C] + with Cloneable[C] + with Builder[(K, V), C] + with Growable[(K, V)] + with Shrinkable[K] { + + def result(): C = coll + + @deprecated("Use - or remove on an immutable Map", "2.13.0") + final def - (key: K): C = clone() -= key + + @deprecated("Use -- or removeAll on an immutable Map", "2.13.0") + final def - (key1: K, key2: K, keys: K*): C = clone() -= key1 -= key2 --= keys + + /** Adds a new key/value pair to this map and optionally returns previously bound value. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. + * + * @param key the key to update + * @param value the new value + * @return an option value containing the value associated with the key + * before the `put` operation was executed, or `None` if `key` + * was not defined in the map before. + */ + def put(key: K, value: V): Option[V] = { + val r = get(key) + update(key, value) + r + } + + /** Adds a new key/value pair to this map. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. + * + * @param key The key to update + * @param value The new value + */ + def update(key: K, value: V): Unit = { coll += ((key, value)) } + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. 
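+ *
+ * For example (an illustrative sketch, not from the original sources):
+ * {{{
+ *   val m = Map("a" -> 1)
+ *   m.updateWith("a")(_.map(_ + 1)) // returns Some(2); m("a") is now 2
+ *   m.updateWith("b")(_ => None)    // returns None; m is unchanged
+ * }}}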
+ *
+ * @param key the key value
+ * @param remappingFunction a function that receives the current optionally-mapped value and returns the new mapping
+ * @return the new value associated with the specified key
+ */
+ def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = {
+ val previousValue = this.get(key)
+ val nextValue = remappingFunction(previousValue)
+ (previousValue, nextValue) match {
+ case (None, None) => // do nothing
+ case (Some(_), None) => this.remove(key)
+ case (_, Some(v)) => this.update(key,v)
+ }
+ nextValue
+ }
+
+ /** If the given key is already in this map, returns the associated value.
+ *
+ * Otherwise, computes the value from the given expression `op`, stores it
+ * with the key in the map and returns that value.
+ *
+ * Concurrent map implementations may evaluate the expression `op`
+ * multiple times, or may evaluate `op` without inserting the result.
+ *
+ * @param key the key to test
+ * @param op the computation yielding the value to associate with `key`, if
+ * `key` is previously unbound.
+ * @return the value associated with key (either previously or as a result
+ * of executing the method).
+ */
+ def getOrElseUpdate(key: K, op: => V): V =
+ get(key) match {
+ case Some(v) => v
+ case None => val d = op; this(key) = d; d
+ }
+
+ /** Removes a key from this map, returning the value associated previously
+ * with that key as an option.
+ * @param key the key to be removed
+ * @return an option value containing the value associated previously with `key`,
+ * or `None` if `key` was not defined in the map before.
+ */
+ def remove(key: K): Option[V] = {
+ val r = get(key)
+ if (r.isDefined) this -= key
+ r
+ }
+
+ def clear(): Unit = { keysIterator foreach -= }
+
+ override def clone(): C = empty ++= this
+
+ @deprecated("Use filterInPlace instead", "2.13.0")
+ @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p)
+
+ /** Retains only those mappings for which the predicate
+ * `p` returns `true`.
+ *
+ * @param p The test predicate
+ */
+ def filterInPlace(p: (K, V) => Boolean): this.type = {
+ if (!isEmpty) this match {
+ case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p)
+ case _ =>
+ val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException
+ val arrayLength = array.length
+ var i = 0
+ while (i < arrayLength) {
+ val (k, v) = array(i).asInstanceOf[(K, V)]
+ if (!p(k, v)) {
+ this -= k
+ }
+ i += 1
+ }
+ }
+ this
+ }
+
+ @deprecated("Use mapValuesInPlace instead", "2.13.0")
+ @inline final def transform(f: (K, V) => V): this.type = mapValuesInPlace(f)
+
+ /** Applies a transformation function to all values contained in this map.
+ * The transformation function produces new values from the existing keys'
+ * associated values.
+ *
+ * @param f the transformation to apply
+ * @return the map itself. 
+ */ + def mapValuesInPlace(f: (K, V) => V): this.type = { + if (!isEmpty) this match { + case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f) + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f) + case _ => + val array = this.toArray[Any] + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + update(k, f(k, v)) + i += 1 + } + } + this + } + + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) + + override def knownSize: Int = super[IterableOps].knownSize +} + +/** + * $factoryInfo + * @define coll mutable map + * @define Coll `mutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory.Delegate[Map](HashMap) { + + @SerialVersionUID(3L) + class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K => V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + override def default(key: K): V = defaultValue(key) + + def iterator: scala.collection.Iterator[(K, V)] = underlying.iterator + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = underlying.knownSize + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def clear(): Unit = underlying.clear() + + def get(key: K): Option[V] = underlying.get(key) + + def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): Map[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) + } + +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ +abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala new file mode 100644 index 000000000000..13d7c35e0165 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + + +/** A trait for mutable maps with multiple values assigned to a key. + * + * This class is typically used as a mixin. It turns maps which map `K` + * to `Set[V]` objects into multimaps that map `K` to `V` objects. 
+ *
+ * @example {{{
+ * // first import all necessary types from package `collection.mutable`
+ * import collection.mutable.{ HashMap, MultiMap, Set }
+ *
+ * // to create a `MultiMap` the easiest way is to mix it into a normal
+ * // `Map` instance
+ * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String]
+ *
+ * // to add key-value pairs to a multimap it is important to use
+ * // the method `addBinding` because standard methods like `+` will
+ * // overwrite the complete key-value pair instead of adding the
+ * // value to the existing key
+ * mm.addBinding(1, "a")
+ * mm.addBinding(2, "b")
+ * mm.addBinding(1, "c")
+ *
+ * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))`
+ *
+ * // to check if the multimap contains a value there is the method
+ * // `entryExists`, which lets you test the set bound to a key
+ * mm.entryExists(1, _ == "a") == true
+ * mm.entryExists(1, _ == "b") == false
+ * mm.entryExists(2, _ == "b") == true
+ *
+ * // to remove a previously added value there is the method `removeBinding`
+ * mm.removeBinding(1, "a")
+ * mm.entryExists(1, _ == "a") == false
+ * }}}
+ *
+ * @define coll multimap
+ * @define Coll `MultiMap`
+ */
+@deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0")
+trait MultiMap[K, V] extends Map[K, Set[V]] {
+ /** Creates a new set.
+ *
+ * Classes that use this trait as a mixin can override this method
+ * to have the desired implementation of sets assigned to new keys.
+ * By default this is `HashSet`.
+ *
+ * @return An empty set of values of type `V`.
+ */
+ protected def makeSet: Set[V] = new HashSet[V]
+
+ /** Assigns the specified `value` to a specified `key`. If the key
+ * already has a binding equal to `value`, nothing is changed;
+ * otherwise a new binding is added for that `key`.
+ *
+ * @param key The key to which to bind the new value.
+ * @param value The value to bind to the key.
+ * @return A reference to this multimap.
+ */
+ def addBinding(key: K, value: V): this.type = {
+ get(key) match {
+ case None =>
+ val set = makeSet
+ set += value
+ this(key) = set
+ case Some(set) =>
+ set += value
+ }
+ this
+ }
+
+ /** Removes the binding of `value` to `key` if it exists, otherwise this
+ * operation doesn't have any effect.
+ *
+ * If this was the last value assigned to the specified key, the
+ * set assigned to that key will be removed as well.
+ *
+ * @param key The key of the binding.
+ * @param value The value to remove.
+ * @return A reference to this multimap.
+ */
+ def removeBinding(key: K, value: V): this.type = {
+ get(key) match {
+ case None =>
+ case Some(set) =>
+ set -= value
+ if (set.isEmpty) this -= key
+ }
+ this
+ }
+
+ /** Checks if there exists a binding to `key` such that it satisfies the predicate `p`.
+ *
+ * @param key The key for which the predicate is checked.
+ * @param p The predicate which a value assigned to the key must satisfy. 
+ * @return A boolean if such a binding exists + */ + def entryExists(key: K, p: V => Boolean): Boolean = get(key) match { + case None => false + case Some(set) => set exists p + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala index 3e9b16540031..e98536d0dad5 100644 --- a/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala +++ b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala @@ -15,7 +15,6 @@ package collection package mutable import java.util.ConcurrentModificationException -import language.experimental.captureChecking /** * Utilities to check that mutations to a client that tracks @@ -67,7 +66,7 @@ private object MutationTracker { * @param mutationCount a by-name provider of the current mutation count * @tparam A the type of the iterator's elements */ - final class CheckedIterator[A](underlying: Iterator[A]^, mutationCount: => Int) extends AbstractIterator[A] { + final class CheckedIterator[A](underlying: Iterator[A], mutationCount: => Int) extends AbstractIterator[A] { private[this] val expectedCount = mutationCount def hasNext: Boolean = { diff --git a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala new file mode 100644 index 000000000000..22e99d4650d1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala @@ -0,0 +1,306 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import java.lang.Integer.numberOfLeadingZeros +import java.util.ConcurrentModificationException +import scala.collection.generic.DefaultSerializable + +/** + * @define Coll `OpenHashMap` + * @define coll open hash map + */ +@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") +@SerialVersionUID(3L) +object OpenHashMap extends MapFactory[OpenHashMap] { + + def empty[K, V] = new OpenHashMap[K, V] + def from[K, V](it: IterableOnce[(K, V)]): OpenHashMap[K,V] = empty ++= it + + def newBuilder[K, V]: Builder[(K, V), OpenHashMap[K,V]] = + new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty) + + /** A hash table entry. + * + * The entry is occupied if and only if its `value` is a `Some`; + * deleted if and only if its `value` is `None`. + * If its `key` is not the default value of type `Key`, the entry is occupied. + * If the entry is occupied, `hash` contains the hash value of `key`. + */ + final private class OpenEntry[Key, Value](var key: Key, + var hash: Int, + var value: Option[Value]) + + private[mutable] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) +} + +/** A mutable hash map based on an open addressing method. The precise scheme is + * undefined, but it should make a reasonable effort to ensure that an insert + * with consecutive hash codes is not unnecessarily penalised. In particular, + * mappings of consecutive integer keys should work without significant + * performance loss. + * + * @tparam Key type of the keys in this map. + * @tparam Value type of the values in this map. + * @param initialSize the initial size of the internal hash table. 
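+ *
+ * @example A brief usage sketch (illustrative, not part of the original source):
+ * {{{
+ *   val m = new OpenHashMap[Int, String]()
+ *   m(1) = "one" // slot chosen by open addressing; no per-entry chaining
+ *   m.get(1)     // Some("one")
+ * }}}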
+ *
+ * @define Coll `OpenHashMap`
+ * @define coll open hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0")
+class OpenHashMap[Key, Value](initialSize : Int)
+  extends AbstractMap[Key, Value]
+    with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]]
+    with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]]
+    with MapFactoryDefaults[Key, Value, OpenHashMap, Iterable]
+    with DefaultSerializable {
+
+  import OpenHashMap.OpenEntry
+  private type Entry = OpenEntry[Key, Value]
+
+  /** A default constructor creates a hash map with initial size `8`.
+   */
+  def this() = this(8)
+
+  override def mapFactory: MapFactory[OpenHashMap] = OpenHashMap
+
+  private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize)
+
+  private[this] var mask = actualInitialSize - 1
+
+  /** The hash table.
+   *
+   * The table's entries are initialized to `null`, indicating an empty slot.
+   * A slot is either deleted or occupied if and only if the entry is non-`null`.
+   */
+  private[this] var table = new Array[Entry](actualInitialSize)
+
+  private[this] var _size = 0
+  private[this] var deleted = 0
+
+  // Used for tracking inserts so that iterators can determine if concurrent modification has occurred.
+  private[this] var modCount = 0
+
+  override def size = _size
+  override def knownSize: Int = size
+  private[this] def size_=(s : Int): Unit = _size = s
+  override def isEmpty: Boolean = _size == 0
+  /** Returns a mangled hash code of the provided key. */
+  protected def hashOf(key: Key) = {
+    var h = key.##
+    h ^= ((h >>> 20) ^ (h >>> 12))
+    h ^ (h >>> 7) ^ (h >>> 4)
+  }
+
+  /** Increase the size of the table.
+   * Copy only the occupied slots, effectively eliminating the deleted slots.
+   */
+  private[this] def growTable() = {
+    val oldSize = mask + 1
+    val newSize = 4 * oldSize
+    val oldTable = table
+    table = new Array[Entry](newSize)
+    mask = newSize - 1
+    oldTable.foreach( entry =>
+      if (entry != null && entry.value != None)
+        table(findIndex(entry.key, entry.hash)) = entry )
+    deleted = 0
+  }
+
+  /** Return the index of the first slot in the hash table (in probe order)
+   * that, in order of preference, is either occupied by the given key, deleted, or empty.
+   *
+   * @param hash hash value for `key`
+   */
+  private[this] def findIndex(key: Key, hash: Int): Int = {
+    var index = hash & mask
+    var j = 0
+
+    // Index of the first slot containing a deleted entry, or -1 if none found yet
+    var firstDeletedIndex = -1
+
+    var entry = table(index)
+    while (entry != null) {
+      if (entry.hash == hash && entry.key == key && entry.value != None)
+        return index
+
+      if (firstDeletedIndex == -1 && entry.value == None)
+        firstDeletedIndex = index
+
+      j += 1
+      index = (index + j) & mask
+      entry = table(index)
+    }
+
+    if (firstDeletedIndex == -1) index else firstDeletedIndex
+  }
+
+  // TODO refactor `put` to extract `findOrAddEntry` and implement this in terms of that to avoid Some boxing.
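+
+  // Probing sketch: starting from the home slot `hash & mask`, findIndex steps
+  // by 1, 2, 3, ... (cumulative triangular offsets). For a table of size 8 and
+  // home slot 5 the probe sequence is 5, 6, 0, 3, 7, 4, 2, 1, which visits
+  // every slot of the power-of-two-sized table before repeating.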
+  override def update(key: Key, value: Value): Unit = put(key, value)
+
+  @deprecatedOverriding("addOne should not be overridden in order to maintain consistency with put.", "2.11.0")
+  def addOne (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this }
+
+  @deprecatedOverriding("subtractOne should not be overridden in order to maintain consistency with remove.", "2.11.0")
+  def subtractOne (key: Key): this.type = { remove(key); this }
+
+  override def put(key: Key, value: Value): Option[Value] =
+    put(key, hashOf(key), value)
+
+  private def put(key: Key, hash: Int, value: Value): Option[Value] = {
+    if (2 * (size + deleted) > mask) growTable()
+    val index = findIndex(key, hash)
+    val entry = table(index)
+    if (entry == null) {
+      table(index) = new OpenEntry(key, hash, Some(value))
+      modCount += 1
+      size += 1
+      None
+    } else {
+      val res = entry.value
+      if (entry.value == None) {
+        entry.key = key
+        entry.hash = hash
+        size += 1
+        deleted -= 1
+        modCount += 1
+      }
+      entry.value = Some(value)
+      res
+    }
+  }
+
+  /** Delete the hash table slot contained in the given entry. */
+  @`inline`
+  private[this] def deleteSlot(entry: Entry) = {
+    entry.key = null.asInstanceOf[Key]
+    entry.hash = 0
+    entry.value = None
+
+    size -= 1
+    deleted += 1
+  }
+
+  override def remove(key : Key): Option[Value] = {
+    val entry = table(findIndex(key, hashOf(key)))
+    if (entry != null && entry.value != None) {
+      val res = entry.value
+      deleteSlot(entry)
+      res
+    } else None
+  }
+
+  def get(key : Key) : Option[Value] = {
+    val hash = hashOf(key)
+    var index = hash & mask
+    var entry = table(index)
+    var j = 0
+    while(entry != null){
+      if (entry.hash == hash &&
+        entry.key == key){
+        return entry.value
+      }
+
+      j += 1
+      index = (index + j) & mask
+      entry = table(index)
+    }
+    None
+  }
+
+  /** An iterator over the elements of this map. Use of this iterator follows
+   * the same contract for concurrent modification as the foreach method.
+   *
+   * @return the iterator
+   */
+  def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] {
+    override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get)
+  }
+
+  override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] {
+    override protected def nextResult(node: Entry): Key = node.key
+  }
+  override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] {
+    override protected def nextResult(node: Entry): Value = node.value.get
+  }
+
+  private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] {
+    private[this] var index = 0
+    private[this] val initialModCount = modCount
+
+    private[this] def advance(): Unit = {
+      if (initialModCount != modCount) throw new ConcurrentModificationException
+      while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1
+    }
+
+    def hasNext = {advance(); index <= mask }
+
+    def next() = {
+      advance()
+      val result = table(index)
+      index += 1
+      nextResult(result)
+    }
+    protected def nextResult(node: Entry): A
+  }
+
+  override def clone() = {
+    val it = new OpenHashMap[Key, Value]
+    foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get))
+    it
+  }
+
+  /** Loop over the key, value mappings of this map.
+   *
+   * The behaviour of modifying the map during an iteration is as follows:
+   * - Deleting a mapping is always permitted.
+   * - Changing the value of a mapping which is already present is permitted.
+   * - Anything else is not permitted. It will usually, but not always, throw an exception.
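+   *
+   * A sketch of the permitted cases, assuming an `OpenHashMap[String, Int]` named `m`:
+   * {{{
+   * m.foreach { case (k, v) => if (v == 0) m.remove(k) } // permitted: deletion
+   * m.foreach { case (k, v) => m.put(k, v + 1) }         // permitted: changing a present value
+   * }}}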
+   *
+   * @tparam U The return type of the specified function `f`; its result is ignored.
+   * @param f The function to apply to each key, value mapping.
+   */
+  override def foreach[U](f : ((Key, Value)) => U): Unit = {
+    val startModCount = modCount
+    foreachUndeletedEntry(entry => {
+      if (modCount != startModCount) throw new ConcurrentModificationException
+      f((entry.key, entry.value.get))}
+    )
+  }
+  override def foreachEntry[U](f : (Key, Value) => U): Unit = {
+    val startModCount = modCount
+    foreachUndeletedEntry(entry => {
+      if (modCount != startModCount) throw new ConcurrentModificationException
+      f(entry.key, entry.value.get)}
+    )
+  }
+
+  private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = {
+    table.foreach(entry => if (entry != null && entry.value != None) f(entry))
+  }
+
+  override def mapValuesInPlace(f : (Key, Value) => Value): this.type = {
+    foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)))
+    this
+  }
+
+  override def filterInPlace(f : (Key, Value) => Boolean): this.type = {
+    foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry))
+    this
+  }
+
+  override protected[this] def stringPrefix = "OpenHashMap"
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala
new file mode 100644
index 000000000000..5572bdca3cf6
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala
@@ -0,0 +1,402 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.collection.generic.DefaultSerializationProxy
+import scala.math.Ordering
+
+/** A heap-based priority queue.
+ *
+ * To prioritize elements of type `A` there must be an implicit
+ * `Ordering[A]` available at creation. Elements are retrieved
+ * in priority order by using [[dequeue]] or [[dequeueAll]].
+ *
+ * If multiple elements have the same priority as determined by the ordering for this
+ * `PriorityQueue`, no guarantees are made regarding the order in which those elements
+ * are returned by `dequeue` or `dequeueAll`. In particular, that means this
+ * class does not guarantee first-in-first-out behavior, as may be
+ * incorrectly inferred from the fact that this data structure is
+ * called a "queue".
+ *
+ * Only the `dequeue` and `dequeueAll` methods will return elements in priority
+ * order (while removing elements from the heap). Standard collection methods
+ * such as `drop`, `iterator`, `toList` and `toString` use an arbitrary
+ * iteration order: they will traverse the heap or remove elements
+ * in whichever order seems most convenient.
+ *
+ * Therefore, printing a `PriorityQueue` will not show elements in priority order,
+ * though the highest-priority element will be printed first.
+ * To print the elements in order, it's necessary to `dequeue` them.
+ * To do this non-destructively, duplicate the `PriorityQueue` first;
+ * the `clone` method is a suitable way to obtain a disposable copy.
+ *
+ * Client keys are assumed to be immutable. Mutating keys may violate
+ * the invariant of the underlying heap-ordered tree. Note that [[clone]]
+ * does not rebuild the underlying tree.
+ * + * {{{ + * scala> val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) + * val pq: scala.collection.mutable.PriorityQueue[Int] = PriorityQueue(7, 3, 5, 1, 2) + * + * scala> pq.toList // also not in order + * val res0: List[Int] = List(7, 3, 5, 1, 2) + * + * scala> pq.clone.dequeueAll + * val res1: Seq[Int] = ArraySeq(7, 5, 3, 2, 1) + * }}} + * + * @tparam A type of the elements in this priority queue. + * @param ord implicit ordering used to compare the elements of type `A`. + * + * @define Coll PriorityQueue + * @define coll priority queue + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class PriorityQueue[A](implicit val ord: Ordering[A]) + extends AbstractIterable[A] + with Iterable[A] + with IterableOps[A, Iterable, PriorityQueue[A]] + with StrictOptimizedIterableOps[A, Iterable, PriorityQueue[A]] + with Builder[A, PriorityQueue[A]] + with Cloneable[PriorityQueue[A]] + with Growable[A] + with Serializable +{ + + private class ResizableArrayAccess[A0] extends ArrayBuffer[A0] { + override def mapInPlace(f: A0 => A0): this.type = { + var i = 1 // see "we do not use array(0)" comment below (???) + val siz = this.size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + def p_size0 = size0 + def p_size0_=(s: Int) = size0 = s + def p_array = array + def p_ensureSize(n: Int) = super.ensureSize(n) + def p_ensureAdditionalSize(n: Int) = super.ensureAdditionalSize(n) + def p_swap(a: Int, b: Int): Unit = { + val h = array(a) + array(a) = array(b) + array(b) = h + } + } + + private val resarr = new ResizableArrayAccess[A] + + resarr.p_size0 += 1 // we do not use array(0) TODO: explain -- what is the first element even for? + def length: Int = resarr.length - 1 // adjust length accordingly + override def size: Int = length + override def knownSize: Int = length + override def isEmpty: Boolean = resarr.p_size0 < 2 + + // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) + override protected def fromSpecific(coll: scala.collection.IterableOnce[A]): PriorityQueue[A] = PriorityQueue.from(coll) + override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder + override def empty: PriorityQueue[A] = PriorityQueue.empty + + def mapInPlace(f: A => A): this.type = { + resarr.mapInPlace(f) + heapify(1) + this + } + + def result() = this + + private def toA(x: AnyRef): A = x.asInstanceOf[A] + protected def fixUp(as: Array[AnyRef], m: Int): Unit = { + var k: Int = m + // use `ord` directly to avoid allocating `OrderingOps` + while (k > 1 && ord.lt(toA(as(k / 2)), toA(as(k)))) { + resarr.p_swap(k, k / 2) + k = k / 2 + } + } + + protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = { + // returns true if any swaps were done (used in heapify) + var k: Int = m + while (n >= 2 * k) { + var j = 2 * k + // use `ord` directly to avoid allocating `OrderingOps` + if (j < n && ord.lt(toA(as(j)), toA(as(j + 1)))) + j += 1 + if (ord.gteq(toA(as(k)), toA(as(j)))) + return k != m + else { + val h = as(k) + as(k) = as(j) + as(j) = h + k = j + } + } + k != m + } + + /** Inserts a single element into the priority queue. + * + * @param elem the element to insert. + * @return this $coll. 
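+   * @example A small usage sketch:
+   * {{{
+   * val pq = PriorityQueue.empty[Int]
+   * pq.addOne(5).addOne(1).addOne(3)
+   * pq.head // 5, the element with the highest priority
+   * }}}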
+   */
+  def addOne(elem: A): this.type = {
+    resarr.p_ensureAdditionalSize(1)
+    resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef]
+    fixUp(resarr.p_array, resarr.p_size0)
+    resarr.p_size0 += 1
+    this
+  }
+
+  override def addAll(xs: IterableOnce[A]): this.type = {
+    val from = resarr.p_size0
+    for (x <- xs.iterator) unsafeAdd(x)
+    heapify(from)
+    this
+  }
+
+  private def unsafeAdd(elem: A): Unit = {
+    // like += but skips fixUp, which breaks the ordering invariant
+    // a series of unsafeAdds MUST be followed by heapify
+    resarr.p_ensureAdditionalSize(1)
+    resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef]
+    resarr.p_size0 += 1
+  }
+
+  private def heapify(from: Int): Unit = {
+    // elements at indices 1..from-1 were already in heap order before any adds
+    // elements at indices from..n are newly added; their order must be fixed
+    val n = length
+
+    if (from <= 2) {
+      // no pre-existing order to maintain, do the textbook heapify algorithm
+      for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n)
+    }
+    else if (n - from < 4) {
+      // for very small adds, doing the simplest fix is faster
+      for (i <- from to n) fixUp(resarr.p_array, i)
+    }
+    else {
+      var min = from/2 // tracks the minimum index added to `queue`
+      val queue = scala.collection.mutable.Queue[Int](min)
+
+      // do fixDown on the parents of all the new elements
+      // except the parent of the first new element, which is in the queue
+      // (that parent is treated specially because it might be the root)
+      for (i <- n/2 until min by -1) {
+        if (fixDown(resarr.p_array, i, n)) {
+          // there was a swap, so also need to fixDown i's parent
+          val parent = i/2
+          if (parent < min) { // make sure same parent isn't added twice
+            min = parent
+            queue += parent
+          }
+        }
+      }
+
+      while (queue.nonEmpty) {
+        val i = queue.dequeue()
+        if (fixDown(resarr.p_array, i, n)) {
+          val parent = i/2
+          if (parent < min && parent > 0) {
+            // the "parent > 0" is to avoid adding the parent of the root
+            min = parent
+            queue += parent
+          }
+        }
+      }
+    }
+  }
+
+  /** Adds all elements provided by an `IterableOnce` object
+   * into the priority queue.
+   *
+   * @param xs an iterable object.
+   * @return a new priority queue containing elements of both `xs` and `this`.
+   */
+  def ++(xs: IterableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs }
+
+  /** Adds all given elements to the queue.
+   *
+   * @param elems the elements to add.
+   */
+  def enqueue(elems: A*): Unit = { this ++= elems }
+
+  /** Returns the element with the highest priority in the queue,
+   * and removes this element from the queue.
+   *
+   * @throws NoSuchElementException when the queue is empty
+   * @return the element with the highest priority.
+   */
+  def dequeue(): A =
+    if (resarr.p_size0 > 1) {
+      resarr.p_size0 = resarr.p_size0 - 1
+      val result = resarr.p_array(1)
+      resarr.p_array(1) = resarr.p_array(resarr.p_size0)
+      resarr.p_array(resarr.p_size0) = null // erase reference from array
+      fixDown(resarr.p_array, 1, resarr.p_size0 - 1)
+      toA(result)
+    } else
+      throw new NoSuchElementException("no element to remove from heap")
+
+  def dequeueAll[A1 >: A]: immutable.Seq[A1] = {
+    val b = ArrayBuilder.make[Any]
+    b.sizeHint(size)
+    while (nonEmpty) {
+      b += dequeue()
+    }
+    immutable.ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[immutable.ArraySeq[A1]]
+  }
+
+  /** Returns the element with the highest priority in the queue,
+   * or throws an error if there is no element contained in the queue.
+   *
+   * @return the element with the highest priority.
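+   * @example A sketch:
+   * {{{
+   * val pq = PriorityQueue(1, 3, 2)
+   * pq.head      // 3; the queue is left unchanged
+   * pq.dequeue() // 3; the element is removed
+   * }}}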
+   */
+  override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
+
+  /** Removes all elements from the queue. After this operation is completed,
+   * the queue will be empty.
+   */
+  def clear(): Unit = {
+    resarr.clear()
+    resarr.p_size0 = 1
+  }
+
+  /** Returns an iterator which yields all the elements.
+   *
+   * Note: The order of elements returned is undefined.
+   * If you want to traverse the elements in priority queue
+   * order, use `clone().dequeueAll.iterator`.
+   *
+   * @return an iterator over all the elements.
+   */
+  override def iterator: Iterator[A] = resarr.iterator.drop(1)
+
+  /** Returns the reverse of this priority queue. The new priority queue has
+   * the same elements as the original, but the opposite ordering.
+   *
+   * For example, the element with the highest priority in `pq` has the lowest
+   * priority in `pq.reverse`, and vice versa.
+   *
+   * Ties are handled arbitrarily. Elements with equal priority may or
+   * may not be reversed with respect to each other.
+   *
+   * @return the reversed priority queue.
+   */
+  def reverse: PriorityQueue[A] = {
+    val revq = new PriorityQueue[A]()(ord.reverse)
+    // copy the existing data into the new array backwards
+    // this won't put it exactly into the correct order,
+    // but will require less fixing than copying it in
+    // the original order
+    val n = resarr.p_size0
+    revq.resarr.p_ensureSize(n)
+    revq.resarr.p_size0 = n
+    val from = resarr.p_array
+    val to = revq.resarr.p_array
+    for (i <- 1 until n) to(i) = from(n-i)
+    revq.heapify(1)
+    revq
+  }
+
+
+  /** Returns an iterator which yields all the elements in the reverse of
+   * the order used by the method `iterator`.
+   *
+   * Note: The order of elements returned is undefined.
+   *
+   * @return an iterator over all the elements, in the reverse of the `iterator` order.
+   */
+  def reverseIterator: Iterator[A] = new AbstractIterator[A] {
+    private[this] var i = resarr.p_size0 - 1
+    def hasNext: Boolean = i >= 1
+    def next(): A = {
+      val n = resarr.p_array(i)
+      i -= 1
+      toA(n)
+    }
+  }
+
+  /** Returns a regular queue containing the same elements.
+   *
+   * Note: the order of elements is undefined.
+   */
+  def toQueue: Queue[A] = new Queue[A] ++= this.iterator
+
+  /** Returns a textual representation of a queue as a string.
+   *
+   * @return the string representation of this queue.
+   */
+  override def toString() = toList.mkString("PriorityQueue(", ", ", ")")
+
+  /** Converts this $coll to a list.
+   *
+   * Note: the order of elements is undefined.
+   *
+   * @return a list containing all elements of this $coll.
+   */
+  override def toList: immutable.List[A] = immutable.List.from(this.iterator)
+
+  /** This method clones the priority queue.
+   *
+   * @return a priority queue with the same elements.
+   */
+  override def clone(): PriorityQueue[A] = {
+    val pq = new PriorityQueue[A]
+    val n = resarr.p_size0
+    pq.resarr.p_ensureSize(n)
+    java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1)
+    pq.resarr.p_size0 = n
+    pq
+  }
+
+  override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = {
+    val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len)
+    if (copied > 0) {
+      Array.copy(resarr.p_array, 1, xs, start, copied)
+    }
+    copied
+  }
+
+  @deprecated("Use `PriorityQueue` instead", "2.13.0")
+  def orderedCompanion: PriorityQueue.type = PriorityQueue
+
+  protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(PriorityQueue.evidenceIterableFactory[A], this)
+
+  override protected[this] def className = "PriorityQueue"
+}
+
+
+@SerialVersionUID(3L)
+object PriorityQueue extends SortedIterableFactory[PriorityQueue] {
+  def newBuilder[A : Ordering]: Builder[A, PriorityQueue[A]] = {
+    new Builder[A, PriorityQueue[A]] {
+      val pq = new PriorityQueue[A]
+      def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this }
+      def result(): PriorityQueue[A] = { pq.heapify(1); pq }
+      def clear(): Unit = pq.clear()
+    }
+  }
+
+  def empty[A : Ordering]: PriorityQueue[A] = new PriorityQueue[A]
+
+  def from[E : Ordering](it: IterableOnce[E]): PriorityQueue[E] = {
+    val b = newBuilder[E]
+    b ++= it
+    b.result()
+  }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala
new file mode 100644
index 000000000000..18cce0bd3852
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Queue.scala
@@ -0,0 +1,138 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.annotation.nowarn
+import scala.collection.generic.DefaultSerializable
+
+
+/** `Queue` objects implement data structures that allow one to
+ * insert and retrieve elements in a first-in-first-out (FIFO) manner.
+ *
+ * @define Coll `mutable.Queue`
+ * @define coll mutable queue
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int)
+  extends ArrayDeque[A](array, start, end)
+    with IndexedSeqOps[A, Queue, Queue[A]]
+    with StrictOptimizedSeqOps[A, Queue, Queue[A]]
+    with IterableFactoryDefaults[A, Queue]
+    with ArrayDequeOps[A, Queue, Queue[A]]
+    with Cloneable[Queue[A]]
+    with DefaultSerializable {
+
+  def this(initialSize: Int = ArrayDeque.DefaultInitialSize) =
+    this(ArrayDeque.alloc(initialSize), start = 0, end = 0)
+
+  override def iterableFactory: SeqFactory[Queue] = Queue
+
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix = "Queue"
+
+  /**
+   * Add an element to the end of this queue
+   *
+   * @param elem the element to add
+   * @return this
+   */
+  def enqueue(elem: A): this.type = this += elem
+
+  /** Enqueue two or more elements at the end of the queue. The last element
+   * of the sequence will be at the end of the queue.
+   *
+   * @param elems the element sequence.
+   * @return this
+   */
+  def enqueue(elem1: A, elem2: A, elems: A*): this.type = enqueue(elem1).enqueue(elem2).enqueueAll(elems)
+
+  /** Enqueues all elements in the given iterable object into the queue. The
+   * last element in the iterable object will be at the end of the queue.
+   *
+   * @param elems the iterable object.
+   * @return this
+   */
+  def enqueueAll(elems: scala.collection.IterableOnce[A]): this.type = this ++= elems
+
+  /**
+   * Removes the first element from this queue and returns it
+   *
+   * @return the removed element
+   * @throws NoSuchElementException when queue is empty
+   */
+  def dequeue(): A = removeHead()
+
+  /** Returns the first element in the queue which satisfies the
+   * given predicate, and removes this element from the queue.
+   *
+   * @param p the predicate used for choosing the first element
+   * @return the first element of the queue for which p yields true
+   */
+  def dequeueFirst(p: A => Boolean): Option[A] =
+    removeFirst(p)
+
+  /** Returns all elements in the queue which satisfy the
+   * given predicate, and removes those elements from the queue.
+   *
+   * @param p the predicate used for choosing elements
+   * @return a sequence of all elements in the queue for which
+   * p yields true.
+   */
+  def dequeueAll(p: A => Boolean): scala.collection.immutable.Seq[A] =
+    removeAll(p)
+
+  /**
+   * Dequeues and returns elements from the front of the queue while they satisfy the given predicate
+   *
+   * @param f the predicate used for choosing elements
+   * @return The removed elements
+   */
+  def dequeueWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f)
+
+  /** Returns the first element in the queue, or throws an error if there
+   * is no element contained in the queue.
+   *
+   * @return the first element.
+   */
+  @`inline` final def front: A = head
+
+  override protected def klone(): Queue[A] = {
+    val bf = newSpecificBuilder
+    bf ++= this
+    bf.result()
+  }
+
+  override protected def ofArray(array: Array[AnyRef], end: Int): Queue[A] =
+    new Queue(array, start = 0, end)
+
+}
+
+/**
+ * $factoryInfo
+ * @define coll queue
+ * @define Coll `Queue`
+ */
+@SerialVersionUID(3L)
+object Queue extends StrictOptimizedSeqFactory[Queue] {
+
+  def from[A](source: IterableOnce[A]): Queue[A] = empty ++= source
+
+  def empty[A]: Queue[A] = new Queue
+
+  def newBuilder[A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty)
+
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala
new file mode 100644
index 000000000000..3ac0e1a1f797
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala
@@ -0,0 +1,652 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.mutable
+
+import scala.annotation.tailrec
+import collection.{AbstractIterator, Iterator}
+import java.lang.String
+
+/**
+ * An object containing the red-black tree implementation used by mutable `TreeMaps`.
+ *
+ * The trees implemented in this object are *not* thread safe.
+ */
+private[collection] object RedBlackTree {
+
+  // ---- class structure ----
+
+  // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node.
+ // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size. + // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) + // on the size of the range. + + final class Tree[A, B](var root: Node[A, B], var size: Int) { + def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size) + } + + final class Node[A, B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { + override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" + } + + object Tree { + def empty[A, B]: Tree[A, B] = new Tree(null, 0) + } + + object Node { + + @`inline` def apply[A, B](key: A, value: B, red: Boolean, + left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = + new Node(key, value, red, left, right, parent) + + @`inline` def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = + new Node(key, value, red, null, null, parent) + + def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) + } + + // ---- getters ---- + + def isRed(node: Node[_, _]) = (node ne null) && node.red + def isBlack(node: Node[_, _]) = (node eq null) || !node.red + + // ---- size ---- + + def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right) + def size(tree: Tree[_, _]): Int = tree.size + def isEmpty(tree: Tree[_, _]) = tree.root eq null + def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 } + + // ---- search ---- + + def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match { + case null => None + case node => Some(node.value) + } + + @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = + if (node eq null) null + else { + val cmp = ord.compare(key, node.key) + if (cmp < 0) getNode(node.left, key) + else if (cmp > 0) getNode(node.right, key) + else node + } + + def contains[A: Ordering](tree: Tree[A, _], key: A): Boolean = getNode(tree.root, key) ne null + + def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match { + case null => None + case node => Some((node.key, node.value)) + } + + def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match { + case null => None + case node => Some(node.key) + } + + private def minNode[A, B](node: Node[A, B]): Node[A, B] = + if (node eq null) null else minNodeNonNull(node) + + @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = + if (node.left eq null) node else minNodeNonNull(node.left) + + def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match { + case null => None + case node => Some((node.key, node.value)) + } + + def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match { + case null => None + case node => Some(node.key) + } + + private def maxNode[A, B](node: Node[A, B]): Node[A, B] = + if (node eq null) null else maxNodeNonNull(node) + + @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = + if (node.right eq null) node else maxNodeNonNull(node.right) + + /** + * Returns the first (lowest) map entry with a key equal or greater than `key`. Returns `None` if there is no such + * node. 
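+   *
+   * For example, in a tree holding the keys 1, 3, 5, `minAfter(tree, 2)`
+   * returns the entry for key 3, and `minAfter(tree, 3)` the entry for
+   * key 3 itself.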
+ */ + def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = + minNodeAfter(tree.root, key) match { + case null => None + case node => Some((node.key, node.value)) + } + + def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = + minNodeAfter(tree.root, key) match { + case null => None + case node => Some(node.key) + } + + private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { + if (node eq null) null + else { + var y: Node[A, B] = null + var x = node + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + if (cmp <= 0) y else successor(y) + } + } + + /** + * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node. + */ + def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = + maxNodeBefore(tree.root, key) match { + case null => None + case node => Some((node.key, node.value)) + } + + def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = + maxNodeBefore(tree.root, key) match { + case null => None + case node => Some(node.key) + } + + private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { + if (node eq null) null + else { + var y: Node[A, B] = null + var x = node + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + if (cmp > 0) y else predecessor(y) + } + } + + // ---- insertion ---- + + def insert[A, B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { + var y: Node[A, B] = null + var x = tree.root + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + + if (cmp == 0) y.value = value + else { + val z = Node.leaf(key, value, red = true, y) + + if (y eq null) tree.root = z + else if (cmp < 0) y.left = z + else y.right = z + + fixAfterInsert(tree, z) + tree.size += 1 + } + } + + private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = { + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + rotateLeft(tree, z) + } + z.parent.red = false + z.parent.parent.red = true + rotateRight(tree, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + rotateRight(tree, z) + } + z.parent.red = false + z.parent.parent.red = true + rotateLeft(tree, z.parent.parent) + } + } + } + tree.root.red = false + } + + // ---- deletion ---- + + def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = { + val z = getNode(tree.root, key) + if (z ne null) { + var y = z + var yIsRed = y.red + var x: Node[A, B] = null + var xParent: Node[A, B] = null + + if (z.left eq null) { + x = z.right + transplant(tree, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + transplant(tree, z, z.left) + xParent = z.parent + } + else { + y = minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent 
eq z) xParent = y + else { + xParent = y.parent + transplant(tree, y, y.right) + y.right = z.right + y.right.parent = y + } + transplant(tree, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) fixAfterDelete(tree, x, xParent) + tree.size -= 1 + } + } + + private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = { + var x = node + var xParent = parent + while ((x ne tree.root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + rotateLeft(tree, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + rotateRight(tree, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + rotateLeft(tree, xParent) + x = tree.root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + rotateRight(tree, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + rotateLeft(tree, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + rotateRight(tree, xParent) + x = tree.root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + } + + // ---- helpers ---- + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + /** + * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is, + * therefore, the first node), this method returns `null`. + */ + private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = { + if (node.left ne null) maxNodeNonNull(node.left) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.left)) { + x = y + y = y.parent + } + y + } + } + + private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { + // assert(x.right ne null) + val y = x.right + x.right = y.left + + if (y.left ne null) y.left.parent = x + y.parent = x.parent + + if (x.parent eq null) tree.root = y + else if (x eq x.parent.left) x.parent.left = y + else x.parent.right = y + + y.left = x + x.parent = y + } + + private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { + // assert(x.left ne null) + val y = x.left + x.left = y.right + + if (y.right ne null) y.right.parent = x + y.parent = x.parent + + if (x.parent eq null) tree.root = y + else if (x eq x.parent.right) x.parent.right = y + else x.parent.left = y + + y.right = x + x.parent = y + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. 
+ */ + private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = { + if (to.parent eq null) tree.root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + + if (from ne null) from.parent = to.parent + } + + // ---- tree traversal ---- + + def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f) + + private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = + if (node ne null) foreachNodeNonNull(node, f) + + private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = { + if (node.left ne null) foreachNodeNonNull(node.left, f) + f((node.key, node.value)) + if (node.right ne null) foreachNodeNonNull(node.right, f) + } + + def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { + def g(node: Node[A, _]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) + } + + def foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { + def g(node: Node[A, B]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key, node.value) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) + } + + def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f) + + private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = + if (node ne null) transformNodeNonNull(node, f) + + private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = { + if (node.left ne null) transformNodeNonNull(node.left, f) + node.value = f(node.key, node.value) + if (node.right ne null) transformNodeNonNull(node.right, f) + } + + def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = + new EntriesIterator(tree, start, end) + + def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = + new KeysIterator(tree, start, end) + + def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = + new ValuesIterator(tree, start, end) + + private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) + (implicit ord: Ordering[A]) extends AbstractIterator[R] { + + protected def nextResult(node: Node[A, B]): R + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): R = nextNode match { + case null => throw new NoSuchElementException("next on empty iterator") + case node => + nextNode = successor(node) + setNullIfAfterEnd() + nextResult(node) + } + + private[this] var nextNode: Node[A, B] = start match { + case None => minNode(tree.root) + case Some(from) => minNodeAfter(tree.root, from) + } + + private[this] def setNullIfAfterEnd(): Unit = + if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0) + nextNode = null + + setNullIfAfterEnd() + } + + private[this] final class EntriesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, (A, B)](tree, start, end) { + + def nextResult(node: Node[A, B]) = (node.key, node.value) + } + + private[this] final class KeysIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, A](tree, start, end) { + + def nextResult(node: Node[A, B]) = node.key + } + + 
private[this] final class ValuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, B](tree, start, end) { + + def nextResult(node: Node[A, B]) = node.value + } + + // ---- debugging ---- + + /** + * Checks if the tree is in a valid state. That happens if: + * - It is a valid binary search tree; + * - All red-black properties are satisfied; + * - All non-null nodes have their `parent` reference correct; + * - The size variable in `tree` corresponds to the actual size of the tree. + */ + def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean = + isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size + + /** + * Returns true if all non-null nodes have their `parent` reference correct. + */ + private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = { + + def hasProperParentRefs(node: Node[A, B]): Boolean = { + if (node eq null) true + else { + if ((node.left ne null) && (node.left.parent ne node) || + (node.right ne null) && (node.right.parent ne node)) false + else hasProperParentRefs(node.left) && hasProperParentRefs(node.right) + } + } + + if(tree.root eq null) true + else (tree.root.parent eq null) && hasProperParentRefs(tree.root) + } + + /** + * Returns true if this node follows the properties of a binary search tree. + */ + private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = { + if (node eq null) true + else { + if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) || + (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false + else isValidBST(node.left) && isValidBST(node.right) + } + } + + /** + * Returns true if the tree has all the red-black tree properties: if the root node is black, if all children of red + * nodes are black and if the path from any node to any of its null children has the same number of black nodes. 
+ */ + private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = { + + def noRedAfterRed(node: Node[A, B]): Boolean = { + if (node eq null) true + else if (node.red && (isRed(node.left) || isRed(node.right))) false + else noRedAfterRed(node.left) && noRedAfterRed(node.right) + } + + def blackHeight(node: Node[A, B]): Int = { + if (node eq null) 1 + else { + val lh = blackHeight(node.left) + val rh = blackHeight(node.right) + + if (lh == -1 || lh != rh) -1 + else if (isRed(node)) lh + else lh + 1 + } + } + + isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0 + } + + // building + + /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ + def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Node[A, Null] = size match { + case 0 => null + case 1 => new Node(xs.next(), null, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val x = xs.next() + val right = f(level+1, size-1-leftSize) + val n = new Node(x, null, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + new Tree(f(1, size), size) + } + + /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ + def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Node[A, B] = size match { + case 0 => null + case 1 => + val (k, v) = xs.next() + new Node(k, v, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val (k, v) = xs.next() + val right = f(level+1, size-1-leftSize) + val n = new Node(k, v, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + new Tree(f(1, size), size) + } + + def copyTree[A, B](n: Node[A, B]): Node[A, B] = + if(n eq null) null else { + val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null) + if(c.left != null) c.left.parent = c + if(c.right != null) c.right.parent = c + c + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala new file mode 100644 index 000000000000..d7d3b6db4f09 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala @@ -0,0 +1,55 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + + +/** `ReusableBuilder` is a marker trait that indicates that a `Builder` + * can be reused to build more than one instance of a collection. In + * particular, calling `result()` followed by `clear()` will produce a + * collection and reset the builder to begin building a new collection + * of the same type. + * + * In general no method other than `clear()` may be called after `result()`. + * It is up to subclasses to implement and to document other allowed sequences + * of operations (e.g. 
calling other methods after `result()` in order to obtain + * different snapshots of a collection under construction). + * + * @tparam Elem the type of elements that get added to the builder. + * @tparam To the type of collection that it produced. + * + * @define multipleResults + * + * This Builder can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ +trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] { + /** Clears the contents of this builder. + * After execution of this method, the builder will contain no elements. + * + * If executed immediately after a call to `result()`, this allows a new + * instance of the same type of collection to be built. + */ + override def clear(): Unit // Note: overriding for Scaladoc only! + + /** Produces a collection from the added elements. + * + * After a call to `result`, the behavior of all other methods is undefined + * save for `clear()`. If `clear()` is called, then the builder is reset and + * may be used to build another instance. + * + * @return a collection containing the elements added to this builder. + */ + override def result(): To // Note: overriding for Scaladoc only! +} diff --git a/tests/pos-special/stdlib/collection/mutable/Seq.scala b/tests/pos-special/stdlib/collection/mutable/Seq.scala index 443eec379c1b..e83d79987208 100644 --- a/tests/pos-special/stdlib/collection/mutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/mutable/Seq.scala @@ -13,7 +13,6 @@ package scala.collection.mutable import scala.collection.{IterableFactoryDefaults, SeqFactory} -import language.experimental.captureChecking trait Seq[A] extends Iterable[A] diff --git a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala new file mode 100644 index 000000000000..67066f99e07e --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala @@ -0,0 +1,38 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +/** + * A generic trait for ordered mutable maps. Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Note that when checking for equality [[SeqMap]] does not take into account + * ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * + * @define coll mutable Seq map + * @define Coll `mutable.SeqMap` + */ + +trait SeqMap[K, V] extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + +object SeqMap extends MapFactory.Delegate[SeqMap](LinkedHashMap) diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala new file mode 100644 index 000000000000..6530e8fedf05 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Set.scala @@ -0,0 +1,122 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+
+import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps}
+
+/** Base trait for mutable sets */
+trait Set[A]
+  extends Iterable[A]
+    with collection.Set[A]
+    with SetOps[A, Set, Set[A]]
+    with IterableFactoryDefaults[A, Set] {
+
+  override def iterableFactory: IterableFactory[Set] = Set
+}
+
+/**
+ * @define coll mutable set
+ * @define Coll `mutable.Set`
+ */
+trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]]
+  extends collection.SetOps[A, CC, C]
+    with IterableOps[A, CC, C] // only needed so we can use super[IterableOps] below
+    with Cloneable[C]
+    with Builder[A, C]
+    with Growable[A]
+    with Shrinkable[A] {
+
+  def result(): C = coll
+
+  /** Check whether the set contains the given element, and add it if not.
+   *
+   * @param elem the element to be added
+   * @return true if the element was added
+   */
+  def add(elem: A): Boolean =
+    !contains(elem) && {
+      coll += elem; true
+    }
+
+  /** Updates the presence of a single element in this set.
+   *
+   * This method allows one to add or remove an element `elem`
+   * from this set depending on the value of parameter `included`.
+   * Typically, one would use the following syntax:
+   * {{{
+   * set(elem) = true // adds element
+   * set(elem) = false // removes element
+   * }}}
+   *
+   * @param elem the element to be added or removed
+   * @param included a flag indicating whether element should be included or excluded.
+   */
+  def update(elem: A, included: Boolean): Unit = {
+    if (included) add(elem)
+    else remove(elem)
+  }
+
+  /** Removes an element from this set.
+   *
+   * @param elem the element to be removed
+   * @return true if this set contained the element before it was removed
+   */
+  def remove(elem: A): Boolean = {
+    val res = contains(elem)
+    coll -= elem
+    res
+  }
+
+  def diff(that: collection.Set[A]): C =
+    foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem)
+
+  @deprecated("Use filterInPlace instead", "2.13.0")
+  @inline final def retain(p: A => Boolean): Unit = filterInPlace(p)
+
+  /** Removes all elements from the set that do not satisfy the given predicate.
+   * @param p the predicate used to test elements. Only elements for
+   * which `p` returns `true` are retained in the set; all others
+   * are removed.
+   */
+  def filterInPlace(p: A => Boolean): this.type = {
+    if (nonEmpty) {
+      val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException
+      val arrayLength = array.length
+      var i = 0
+      while (i < arrayLength) {
+        val elem = array(i).asInstanceOf[A]
+        if (!p(elem)) {
+          this -= elem
+        }
+        i += 1
+      }
+    }
+    this
+  }
+
+  override def clone(): C = empty ++= this
+
+  override def knownSize: Int = super[IterableOps].knownSize
+}
+
+/**
+ * $factoryInfo
+ * @define coll mutable set
+ * @define Coll `mutable.Set`
+ */
+@SerialVersionUID(3L)
+object Set extends IterableFactory.Delegate[Set](HashSet)
+
+
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses.
*/ +abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] diff --git a/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala index de2a24ecf01f..006a3b88e49f 100644 --- a/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala @@ -14,7 +14,6 @@ package scala package collection.mutable import scala.annotation.tailrec -import language.experimental.captureChecking /** This trait forms part of collections that can be reduced * using a `-=` operator. @@ -53,7 +52,7 @@ trait Shrinkable[-A] { * @param xs the iterator producing the elements to remove. * @return the $coll itself */ - def subtractAll(xs: collection.IterableOnce[A]^): this.type = { + def subtractAll(xs: collection.IterableOnce[A]): this.type = { @tailrec def loop(xs: collection.LinearSeq[A]): Unit = { if (xs.nonEmpty) { subtractOne(xs.head) @@ -75,6 +74,6 @@ trait Shrinkable[-A] { } /** Alias for `subtractAll` */ - @`inline` final def --= (xs: collection.IterableOnce[A]^): this.type = subtractAll(xs) + @`inline` final def --= (xs: collection.IterableOnce[A]): this.type = subtractAll(xs) } diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala new file mode 100644 index 000000000000..eb2f0d231b7a --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala @@ -0,0 +1,103 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults} + +/** + * Base type for mutable sorted map collections + */ +trait SortedMap[K, V] + extends collection.SortedMap[K, V] + with Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this + + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + /** The same sorted map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault(d: K => V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
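+   *
+   * For example (a sketch):
+   * {{{
+   * val m = SortedMap("a" -> 1).withDefaultValue(0)
+   * m("b")     // 0, from the default
+   * m.get("b") // None: `get` is not affected by the default
+   * }}}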
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue(d: V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, _ => d) +} + +trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends collection.SortedMapOps[K, V, CC, C] + with MapOps[K, V, Map, C] { + + def unsorted: Map[K, V] + + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { + + @SerialVersionUID(3L) + final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K => V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] + with Serializable { + + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory + + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + implicit def ordering: Ordering[K] = underlying.ordering + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + override def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + override def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): SortedMap[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala new file mode 100644 index 000000000000..2bcb8dc7845a --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala @@ -0,0 +1,48 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */
+
+package scala
+package collection
+package mutable
+
+/**
+  * Base type for mutable sorted set collections
+  */
+trait SortedSet[A]
+  extends Set[A]
+    with collection.SortedSet[A]
+    with SortedSetOps[A, SortedSet, SortedSet[A]]
+    with SortedSetFactoryDefaults[A, SortedSet, Set] {
+
+  override def unsorted: Set[A] = this
+
+  override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet
+}
+
+/**
+  * @define coll mutable sorted set
+  * @define Coll `mutable.SortedSet`
+  */
+trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]]
  extends SetOps[A, Set, C]
+    with collection.SortedSetOps[A, CC, C] {
+
+  def unsorted: Set[A]
+}
+
+/**
+  * $factoryInfo
+  * @define coll mutable sorted set
+  * @define Coll `mutable.SortedSet`
+  */
+@SerialVersionUID(3L)
+object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet)
diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala
new file mode 100644
index 000000000000..675666bc805c
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Stack.scala
@@ -0,0 +1,142 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+
+import scala.annotation.{migration, nowarn}
+import scala.collection.generic.DefaultSerializable
+import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps}
+
+/** A stack implements a data structure which allows one to store and retrieve
+  * objects in a last-in-first-out (LIFO) fashion.
+  *
+  * Note that operations which consume and produce iterables preserve order,
+  * rather than reversing it (as would be expected from building a new stack
+  * by pushing an element at a time).
+  *
+  * @tparam A type of the elements contained in this stack.
+  *
+  * @define Coll `Stack`
+  * @define coll stack
+  * @define orderDependent
+  * @define orderDependentFold
+  * @define mayNotTerminateInf
+  * @define willNotTerminateInf
+  */
+@migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0")
+class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int)
+  extends ArrayDeque[A](array, start, end)
+    with IndexedSeqOps[A, Stack, Stack[A]]
+    with StrictOptimizedSeqOps[A, Stack, Stack[A]]
+    with IterableFactoryDefaults[A, Stack]
+    with ArrayDequeOps[A, Stack, Stack[A]]
+    with Cloneable[Stack[A]]
+    with DefaultSerializable {
+
+  def this(initialSize: Int = ArrayDeque.DefaultInitialSize) =
+    this(ArrayDeque.alloc(initialSize), start = 0, end = 0)
+
+  override def iterableFactory: SeqFactory[Stack] = Stack
+
+  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+  override protected[this] def stringPrefix = "Stack"
+
+  /**
+    * Adds an element to the top of this stack.
+    *
+    * @param elem the element to push
+    * @return this stack
+    */
+  def push(elem: A): this.type = prepend(elem)
+
+  /** Push two or more elements onto the stack. The last element
+    * of the sequence will be on top of the new stack.
+    *
+    * @param elems the element sequence.
+    * @return the stack with the new elements on top.
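+    *
+    * An illustrative sketch (editorial example, not part of the original patch):
+    * {{{
+    *   val s = Stack(1)
+    *   s.push(2, 3, 4) // 4 ends up on top
+    *   s.pop()         // 4
+    * }}}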
+ */ + def push(elem1: A, elem2: A, elems: A*): this.type = { + val k = elems.knownSize + ensureSize(length + (if(k >= 0) k + 2 else 3)) + prepend(elem1).prepend(elem2).pushAll(elems) + } + + /** Push all elements in the given iterable object onto the stack. The + * last element in the iterable object will be on top of the new stack. + * + * @param elems the iterable object. + * @return the stack with the new elements on top. + */ + def pushAll(elems: scala.collection.IterableOnce[A]): this.type = + prependAll(elems match { + case it: scala.collection.Seq[A] => it.view.reverse + case it => IndexedSeq.from(it).view.reverse + }) + + /** + * Removes the top element from this stack and return it + * + * @return + * @throws NoSuchElementException when stack is empty + */ + def pop(): A = removeHead() + + /** + * Pop all elements from this stack and return it + * + * @return The removed elements + */ + def popAll(): scala.collection.Seq[A] = removeAll() + + /** + * Returns and removes all elements from the top of this stack which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return The removed elements + */ + def popWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f) + + /** Returns the top element of the stack. This method will not remove + * the element from the stack. An error is signaled if there is no + * element on the stack. + * + * @throws NoSuchElementException + * @return the top element + */ + @`inline` final def top: A = head + + override protected def klone(): Stack[A] = { + val bf = newSpecificBuilder + bf ++= this + bf.result() + } + + override protected def ofArray(array: Array[AnyRef], end: Int): Stack[A] = + new Stack(array, start = 0, end) + +} + +/** + * $factoryInfo + * @define coll stack + * @define Coll `Stack` + */ +@SerialVersionUID(3L) +object Stack extends StrictOptimizedSeqFactory[Stack] { + + def from[A](source: IterableOnce[A]): Stack[A] = empty ++= source + + def empty[A]: Stack[A] = new Stack + + def newBuilder[A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala index c7859214821d..1d8b9563e917 100644 --- a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala @@ -14,7 +14,6 @@ package scala.collection.mutable import scala.collection.{IterableFactoryDefaults, IterableOnce} import scala.collection.immutable.WrappedString -import language.experimental.captureChecking import scala.Predef.{ // unimport char-related implicit conversions to avoid triggering them accidentally genericArrayOps => _, @@ -82,7 +81,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr // Methods required to make this an IndexedSeq: def apply(i: Int): Char = underlying.charAt(i) - override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): StringBuilder = + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): StringBuilder = new StringBuilder() appendAll coll override protected def newSpecificBuilder: Builder[Char, StringBuilder] = @@ -185,7 +184,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr * @param xs the characters to be appended. * @return this StringBuilder. 
*/ - def appendAll(xs: IterableOnce[Char]^): this.type = { + def appendAll(xs: IterableOnce[Char]): this.type = { xs match { case x: WrappedString => underlying append x.unwrap case x: ArraySeq.ofChar => underlying append x.array @@ -314,7 +313,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr * @return this StringBuilder. * @throws StringIndexOutOfBoundsException if the index is out of bounds. */ - def insertAll(index: Int, xs: IterableOnce[Char]^): this.type = + def insertAll(index: Int, xs: IterableOnce[Char]): this.type = insertAll(index, (ArrayBuilder.make[Char] ++= xs).result()) /** Inserts the given Array[Char] into this sequence at the given index. diff --git a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala new file mode 100644 index 000000000000..1af968a08ac3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala @@ -0,0 +1,257 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{RedBlackTree => RB} + +/** + * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. + * + * @param ordering the implicit ordering used to compare objects of type `A`. + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +sealed class TreeMap[K, V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, TreeMap[K, V]] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { + + override def sortedMapFactory = TreeMap + + /** + * Creates an empty `TreeMap`. + * @param ord the implicit ordering used to compare objects of type `K`. + * @return an empty `TreeMap`. 
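+   *
+   * An illustrative sketch (editorial example, not part of the original patch):
+   * {{{
+   *   val tm = TreeMap.empty[Int, String] // uses the implicit Ordering[Int]
+   *   tm += 2 -> "two"
+   *   tm += 1 -> "one"
+   *   tm.firstKey // 1 -- iteration follows the key ordering
+   * }}}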
+ */ + def this()(implicit ord: Ordering[K]) = this(RB.Tree.empty)(ord) + + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree) + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, None) + } + + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, None) + } + + def keysIteratorFrom(start: K): Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, Some(start)) + } + + def iteratorFrom(start: K): Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree, Some(start)) + } + + override def valuesIteratorFrom(start: K): Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, Some(start)) + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Node[K, V]]( + size, tree.root, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree.root, _.left, _.right, _.value)) + } + s.asInstanceOf[S with EfficientSplit] + } + + def addOne(elem: (K, V)): this.type = { RB.insert(tree, elem._1, elem._2); this } + + def subtractOne(elem: K): this.type = { RB.delete(tree, elem); this } + + override def clear(): Unit = RB.clear(tree) + + def get(key: K): Option[V] = RB.get(tree, key) + + /** + * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and + * vice versa. + * + * Only entries with keys between this projection's key range will ever appear as elements of this map, independently + * of whether the entries are added through the original map or through this view. That means that if one inserts a + * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the + * newly added entry. Mutations are always reflected in the original map, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. 
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = new TreeMapProjection(from, until) + + override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) + override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) + + override def size: Int = RB.size(tree) + override def knownSize: Int = size + override def isEmpty: Boolean = RB.isEmpty(tree) + + override def contains(key: K): Boolean = RB.contains(tree, key) + + override def head: (K, V) = RB.min(tree).get + + override def last: (K, V) = RB.max(tree).get + + override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) + + override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) + + override protected[this] def className: String = "TreeMap" + + + /** + * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa. + * + * Only entries with keys between this projection's key range will ever appear as elements of this map, independently + * of whether the entries are added through the original map or through this view. That means that if one inserts a + * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the + * newly added entry. Mutations are always reflected in the original map, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + private[this] final class TreeMapProjection(from: Option[K], until: Option[K]) extends TreeMap[K, V](tree) { + + /** + * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). + */ + private[this] def pickLowerBound(newFrom: Option[K]): Option[K] = (from, newFrom) match { + case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) + case (None, _) => newFrom + case _ => from + } + + /** + * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). + */ + private[this] def pickUpperBound(newUntil: Option[K]): Option[K] = (until, newUntil) match { + case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) + case (None, _) => newUntil + case _ => until + } + + /** + * Returns true if the argument is inside the view bounds (between `from` and `until`). 
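+     * For instance (editorial note, not part of the original patch), with `from = Some(3)`
+     * and `until = Some(8)`, the keys `3` to `7` are inside while `8` is not, since the
+     * lower bound is inclusive and the upper bound exclusive.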
+ */ + private[this] def isInsideViewBounds(key: K): Boolean = { + val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 + val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 + afterFrom && beforeUntil + } + + override def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = + new TreeMapProjection(pickLowerBound(from), pickUpperBound(until)) + + override def get(key: K) = if (isInsideViewBounds(key)) RB.get(tree, key) else None + + override def iterator = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, from, until) + override def keysIterator: Iterator[K] = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, from, until) + override def valuesIterator: Iterator[V] = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, from, until) + override def keysIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, pickLowerBound(Some(start)), until) + override def iteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, pickLowerBound(Some(start)), until) + override def valuesIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, pickLowerBound(Some(start)), until) + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 + override def isEmpty = RB.size(tree) == 0 || !iterator.hasNext + override def contains(key: K) = isInsideViewBounds(key) && RB.contains(tree, key) + + override def head = headOption.get + override def headOption = { + val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree) + (entry, until) match { + case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None + case _ => entry + } + } + + override def last = lastOption.get + override def lastOption = { + val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree) + (entry, from) match { + case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None + case _ => entry + } + } + + // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized + // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See + // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. + override def foreach[U](f: ((K, V)) => U): Unit = iterator.foreach(f) + + override def clone() = super.clone().rangeImpl(from, until) + } + +} + +/** + * $factoryInfo + * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): TreeMap[K, V] = + Growable.from(empty[K, V], it) + + def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap[K, V]() + + def newBuilder[K: Ordering, V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala new file mode 100644 index 000000000000..bed474dc02a3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala @@ -0,0 +1,218 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{RedBlackTree => RB} +import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable} + +/** + * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. + * + * @param ordering the implicit ordering used to compare objects of type `A`. + * @tparam A the type of the keys contained in this tree set. + * + * @define Coll mutable.TreeSet + * @define coll mutable tree set + */ +// Original API designed in part by Lucien Pereira +sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedIterableOps[A, Set, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { + + if (ordering eq null) + throw new NullPointerException("ordering must not be null") + + /** + * Creates an empty `TreeSet`. + * @param ord the implicit ordering used to compare objects of type `A`. + * @return an empty `TreeSet`. + */ + def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) + + override def sortedIterableFactory: SortedIterableFactory[TreeSet] = TreeSet + + def iterator: collection.Iterator[A] = RB.keysIterator(tree) + + def iteratorFrom(start: A): collection.Iterator[A] = RB.keysIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[A, Null] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + def addOne(elem: A): this.type = { + RB.insert(tree, elem, null) + this + } + + def subtractOne(elem: A): this.type = { + RB.delete(tree, elem) + this + } + + def clear(): Unit = RB.clear(tree) + + def contains(elem: A): Boolean = RB.contains(tree, elem) + + def unconstrained: collection.Set[A] = this + + def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetProjection(from, until) + + override protected[this] def className: String = "TreeSet" + + override def size: Int = RB.size(tree) + override def knownSize: Int = size + override def isEmpty: Boolean = RB.isEmpty(tree) + + override def head: A = RB.minKey(tree).get + + override def last: A = RB.maxKey(tree).get + + override def minAfter(key: A): Option[A] = RB.minKeyAfter(tree, key) + + override def maxBefore(key: A): Option[A] = RB.maxKeyBefore(tree, key) + + override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) + + + /** + * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa. 
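+   *
+   * An illustrative sketch of the write-through behaviour described below (editorial example,
+   * not part of the original patch):
+   * {{{
+   *   val ts = TreeSet(1, 5, 9)
+   *   val view = ts.rangeImpl(Some(3), Some(8))
+   *   view += 7        // reflected in `ts`
+   *   view.contains(9) // false -- 9 lies outside the view bounds
+   *   ts.contains(7)   // true
+   * }}}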
+ * + * Only keys between this projection's key range will ever appear as elements of this set, independently of whether + * the elements are added through the original set or through this view. That means that if one inserts an element in + * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. + * Mutations are always reflected in the original set, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + private[this] final class TreeSetProjection(from: Option[A], until: Option[A]) extends TreeSet[A](tree) { + + /** + * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). + */ + private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { + case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) + case (None, _) => newFrom + case _ => from + } + + /** + * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). + */ + private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { + case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) + case (None, _) => newUntil + case _ => until + } + + /** + * Returns true if the argument is inside the view bounds (between `from` and `until`). + */ + private[this] def isInsideViewBounds(key: A): Boolean = { + val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 + val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 + afterFrom && beforeUntil + } + + override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = + new TreeSetProjection(pickLowerBound(from), pickUpperBound(until)) + + override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) + + override def iterator = RB.keysIterator(tree, from, until) + override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) + + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 + override def isEmpty: Boolean = RB.size(tree) == 0 || !iterator.hasNext + + override def head: A = headOption.get + override def headOption: Option[A] = { + val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree) + (elem, until) match { + case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None + case _ => elem + } + } + + override def last: A = lastOption.get + override def lastOption = { + val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree) + (elem, from) match { + case (Some(e), Some(fr)) if ordering.compare(e, fr) < 0 => None + case _ => elem + } + } + + // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized + // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See + // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
+ override def foreach[U](f: A => U): Unit = iterator.foreach(f) + + override def clone(): mutable.TreeSet[A] = super.clone().rangeImpl(from, until) + + } + +} + +/** + * $factoryInfo + * @define Coll `mutable.TreeSet` + * @define coll mutable tree set + */ +@SerialVersionUID(3L) +object TreeSet extends SortedIterableFactory[TreeSet] { + + def empty[A : Ordering]: TreeSet[A] = new TreeSet[A]() + + def from[E](it: IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = + it match { + case ts: TreeSet[E] if ordering == ts.ordering => + new TreeSet[E](ts.tree.treeCopy()) + case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => + new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) + case r: Range if (ordering eq Ordering.Int) || (ordering eq Ordering.Int.reverse) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + new TreeSet[E](RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size)) + case _ => + val t: RB.Tree[E, Null] = RB.Tree.empty + val i = it.iterator + while (i.hasNext) RB.insert(t, i.next(), null) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { + private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty + def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } + def result(): TreeSet[A] = new TreeSet[A](tree) + def clear(): Unit = { tree = RB.Tree.empty } + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala new file mode 100644 index 000000000000..489f2a1b0387 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala @@ -0,0 +1,442 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import scala.reflect.ClassTag +import scala.collection.immutable.Nil + +/** A buffer that stores elements in an unrolled linked list. + * + * Unrolled linked lists store elements in linked fixed size + * arrays. + * + * Unrolled buffers retain locality and low memory overhead + * properties of array buffers, but offer much more efficient + * element addition, since they never reallocate and copy the + * internal array. + * + * However, they provide `O(n/m)` complexity random access, + * where `n` is the number of elements, and `m` the size of + * internal array chunks. + * + * Ideal to use when: + * - elements are added to the buffer and then all of the + * elements are traversed sequentially + * - two unrolled buffers need to be concatenated (see `concat`) + * + * Better than singly linked lists for random access, but + * should still be avoided for such a purpose. 
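+ *
+ *  A minimal usage sketch (editorial example, not part of the original patch):
+ *  {{{
+ *    val b = UnrolledBuffer(1, 2, 3)
+ *    b += 4             // O(1) append, no array reallocation
+ *    b.foreach(println) // sequential traversal, the ideal access pattern
+ *  }}}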
+ * + * @define coll unrolled buffer + * @define Coll `UnrolledBuffer` + * + */ +@SerialVersionUID(3L) +sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) + extends AbstractBuffer[T] + with Buffer[T] + with Seq[T] + with SeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with StrictOptimizedSeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with EvidenceIterableFactoryDefaults[T, UnrolledBuffer, ClassTag] + with Builder[T, UnrolledBuffer[T]] + with DefaultSerializable { + + import UnrolledBuffer.Unrolled + + @transient private var headptr = newUnrolled + @transient private var lastptr = headptr + @transient private var sz = 0 + + private[collection] def headPtr = headptr + private[collection] def headPtr_=(head: Unrolled[T]) = headptr = head + private[collection] def lastPtr = lastptr + private[collection] def lastPtr_=(last: Unrolled[T]) = lastptr = last + private[collection] def size_=(s: Int) = sz = s + + protected def evidenceIterableFactory: UnrolledBuffer.type = UnrolledBuffer + protected def iterableEvidence: ClassTag[T] = tag + + override def iterableFactory: SeqFactory[UnrolledBuffer] = UnrolledBuffer.untagged + + protected def newUnrolled = new Unrolled[T](this) + + // The below would allow more flexible behavior without requiring inheritance + // that is risky because all the important internals are private. + // private var myLengthPolicy: Int => Int = x => x + // + // /** Specifies how the array lengths should vary. + // * + // * By default, `UnrolledBuffer` uses arrays of a fixed size. A length + // * policy can be given that changes this scheme to, for instance, an + // * exponential growth. + // * + // * @param nextLength computes the length of the next array from the length of the latest one + // */ + // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength } + private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz) + + def classTagCompanion = UnrolledBuffer + + /** Concatenates the target unrolled buffer to this unrolled buffer. + * + * The specified buffer `that` is cleared after this operation. This is + * an O(1) operation. 
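+   *
+   *  For instance (editorial sketch, not part of the original patch):
+   *  {{{
+   *    val a = UnrolledBuffer(1, 2); val b = UnrolledBuffer(3)
+   *    a.concat(b) // a now holds 1, 2, 3; b has been cleared
+   *  }}}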
+ * + * @param that the unrolled buffer whose elements are added to this buffer + */ + def concat(that: UnrolledBuffer[T]) = { + // bind the two together + if (!lastptr.bind(that.headptr)) lastptr = that.lastPtr + + // update size + sz += that.sz + + // `that` is no longer usable, so clear it + // here we rely on the fact that `clear` allocates + // new nodes instead of modifying the previous ones + that.clear() + + // return a reference to this + this + } + + def addOne(elem: T) = { + lastptr = lastptr.append(elem) + sz += 1 + this + } + + def clear(): Unit = { + headptr = newUnrolled + lastptr = headptr + sz = 0 + } + + def iterator: Iterator[T] = new AbstractIterator[T] { + var pos: Int = -1 + var node: Unrolled[T] = headptr + scan() + + private def scan(): Unit = { + pos += 1 + while (pos >= node.size) { + pos = 0 + node = node.next + if (node eq null) return + } + } + def hasNext = node ne null + def next() = if (hasNext) { + val r = node.array(pos) + scan() + r + } else Iterator.empty.next() + } + + // this should be faster than the iterator + override def foreach[U](f: T => U) = headptr.foreach(f) + + def result() = this + + def length = sz + + override def knownSize: Int = sz + + def apply(idx: Int) = + if (idx >= 0 && idx < sz) headptr(idx) + else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + def update(idx: Int, newelem: T) = + if (idx >= 0 && idx < sz) headptr(idx) = newelem + else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + def mapInPlace(f: T => T): this.type = { + headptr.mapInPlace(f) + this + } + + def remove(idx: Int) = + if (idx >= 0 && idx < sz) { + sz -= 1 + headptr.remove(idx, this) + } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + @tailrec final def remove(idx: Int, count: Int): Unit = + if (count > 0) { + remove(idx) + remove(idx, count-1) + } + + def prepend(elem: T) = { + headptr = headptr prepend elem + sz += 1 + this + } + + def insert(idx: Int, elem: T): Unit = + insertAll(idx, elem :: Nil) + + def insertAll(idx: Int, elems: IterableOnce[T]): Unit = + if (idx >= 0 && idx <= sz) { + sz += headptr.insertAll(idx, elems, this) + } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + override def subtractOne(elem: T): this.type = { + if (headptr.subtractOne(elem, this)) { + sz -= 1 + } + this + } + + def patchInPlace(from: Int, patch: collection.IterableOnce[T], replaced: Int): this.type = { + remove(from, replaced) + insertAll(from, patch) + this + } + + private def writeObject(out: java.io.ObjectOutputStream): Unit = { + out.defaultWriteObject + out writeInt sz + for (elem <- this) out writeObject elem + } + + private def readObject(in: java.io.ObjectInputStream): Unit = { + in.defaultReadObject + + val num = in.readInt + + headPtr = newUnrolled + lastPtr = headPtr + sz = 0 + var i = 0 + while (i < num) { + this += in.readObject.asInstanceOf[T] + i += 1 + } + } + + override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this + + override protected[this] def className = "UnrolledBuffer" +} + + +@SerialVersionUID(3L) +object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] { self => + + val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self) + + def empty[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + def from[A : ClassTag](source: scala.collection.IterableOnce[A]): UnrolledBuffer[A] = newBuilder[A].addAll(source) + + def 
newBuilder[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + final val waterline: Int = 50 + + final def waterlineDenom: Int = 100 + + @deprecated("Use waterlineDenom instead.", "2.13.0") + final val waterlineDelim: Int = waterlineDenom + + private[collection] val unrolledlength = 32 + + /** Unrolled buffer node. + */ + class Unrolled[T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { + private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) + private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) + + private def nextlength = if (buff eq null) unrolledlength else buff.calcNextLength(array.length) + + // adds and returns itself or the new unrolled if full + @tailrec final def append(elem: T): Unrolled[T] = if (size < array.length) { + array(size) = elem + size += 1 + this + } else { + next = new Unrolled[T](0, new Array[T](nextlength), null, buff) + next append elem + } + def foreach[U](f: T => U): Unit = { + var unrolled = this + var i = 0 + while (unrolled ne null) { + val chunkarr = unrolled.array + val chunksz = unrolled.size + while (i < chunksz) { + val elem = chunkarr(i) + f(elem) + i += 1 + } + i = 0 + unrolled = unrolled.next + } + } + def mapInPlace(f: T => T): Unit = { + var unrolled = this + var i = 0 + while (unrolled ne null) { + val chunkarr = unrolled.array + val chunksz = unrolled.size + while (i < chunksz) { + val elem = chunkarr(i) + chunkarr(i) = f(elem) + i += 1 + } + i = 0 + unrolled = unrolled.next + } + } + @tailrec final def apply(idx: Int): T = + if (idx < size) array(idx) else next.apply(idx - size) + @tailrec final def update(idx: Int, newelem: T): Unit = + if (idx < size) array(idx) = newelem else next.update(idx - size, newelem) + @tailrec final def locate(idx: Int): Unrolled[T] = + if (idx < size) this else next.locate(idx - size) + def prepend(elem: T) = if (size < array.length) { + // shift the elements of the array right + // then insert the element + shiftright() + array(0) = elem + size += 1 + this + } else { + // allocate a new node and store element + // then make it point to this + val newhead = new Unrolled[T](buff) + newhead append elem + newhead.next = this + newhead + } + // shifts right assuming enough space + private def shiftright(): Unit = { + var i = size - 1 + while (i >= 0) { + array(i + 1) = array(i) + i -= 1 + } + } + // returns pointer to new last if changed + @tailrec final def remove(idx: Int, buffer: UnrolledBuffer[T]): T = + if (idx < size) { + // remove the element + // then try to merge with the next bucket + val r = array(idx) + shiftleft(idx) + size -= 1 + if (tryMergeWithNext()) buffer.lastPtr = this + r + } else next.remove(idx - size, buffer) + + @tailrec final def subtractOne(elem: T, buffer: UnrolledBuffer[T]): Boolean = { + var i = 0 + while (i < size) { + if(array(i) == elem) { + remove(i, buffer) + return true + } + i += 1 + } + if(next ne null) next.subtractOne(elem, buffer) else false + } + + // shifts left elements after `leftb` (overwrites `leftb`) + private def shiftleft(leftb: Int): Unit = { + var i = leftb + while (i < (size - 1)) { + array(i) = array(i + 1) + i += 1 + } + nullout(i, i + 1) + } + protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDenom)) { + // copy the next array, then discard the next node + Array.copy(next.array, 0, array, size, next.size) + size = size + next.size + next = 
next.next + if (next eq null) true else false // checks if last node was thrown out + } else false + + @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T], buffer: UnrolledBuffer[T]): Int = { + if (idx < size) { + // divide this node at the appropriate position and insert all into head + // update new next + val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) + Array.copy(array, idx, newnextnode.array, 0, size - idx) + newnextnode.size = size - idx + newnextnode.next = next + + // update this + nullout(idx, size) + size = idx + next = null + + // insert everything from iterable to this + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + curr.next = newnextnode + + // try to merge the last node of this with the newnextnode and fix tail pointer if needed + if (curr.tryMergeWithNext()) buffer.lastPtr = curr + else if (newnextnode.next eq null) buffer.lastPtr = newnextnode + appended + } + else if (idx == size || (next eq null)) { + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + appended + } + else next.insertAll(idx - size, t, buffer) + } + + private def nullout(from: Int, until: Int): Unit = { + var idx = from + while (idx < until) { + array(idx) = null.asInstanceOf[T] // TODO find a way to assign a default here!! + idx += 1 + } + } + + // assumes this is the last node + // `thathead` and `thatlast` are head and last node + // of the other unrolled list, respectively + def bind(thathead: Unrolled[T]) = { + assert(next eq null) + next = thathead + tryMergeWithNext() + } + + override def toString: String = + array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "") + } +} + +// This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: +// Todo -- revisit whether inheritance is the best way to achieve this functionality +private[collection] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { + override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz + override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala new file mode 100644 index 000000000000..7286a318e1f9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala @@ -0,0 +1,55 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike} + +/** A hash map with references to entries which are weakly reachable. Entries are + * removed from this map when the key is no longer (strongly) referenced. This class wraps + * `java.util.WeakHashMap`. 
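+ *
+ *  A hedged sketch of the weak-reference behaviour (editorial example, not part of the original patch):
+ *  {{{
+ *    val cache = WeakHashMap[AnyRef, String]()
+ *    var key: AnyRef = new Object
+ *    cache(key) = "cached"
+ *    key = null // once no strong reference remains, a subsequent GC
+ *               // may silently drop the entry from `cache`
+ *  }}}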
+ * + * @tparam K type of keys contained in this map + * @tparam V type of values associated with the keys + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]] + * section on `Weak Hash Maps` for more information. + * + * @define Coll `WeakHashMap` + * @define coll weak hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +class WeakHashMap[K, V] extends JMapWrapper[K, V](new java.util.WeakHashMap) + with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]] + with MapFactoryDefaults[K, V, WeakHashMap, Iterable] { + override def empty = new WeakHashMap[K, V] + override def mapFactory: MapFactory[WeakHashMap] = WeakHashMap + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "WeakHashMap" +} + +/** $factoryInfo + * @define Coll `WeakHashMap` + * @define coll weak hash map + */ +@SerialVersionUID(3L) +object WeakHashMap extends MapFactory[WeakHashMap] { + def empty[K, V]: WeakHashMap[K,V] = new WeakHashMap[K, V] + def from[K, V](it: collection.IterableOnce[(K, V)]): WeakHashMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[K, V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) +} + diff --git a/tests/pos-special/stdlib/collection/mutable/package.scala b/tests/pos-special/stdlib/collection/mutable/package.scala new file mode 100644 index 000000000000..4915e8a48b22 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/package.scala @@ -0,0 +1,41 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + + +package object mutable { + @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") + type WrappedArray[X] = ArraySeq[X] + @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") + val WrappedArray = ArraySeq + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") + type ArrayStack[X] = Stack[X] + @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") + val ArrayStack = Stack + + @deprecated("mutable.LinearSeq has been removed; use LinearSeq with mutable.Seq instead", "2.13.0") + type LinearSeq[X] = Seq[X] with scala.collection.LinearSeq[X] + + @deprecated("GrowingBuilder has been renamed to GrowableBuilder", "2.13.0") + type GrowingBuilder[Elem, To <: Growable[Elem]] = GrowableBuilder[Elem, To] + + @deprecated("IndexedOptimizedSeq has been renamed to IndexedSeq", "2.13.0") + type IndexedOptimizedSeq[A] = IndexedSeq[A] + + @deprecated("IndexedOptimizedBuffer has been renamed to IndexedBuffer", "2.13.0") + type IndexedOptimizedBuffer[A] = IndexedBuffer[A] +} diff --git a/tests/pos-special/stdlib/collection/package.scala b/tests/pos-special/stdlib/collection/package.scala new file mode 100644 index 000000000000..954573ff1ddd --- /dev/null +++ b/tests/pos-special/stdlib/collection/package.scala @@ -0,0 +1,80 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala + +package object collection { + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + type TraversableOnce[+X] = IterableOnce[X] + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + val TraversableOnce = IterableOnce + @deprecated("Use SeqOps instead of SeqLike", "2.13.0") + type SeqLike[A, T] = SeqOps[A, Seq, T] + @deprecated("Use SeqOps (for the methods) or IndexedSeqOps (for fast indexed access) instead of ArrayLike", "2.13.0") + type ArrayLike[A] = SeqOps[A, Seq, Seq[A]] + + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversableOnce[+X] = IterableOnce[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversableOnce = IterableOnce + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenIterable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenIterable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenSeq[+X] = Seq[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSeq = Seq + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenSet[X] = Set[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSet = Set + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenMap[K, +V] = Map[K, V] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenMap = Map + + /** Needed to circumvent a difficulty between dotty and scalac concerning + * the right top type for a type parameter of kind * -> *. + * In Scalac, we can provide `Any`, as `Any` is kind-polymorphic. In dotty this is not allowed. + * In dotty, we can provide `[X] => Any`. But Scalac does not know lambda syntax. + */ + private[scala] type AnyConstr[X] = Any + + /** An extractor used to head/tail deconstruct sequences. */ + object +: { + /** Splits a sequence into head +: tail. + * @return Some((head, tail)) if sequence is non-empty. None otherwise. + */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(A, C)] = + if(t.isEmpty) None + else Some(t.head -> t.tail) + } + + /** An extractor used to init/last deconstruct sequences. */ + object :+ { + /** Splits a sequence into init :+ last. + * @return Some((init, last)) if sequence is non-empty. None otherwise. 
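+      * For example (editorial sketch, not part of the original patch):
+      * {{{
+      *   List(1, 2, 3) match { case init :+ last => (init, last) } // (List(1, 2), 3)
+      * }}}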
+ */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(C, A)] = + if(t.isEmpty) None + else Some(t.init -> t.last) + } +} From 16966fd7719d22b3af6ded2b4278d58bfbf9b068 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Mon, 25 Sep 2023 17:51:19 +0200 Subject: [PATCH 160/216] Add attributes section to TASTy --- .../src/dotty/tools/dotc/core/Flags.scala | 4 +-- .../dotc/core/tasty/AttributePickler.scala | 26 +++++++++++++++ .../dotc/core/tasty/AttributeUnpickler.scala | 33 +++++++++++++++++++ .../tools/dotc/core/tasty/Attributes.scala | 6 ++++ .../dotc/core/tasty/DottyUnpickler.scala | 21 ++++++++---- .../tools/dotc/core/tasty/ScratchData.scala | 2 ++ .../tools/dotc/core/tasty/TastyPrinter.scala | 26 ++++++++++++--- .../tools/dotc/core/tasty/TreeUnpickler.scala | 15 +++++++-- .../dotc/transform/ExtensionMethods.scala | 2 +- .../dotty/tools/dotc/transform/Pickler.scala | 6 ++++ scala2-library-tasty-tests/src/Main.scala | 7 ++++ tasty/src/dotty/tools/tasty/TastyFormat.scala | 21 +++++++++++- 12 files changed, 152 insertions(+), 17 deletions(-) create mode 100644 compiler/src/dotty/tools/dotc/core/tasty/AttributePickler.scala create mode 100644 compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala create mode 100644 compiler/src/dotty/tools/dotc/core/tasty/Attributes.scala diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 47b5c9f17af2..6ae9541a327f 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -308,8 +308,8 @@ object Flags { */ val (_, StableRealizable @ _, _) = newFlags(24, "") - /** A case parameter accessor */ - val (_, CaseAccessor @ _, _) = newFlags(25, "") + /** A case parameter accessor / an unpickled Scala 2 TASTy (only for Scala 2 stdlib) */ + val (_, CaseAccessor @ _, Scala2Tasty @ _) = newFlags(25, "", "") /** A Scala 2x super accessor / an unpickled Scala 2.x class */ val (SuperParamAliasOrScala2x @ _, SuperParamAlias @ _, Scala2x @ _) = newFlags(26, "", "") diff --git a/compiler/src/dotty/tools/dotc/core/tasty/AttributePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/AttributePickler.scala new file mode 100644 index 000000000000..669d41910d57 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/tasty/AttributePickler.scala @@ -0,0 +1,26 @@ +package dotty.tools.dotc.core.tasty + +import dotty.tools.dotc.ast.{tpd, untpd} + +import dotty.tools.tasty.TastyBuffer +import dotty.tools.tasty.TastyFormat, TastyFormat.AttributesSection + +import java.nio.charset.StandardCharsets + +object AttributePickler: + + def pickleAttributes( + attributes: Attributes, + pickler: TastyPickler, + buf: TastyBuffer + ): Unit = + if attributes.scala2StandardLibrary || attributes.explicitNulls then // or any other attribute is set + pickler.newSection(AttributesSection, buf) + // Pickle attributes + if attributes.scala2StandardLibrary then buf.writeNat(TastyFormat.SCALA2STANDARDLIBRARYattr) + if attributes.explicitNulls then buf.writeNat(TastyFormat.EXPLICITNULLSattr) + end if + + end pickleAttributes + +end AttributePickler diff --git a/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala new file mode 100644 index 000000000000..206b4b799ac3 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala @@ -0,0 +1,33 @@ +package dotty.tools.dotc +package core.tasty + +import scala.language.unsafeNulls + 
+import dotty.tools.tasty.{TastyFormat, TastyReader, TastyBuffer} + +import java.nio.charset.StandardCharsets + +class AttributeUnpickler(reader: TastyReader): + import reader._ + + lazy val attributeTags: List[Int] = + val listBuilder = List.newBuilder[Int] + while !isAtEnd do listBuilder += readNat() + listBuilder.result() + + lazy val attributes: Attributes = { + var scala2StandardLibrary = false + var explicitNulls = false + for attributeTag <- attributeTags do + attributeTag match + case TastyFormat.SCALA2STANDARDLIBRARYattr => scala2StandardLibrary = true + case TastyFormat.EXPLICITNULLSattr => explicitNulls = true + case attribute => + assert(false, "Unexpected attribute value: " + attribute) + Attributes( + scala2StandardLibrary, + explicitNulls, + ) + } + +end AttributeUnpickler diff --git a/compiler/src/dotty/tools/dotc/core/tasty/Attributes.scala b/compiler/src/dotty/tools/dotc/core/tasty/Attributes.scala new file mode 100644 index 000000000000..77d2d391bd98 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/tasty/Attributes.scala @@ -0,0 +1,6 @@ +package dotty.tools.dotc.core.tasty + +class Attributes( + val scala2StandardLibrary: Boolean, + val explicitNulls: Boolean, +) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala index bb818edc1f82..84e70cc91663 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala @@ -13,17 +13,17 @@ import Names.SimpleName import TreeUnpickler.UnpickleMode import dotty.tools.tasty.TastyReader -import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection} +import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection, AttributesSection} object DottyUnpickler { /** Exception thrown if classfile is corrupted */ class BadSignature(msg: String) extends RuntimeException(msg) - class TreeSectionUnpickler(posUnpickler: Option[PositionUnpickler], commentUnpickler: Option[CommentUnpickler]) + class TreeSectionUnpickler(posUnpickler: Option[PositionUnpickler], commentUnpickler: Option[CommentUnpickler], attributeUnpickler: Option[AttributeUnpickler]) extends SectionUnpickler[TreeUnpickler](ASTsSection) { def unpickle(reader: TastyReader, nameAtRef: NameTable): TreeUnpickler = - new TreeUnpickler(reader, nameAtRef, posUnpickler, commentUnpickler) + new TreeUnpickler(reader, nameAtRef, posUnpickler, commentUnpickler, attributeUnpickler) } class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler](PositionsSection) { @@ -35,6 +35,10 @@ object DottyUnpickler { def unpickle(reader: TastyReader, nameAtRef: NameTable): CommentUnpickler = new CommentUnpickler(reader) } + class AttributesSectionUnpickler extends SectionUnpickler[AttributeUnpickler](AttributesSection) { + def unpickle(reader: TastyReader, nameAtRef: NameTable): AttributeUnpickler = + new AttributeUnpickler(reader) + } } /** A class for unpickling Tasty trees and symbols. 
@@ -48,7 +52,8 @@ class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLe val unpickler: TastyUnpickler = new TastyUnpickler(bytes) private val posUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler) private val commentUnpicklerOpt = unpickler.unpickle(new CommentsSectionUnpickler) - private val treeUnpickler = unpickler.unpickle(treeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt)).get + private val attributeUnpicklerOpt = unpickler.unpickle(new AttributesSectionUnpickler) + private val treeUnpickler = unpickler.unpickle(treeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt, attributeUnpicklerOpt)).get /** Enter all toplevel classes and objects into their scopes * @param roots a set of SymDenotations that should be overwritten by unpickling @@ -56,8 +61,12 @@ class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLe def enter(roots: Set[SymDenotation])(using Context): Unit = treeUnpickler.enter(roots) - protected def treeSectionUnpickler(posUnpicklerOpt: Option[PositionUnpickler], commentUnpicklerOpt: Option[CommentUnpickler]): TreeSectionUnpickler = - new TreeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt) + protected def treeSectionUnpickler( + posUnpicklerOpt: Option[PositionUnpickler], + commentUnpicklerOpt: Option[CommentUnpickler], + attributeUnpicklerOpt: Option[AttributeUnpickler] + ): TreeSectionUnpickler = + new TreeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt, attributeUnpicklerOpt) protected def computeRootTrees(using Context): List[Tree] = treeUnpickler.unpickle(mode) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala b/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala index b36c78a77ac6..889cf31a40b0 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/ScratchData.scala @@ -10,6 +10,7 @@ class ScratchData: val pickledIndices = new mutable.BitSet val commentBuffer = new TastyBuffer(5000) + val attributeBuffer = new TastyBuffer(32) def reset() = assert(delta ne delta1) @@ -17,4 +18,5 @@ class ScratchData: positionBuffer.reset() pickledIndices.clear() commentBuffer.reset() + attributeBuffer.reset() diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index 9fe3fb282aa2..ae15421c82f3 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -9,7 +9,7 @@ import Contexts.*, Decorators.* import Names.Name import TastyUnpickler.* import util.Spans.offsetToInt -import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection} +import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection, AttributesSection} import java.nio.file.{Files, Paths} import dotty.tools.io.{JarArchive, Path} @@ -84,14 +84,16 @@ class TastyPrinter(bytes: Array[Byte]) { case Some(s) => sb.append(s) case _ => } - sb.append("\n\n") unpickle(new PositionSectionUnpickler) match { - case Some(s) => sb.append(s) + case Some(s) => sb.append("\n\n").append(s) case _ => } - sb.append("\n\n") unpickle(new CommentSectionUnpickler) match { - case Some(s) => sb.append(s) + case Some(s) => sb.append("\n\n").append(s) + case _ => + } + unpickle(new AttributesSectionUnpickler) match { + case Some(s) => sb.append("\n\n").append(s) case _ => } sb.result @@ -222,6 +224,20 @@ class TastyPrinter(bytes: Array[Byte]) { } } + class 
AttributesSectionUnpickler extends SectionUnpickler[String](AttributesSection) { + import dotty.tools.tasty.TastyFormat.attributeTagToString + private val sb: StringBuilder = new StringBuilder + + def unpickle(reader: TastyReader, tastyName: NameTable): String = { + sb.append(s" ${reader.endAddr.index - reader.currentAddr.index}") + val attributeTags = new AttributeUnpickler(reader).attributeTags + sb.append(s" attributes bytes:\n") + for attributeTag <- attributeTags do + sb.append(" ").append(attributeTagToString(attributeTag)).append("\n") + sb.result + } + } + protected def nameStr(str: String): String = str protected def treeStr(str: String): String = str protected def lengthStr(str: String): String = str diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 6a02605b6ed7..3d3481f371f7 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -52,11 +52,13 @@ import scala.compiletime.uninitialized * @param reader the reader from which to unpickle * @param posUnpicklerOpt the unpickler for positions, if it exists * @param commentUnpicklerOpt the unpickler for comments, if it exists + * @param attributeUnpicklerOpt the unpickler for attributes, if it exists */ class TreeUnpickler(reader: TastyReader, nameAtRef: NameTable, posUnpicklerOpt: Option[PositionUnpickler], - commentUnpicklerOpt: Option[CommentUnpickler]) { + commentUnpicklerOpt: Option[CommentUnpickler], + attributeUnpicklerOpt: Option[AttributeUnpickler]) { import TreeUnpickler.* import tpd.* @@ -97,6 +99,14 @@ class TreeUnpickler(reader: TastyReader, /** Was unpickled class compiled with capture checks? */ private var withCaptureChecks: Boolean = false + private val unpicklingScala2Library = + attributeUnpicklerOpt.exists(_.attributes.scala2StandardLibrary) + + /** This dependency was compiled with explicit nulls enabled */ + // TODO Use this to tag the symbols of this dependency as compiled with explicit nulls (see use of unpicklingScala2Library). + private val explicitNulls = + attributeUnpicklerOpt.exists(_.attributes.explicitNulls) + private def registerSym(addr: Addr, sym: Symbol) = symAtAddr(addr) = sym @@ -601,7 +611,8 @@ class TreeUnpickler(reader: TastyReader, val rhsStart = currentAddr val rhsIsEmpty = nothingButMods(end) if (!rhsIsEmpty) skipTree() - val (givenFlags, annotFns, privateWithin) = readModifiers(end) + val (givenFlags0, annotFns, privateWithin) = readModifiers(end) + val givenFlags = if isClass && unpicklingScala2Library then givenFlags0 | Scala2x | Scala2Tasty else givenFlags0 pickling.println(i"creating symbol $name at $start with flags ${givenFlags.flagsString}, isAbsType = $isAbsType, $ttag") val flags = normalizeFlags(tag, givenFlags, name, isAbsType, rhsIsEmpty) def adjustIfModule(completer: LazyType) = diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala index 8b3a783745fb..66b6759d9900 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala @@ -77,7 +77,7 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete // Create extension methods, except if the class comes from Scala 2 // because it adds extension methods before pickling. 
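With the attribute available, `TreeUnpickler` (above) marks classes unpickled from the Scala 2 standard library's TASTy as `Scala2x | Scala2Tasty`, and the rewritten guard just below can tell them apart from classfile-based Scala 2 value classes, which already carry their extension methods. A toy bitmask model of the `is(flag, butNot = ...)` test; the constants are local stand-ins, not dotc's actual `Flags` encoding:

```scala
val Scala2x: Long     = 1L << 0 // defined in Scala 2 (classfile or TASTy)
val Scala2Tasty: Long = 1L << 1 // set when SCALA2STANDARDLIBRARYattr is present

def is(flags: Long, flag: Long, butNot: Long): Boolean =
  (flags & flag) != 0 && (flags & butNot) == 0

@main def flagDemo(): Unit =
  val fromClassfile = Scala2x               // Scala 2 unpickled from a classfile
  val fromTasty     = Scala2x | Scala2Tasty // Scala 2 stdlib unpickled from TASTy
  assert(is(fromClassfile, Scala2x, butNot = Scala2Tasty)) // guard skips synthesis
  assert(!is(fromTasty, Scala2x, butNot = Scala2Tasty))    // extension methods get created
```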
- if (!(valueClass.is(Scala2x))) + if !valueClass.is(Scala2x, butNot = Scala2Tasty) then for (decl <- valueClass.classInfo.decls) if isMethodWithExtension(decl) then enterInModuleClass(createExtensionMethod(decl, moduleClassSym.symbol)) diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 4aea14fed2fc..57b8da058073 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -108,6 +108,12 @@ class Pickler extends Phase { pickler, treePkl.buf.addrOfTree, treePkl.docString, tree, scratch.commentBuffer) + val attributes = Attributes( + scala2StandardLibrary = ctx.settings.YcompileScala2Library.value, + explicitNulls = ctx.settings.YexplicitNulls.value, + ) + AttributePickler.pickleAttributes(attributes, pickler, scratch.attributeBuffer) + val pickled = pickler.assembleParts() def rawBytes = // not needed right now, but useful to print raw format. diff --git a/scala2-library-tasty-tests/src/Main.scala b/scala2-library-tasty-tests/src/Main.scala index b579baf6513d..b33219271201 100644 --- a/scala2-library-tasty-tests/src/Main.scala +++ b/scala2-library-tasty-tests/src/Main.scala @@ -17,6 +17,7 @@ object HelloWorld: testScala2ObjectParents() testScala2CaseClassUnderscoreMembers() testScalaNumberUnderlying() + testArrayOps() scala.collection.mutable.UnrolledBufferTest.test() } @@ -68,3 +69,9 @@ object HelloWorld: val _: Object = MyNumber2(BigInt(1)).underlying val _: Object = (MyNumber2(BigInt(1)): ScalaNumber).underlying } + + def testArrayOps() = { + new collection.ArrayOps[String](Array[String]("foo")).exists(x => true) + } + +end HelloWorld diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index d91295f06af5..7e412a5e67a7 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -265,8 +265,15 @@ All elements of a position section are serialized as Ints Standard Section: "Comments" Comment* ```none - Comment = Length Bytes LongInt // Raw comment's bytes encoded as UTF-8, followed by the comment's coordinates. + Comment = UTF8 LongInt // Raw comment's bytes encoded as UTF-8, followed by the comment's coordinates. ``` + +Standard Section: "Attributes" Attribute* +```none + Attribute = SCALA2STANDARDLIBRARYattr + EXPLICITNULLSattr +``` + **************************************************************************************/ object TastyFormat { @@ -361,6 +368,7 @@ object TastyFormat { final val ASTsSection = "ASTs" final val PositionsSection = "Positions" final val CommentsSection = "Comments" + final val AttributesSection = "Attributes" /** Tags used to serialize names, should update [[TastyFormat$.nameTagToString]] if a new constant is added */ class NameTags { @@ -597,6 +605,12 @@ object TastyFormat { final val firstNatASTTreeTag = IDENT final val firstLengthTreeTag = PACKAGE + + // Attributes tags + + final val SCALA2STANDARDLIBRARYattr = 1 + final val EXPLICITNULLSattr = 2 + /** Useful for debugging */ def isLegalTag(tag: Int): Boolean = firstSimpleTreeTag <= tag && tag <= SPLITCLAUSE || @@ -812,6 +826,11 @@ object TastyFormat { case HOLE => "HOLE" } + def attributeTagToString(tag: Int): String = tag match { + case SCALA2STANDARDLIBRARYattr => "SCALA2STANDARDLIBRARYattr" + case EXPLICITNULLSattr => "EXPLICITNULLSattr" + } + /** @return If non-negative, the number of leading references (represented as nats) of a length/trees entry. 
* If negative, minus the number of leading non-reference trees. */ From b5bfbab2eb699d064850de289c9928a1a221c903 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 Nov 2023 17:54:30 +0100 Subject: [PATCH 161/216] Change stdlib collections to be compiled under capture checking --- tests/pos-special/stdlib/Test1.scala | 34 ++ tests/pos-special/stdlib/Test2.scala | 232 +++++++ .../stdlib/collection/ArrayOps.scala | 113 ++-- .../stdlib/collection/BitSet.scala | 6 +- .../stdlib/collection/BufferedIterator.scala | 2 +- .../stdlib/collection/BuildFrom.scala | 36 +- .../stdlib/collection/DefaultMap.scala | 2 +- .../stdlib/collection/Factory.scala | 106 ++-- .../stdlib/collection/Hashing.scala | 1 + .../stdlib/collection/IndexedSeq.scala | 9 +- .../stdlib/collection/IndexedSeqView.scala | 85 +-- .../stdlib/collection/Iterable.scala | 157 ++--- .../stdlib/collection/IterableOnce.scala | 145 ++--- .../stdlib/collection/Iterator.scala | 148 ++--- .../stdlib/collection/JavaConverters.scala | 1 + .../stdlib/collection/LazyZipOps.scala | 21 +- .../stdlib/collection/LinearSeq.scala | 9 +- tests/pos-special/stdlib/collection/Map.scala | 36 +- .../stdlib/collection/MapView.scala | 69 ++- .../stdlib/collection/Searching.scala | 1 + tests/pos-special/stdlib/collection/Seq.scala | 52 +- .../stdlib/collection/SeqMap.scala | 1 + .../stdlib/collection/SeqView.scala | 105 ++-- tests/pos-special/stdlib/collection/Set.scala | 10 +- .../stdlib/collection/SortedMap.scala | 12 +- .../stdlib/collection/SortedOps.scala | 1 + .../stdlib/collection/SortedSet.scala | 5 +- .../stdlib/collection/Stepper.scala | 40 +- .../stdlib/collection/StepperShape.scala | 3 +- .../StrictOptimizedIterableOps.scala | 24 +- .../collection/StrictOptimizedMapOps.scala | 8 +- .../collection/StrictOptimizedSeqOps.scala | 14 +- .../collection/StrictOptimizedSetOps.scala | 1 + .../StrictOptimizedSortedMapOps.scala | 3 +- .../stdlib/collection/StringOps.scala | 37 +- .../stdlib/collection/StringParsers.scala | 17 +- .../pos-special/stdlib/collection/View.scala | 140 +++-- .../stdlib/collection/WithFilter.scala | 8 +- .../stdlib/collection/concurrent/Map.scala | 1 + .../collection/concurrent/TrieMap.scala | 39 +- .../collection/convert/AsJavaConverters.scala | 1 + .../collection/convert/AsJavaExtensions.scala | 1 + .../convert/AsScalaConverters.scala | 1 + .../convert/AsScalaExtensions.scala | 1 + .../convert/ImplicitConversions.scala | 1 + .../convert/JavaCollectionWrappers.scala | 12 +- .../collection/convert/StreamExtensions.scala | 1 + .../convert/impl/ArrayStepper.scala | 1 + .../convert/impl/BinaryTreeStepper.scala | 3 +- .../convert/impl/BitSetStepper.scala | 9 +- .../convert/impl/ChampStepper.scala | 1 + .../convert/impl/InOrderStepperBase.scala | 1 + .../convert/impl/IndexedSeqStepper.scala | 1 + .../convert/impl/IndexedStepperBase.scala | 1 + .../convert/impl/IteratorStepper.scala | 1 + .../convert/impl/NumericRangeStepper.scala | 1 + .../convert/impl/RangeStepper.scala | 3 +- .../convert/impl/StringStepper.scala | 1 + .../convert/impl/TableStepper.scala | 1 + .../convert/impl/VectorStepper.scala | 7 +- .../collection/generic/BitOperations.scala | 1 + .../generic/DefaultSerializationProxy.scala | 5 +- .../collection/generic/IsIterable.scala | 1 + .../collection/generic/IsIterableOnce.scala | 1 + .../stdlib/collection/generic/IsMap.scala | 1 + .../stdlib/collection/generic/IsSeq.scala | 41 +- .../collection/generic/Subtractable.scala | 1 + .../stdlib/collection/generic/package.scala | 1 + .../collection/immutable/ArraySeq.scala | 53 +- 
.../stdlib/collection/immutable/BitSet.scala | 5 +- .../collection/immutable/ChampCommon.scala | 3 +- .../stdlib/collection/immutable/HashMap.scala | 22 +- .../stdlib/collection/immutable/HashSet.scala | 22 +- .../stdlib/collection/immutable/IntMap.scala | 18 +- .../collection/immutable/Iterable.scala | 4 +- ...{LazyList.scala => LazyListIterable.scala} | 499 ++++++++------- .../stdlib/collection/immutable/List.scala | 25 +- .../stdlib/collection/immutable/ListMap.scala | 12 +- .../stdlib/collection/immutable/ListSet.scala | 4 +- .../stdlib/collection/immutable/LongMap.scala | 16 +- .../stdlib/collection/immutable/Map.scala | 32 +- .../collection/immutable/NumericRange.scala | 4 +- .../stdlib/collection/immutable/Queue.scala | 7 +- .../stdlib/collection/immutable/Range.scala | 5 +- .../collection/immutable/RedBlackTree.scala | 7 +- .../stdlib/collection/immutable/Seq.scala | 12 +- .../stdlib/collection/immutable/SeqMap.scala | 10 +- .../stdlib/collection/immutable/Set.scala | 12 +- .../collection/immutable/SortedMap.scala | 13 +- .../collection/immutable/SortedSet.scala | 3 +- .../stdlib/collection/immutable/Stream.scala | 568 ------------------ .../immutable/StrictOptimizedSeqOps.scala | 8 +- .../stdlib/collection/immutable/TreeMap.scala | 16 +- .../collection/immutable/TreeSeqMap.scala | 26 +- .../stdlib/collection/immutable/TreeSet.scala | 9 +- .../stdlib/collection/immutable/Vector.scala | 54 +- .../collection/immutable/VectorMap.scala | 8 +- .../collection/immutable/WrappedString.scala | 18 +- .../stdlib/collection/immutable/package.scala | 2 +- .../stdlib/collection/mutable/AnyRefMap.scala | 60 +- .../collection/mutable/ArrayBuffer.scala | 35 +- .../collection/mutable/ArrayBuilder.scala | 7 +- .../collection/mutable/ArrayDeque.scala | 19 +- .../stdlib/collection/mutable/ArraySeq.scala | 26 +- .../stdlib/collection/mutable/BitSet.scala | 9 +- .../stdlib/collection/mutable/Buffer.scala | 22 +- .../stdlib/collection/mutable/Builder.scala | 14 +- .../mutable/CheckedIndexedSeqView.scala | 63 +- .../stdlib/collection/mutable/Cloneable.scala | 2 +- .../mutable/CollisionProofHashMap.scala | 35 +- .../stdlib/collection/mutable/Growable.scala | 8 +- .../collection/mutable/GrowableBuilder.scala | 4 +- .../stdlib/collection/mutable/HashMap.scala | 19 +- .../stdlib/collection/mutable/HashSet.scala | 19 +- .../stdlib/collection/mutable/HashTable.scala | 5 +- .../collection/mutable/ImmutableBuilder.scala | 1 + .../collection/mutable/IndexedSeq.scala | 1 + .../stdlib/collection/mutable/Iterable.scala | 5 +- .../collection/mutable/LinkedHashMap.scala | 11 +- .../collection/mutable/LinkedHashSet.scala | 11 +- .../collection/mutable/ListBuffer.scala | 20 +- .../stdlib/collection/mutable/ListMap.scala | 9 +- .../stdlib/collection/mutable/LongMap.scala | 51 +- .../stdlib/collection/mutable/Map.scala | 13 +- .../stdlib/collection/mutable/MultiMap.scala | 3 +- .../collection/mutable/MutationTracker.scala | 3 +- .../collection/mutable/OpenHashMap.scala | 11 +- .../collection/mutable/PriorityQueue.scala | 17 +- .../stdlib/collection/mutable/Queue.scala | 9 +- .../collection/mutable/RedBlackTree.scala | 33 +- .../collection/mutable/ReusableBuilder.scala | 1 + .../stdlib/collection/mutable/Seq.scala | 1 + .../stdlib/collection/mutable/SeqMap.scala | 1 + .../stdlib/collection/mutable/Set.scala | 1 + .../collection/mutable/Shrinkable.scala | 5 +- .../stdlib/collection/mutable/SortedMap.scala | 9 +- .../stdlib/collection/mutable/SortedSet.scala | 1 + .../stdlib/collection/mutable/Stack.scala | 10 +- 
.../collection/mutable/StringBuilder.scala | 9 +- .../stdlib/collection/mutable/TreeMap.scala | 9 +- .../stdlib/collection/mutable/TreeSet.scala | 9 +- .../collection/mutable/UnrolledBuffer.scala | 19 +- .../collection/mutable/WeakHashMap.scala | 9 +- .../stdlib/collection/mutable/package.scala | 1 + .../stdlib/collection/package.scala | 1 + 145 files changed, 1983 insertions(+), 1980 deletions(-) create mode 100644 tests/pos-special/stdlib/Test1.scala create mode 100644 tests/pos-special/stdlib/Test2.scala rename tests/pos-special/stdlib/collection/immutable/{LazyList.scala => LazyListIterable.scala} (66%) delete mode 100644 tests/pos-special/stdlib/collection/immutable/Stream.scala diff --git a/tests/pos-special/stdlib/Test1.scala b/tests/pos-special/stdlib/Test1.scala new file mode 100644 index 000000000000..9ee4e7cfa6a1 --- /dev/null +++ b/tests/pos-special/stdlib/Test1.scala @@ -0,0 +1,34 @@ +import language.experimental.captureChecking +import collection.{View, Seq} +import collection.mutable.{ArrayBuffer, ListBuffer} + +import java.io.* + +object Test0: + + def usingLogFile[sealed T](op: FileOutputStream^ => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result + + def test(xs: List[Int]) = + usingLogFile: f => + xs.map: x => + f.write(x) + x * x + +object Test1: + def test(it: Iterator[Int]^, v: View[Int]^) = + val isEven: Int ->{cap[test]} Boolean = _ % 2 == 0 + val it2 = it.filter(isEven) + val _: Iterator[Int]^{it, isEven} = it2 + val it2c: Iterator[Int]^{it2} = it2 + val v2 = v.filter(isEven) + val _: View[Int]^{v, isEven} = v2 + val v2c: View[Int]^{v2} = v2 + val v3 = v.drop(2) + val _: View[Int]^{v} = v3 + val v3c: View[Int]^{v3} = v3 + val (xs6, xs7) = v.partition(isEven) + val (xs6a, xs7a) = v.partition(_ % 2 == 0) diff --git a/tests/pos-special/stdlib/Test2.scala b/tests/pos-special/stdlib/Test2.scala new file mode 100644 index 000000000000..a59da522b183 --- /dev/null +++ b/tests/pos-special/stdlib/Test2.scala @@ -0,0 +1,232 @@ +import scala.reflect.ClassTag +import language.experimental.captureChecking +import collection.{View, Seq} +import collection.mutable.{ArrayBuffer, ListBuffer} + +object Test { + + def seqOps(xs: Seq[Int]) = { // try with Seq[Int]^{cap} + val strPlusInt: (String, Int) => String = _ + _ + val intPlusStr: (Int, String) => String = _ + _ + val isEven: Int => Boolean = _ % 2 == 0 + val isNonNeg: Int => Boolean = _ > 0 + val flips: Int => List[Int] = x => x :: -x :: Nil + val x1 = xs.foldLeft("")(strPlusInt) + val y1: String = x1 + val x2 = xs.foldRight("")(intPlusStr) + val y2: String = x2 + val x3 = xs.indexWhere(isEven) + val y3: Int = x3 + val x4 = xs.head + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Int] = x5 + val (xs6, xs7) = xs.partition(isEven) + val ys6: Seq[Int] = xs6 + val ys7: Seq[Int] = xs7 + val xs8 = xs.drop(2) + val ys8: Seq[Int] = xs8 + val xs9 = xs.map(isNonNeg) + val ys9: Seq[Boolean] = xs9 + val xs10 = xs.flatMap(flips) + val ys10: Seq[Int] = xs10 + val xs11 = xs ++ xs + val ys11: Seq[Int] = xs11 + val xs12 = xs ++ Nil + val ys12: Seq[Int] = xs12 + val xs13 = Nil ++ xs + val ys13: Seq[Int] = xs13 + val xs14 = xs ++ ("a" :: Nil) + val ys14: Seq[Any] = xs14 + val xs15 = xs.zip(xs9) + val ys15: Seq[(Int, Boolean)] = xs15 + val xs16 = xs.reverse + val ys16: Seq[Int] = xs16 + println("-------") + println(x1) + println(x2) + println(x3) + println(x4) + println(x5) + println(xs6) + println(xs7) + println(xs8) + println(xs9) + println(xs10) + println(xs11) + println(xs12) + 
println(xs13) + println(xs14) + println(xs15) + println(xs16) + } + + def iterOps(xs: => Iterator[Int]^) = + val strPlusInt: (String, Int) => String = _ + _ + val intPlusStr: (Int, String) => String = _ + _ + val isEven: Int ->{cap[iterOps]} Boolean = _ % 2 == 0 + val isNonNeg: Int => Boolean = _ > 0 + val flips: Int => List[Int] = x => x :: -x :: Nil + val x1 = xs.foldLeft("")(strPlusInt) + val y1: String = x1 + val x2 = xs.foldRight("")(intPlusStr) + val y2: String = x2 + val x4 = xs.next() + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Int] = x5 + val (xs6, xs7) = xs.partition(isEven) + val ys6: Iterator[Int]^{xs6, isEven} = xs6 + val ys7: Iterator[Int]^{xs7, isEven} = xs7 + val (xs6a, xs7a) = xs.partition(_ % 2 == 0) + val ys6a: Iterator[Int]^{xs6} = xs6 + val ys7a: Iterator[Int]^{xs7} = xs7 + val xs8 = xs.drop(2) + val ys8: Iterator[Int]^{xs8} = xs8 + val xs9 = xs.map(isNonNeg) + val ys9: Iterator[Boolean]^{xs9} = xs9 + val xs10 = xs.flatMap(flips) + val ys10: Iterator[Int]^{xs10} = xs10 + val xs11 = xs ++ xs + val ys11: Iterator[Int]^{xs11} = xs11 + val xs12 = xs ++ Nil + val ys12: Iterator[Int]^{xs12} = xs12 + val xs13 = Nil ++ xs + val ys13: List[Int] = xs13 + val xs14 = xs ++ ("a" :: Nil) + val ys14: Iterator[Any]^{xs14} = xs14 + val xs15 = xs.zip(xs9) + val ys15: Iterator[(Int, Boolean)]^{xs15} = xs15 + println("-------") + println(x1) + println(x2) + println(x4) + println(x5) + println(xs6.to(List)) + println(xs7.to(List)) + println(xs8.to(List)) + println(xs9.to(List)) + println(xs10.to(List)) + println(xs11.to(List)) + println(xs12.to(List)) + println(xs13.to(List)) + println(xs14.to(List)) + println(xs15.to(List)) + + def viewOps(xs: View[Int]^) = { + val strPlusInt: (String, Int) => String = _ + _ + val intPlusStr: (Int, String) => String = _ + _ + val isEven: Int ->{cap[viewOps]} Boolean = _ % 2 == 0 + val isNonNeg: Int => Boolean = _ > 0 + val flips: Int => List[Int] = x => x :: -x :: Nil + val x1 = xs.foldLeft("")(strPlusInt) + val y1: String = x1 + val x2 = xs.foldRight("")(intPlusStr) + val y2: String = x2 + //val x3 = xs.indexWhere(_ % 2 == 0) // indexWhere does not exist on View + //val y3: Int = x3 + val x4 = xs.head + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Int] = x5 + val (xs6, xs7) = xs.partition(isEven) + val ys6: View[Int]^{xs6, isEven} = xs6 + val ys7: View[Int]^{xs7, isEven} = xs7 + val (xs6a, xs7a) = xs.partition(_ % 2 == 0) + val ys6a: View[Int]^{xs6} = xs6 + val ys7a: View[Int]^{xs7} = xs7 + val xs8 = xs.drop(2) + val ys8: View[Int]^{xs8} = xs8 + val xs9 = xs.map(isNonNeg) + val ys9: View[Boolean]^{xs9} = xs9 + val xs10 = xs.flatMap(flips) + val ys10: View[Int]^{xs10} = xs10 + val xs11 = xs ++ xs + val ys11: View[Int]^{xs11} = xs11 + val xs12 = xs ++ Nil + val ys12: View[Int]^{xs12} = xs12 + val xs13 = Nil ++ xs + val ys13: List[Int] = xs13 + val xs14 = xs ++ ("a" :: Nil) + val ys14: View[Any]^{xs14} = xs14 + val xs15 = xs.zip(xs9) + val ys15: View[(Int, Boolean)]^{xs15} = xs15 + println("-------") + println(x1) + println(x2) + println(x4) + println(x5) + println(xs6.to(List)) + println(xs7.to(List)) + println(xs8.to(List)) + println(xs9.to(List)) + println(xs10.to(List)) + println(xs11.to(List)) + println(xs12.to(List)) + println(xs13.to(List)) + println(xs14.to(List)) + println(xs15.to(List)) + } + + def stringOps(xs: String) = { + val x1 = xs.foldLeft("")(_ + _) + val y1: String = x1 + val x2 = xs.foldRight("")(_ + _) + val y2: String = x2 + val x3 = xs.indexWhere(_ % 2 == 0) + val y3: Int = x3 + val x4 = xs.head + val y4: Int = 
x4 + val x5 = xs.to(List) + val y5: List[Char] = x5 + val (xs6, xs7) = xs.partition(_ % 2 == 0) + val ys6: String = xs6 + val ys7: String = xs7 + val xs8 = xs.drop(2) + val ys8: String = xs8 + val xs9 = xs.map(_ + 1) + val ys9: Seq[Int] = xs9 + val xs9a = xs.map(_.toUpper) + val ys9a: String = xs9a + val xs10 = xs.flatMap((x: Char) => s"$x,$x") + val ys10: String = xs10 + val xs11 = xs ++ xs + val ys11: String = xs11 + val ops = collection.StringOps(xs) // !!! otherwise we can a "cannot establish reference" + val xs13 = Nil ++ ops.iterator + val ys13: List[Char] = xs13 + val xs14 = xs ++ ("xyz" :: Nil) + val ys14: Seq[Any] = xs14 + val xs15 = xs.zip(xs9) + val ys15: Seq[(Char, Int)] = xs15 + println("-------") + println(x1) + println(x2) + println(x3) + println(x4) + println(x5) + println(xs6) + println(xs7) + println(xs8) + println(xs9) + println(xs9a) + println(xs10) + println(xs11) + println(xs13) + println(xs14) + println(xs15) + } + + def main(args: Array[String]) = { + val ints = List(1, 2, 3) + val intsBuf = ints.to(ArrayBuffer) + val intsListBuf = ints.to(ListBuffer) + val intsView = ints.view + seqOps(ints) + seqOps(intsBuf) + seqOps(intsListBuf) + viewOps(intsView) + iterOps(ints.iterator) + stringOps("abc") + } +} diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala index 485427886625..a52fd0dbd162 100644 --- a/tests/pos-special/stdlib/collection/ArrayOps.scala +++ b/tests/pos-special/stdlib/collection/ArrayOps.scala @@ -15,6 +15,7 @@ package collection import java.lang.Math.{max, min} import java.util.Arrays +import language.experimental.captureChecking import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally genericArrayOps => _, @@ -53,14 +54,14 @@ import scala.util.Sorting object ArrayOps { @SerialVersionUID(3L) - private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + private class ArrayView[sealed A](xs: Array[A]) extends AbstractIndexedSeqView[A] { def length = xs.length def apply(n: Int) = xs(n) override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") } /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ - class WithFilter[A](p: A => Boolean, xs: Array[A]) { + class WithFilter[sealed A](p: A => Boolean, xs: Array[A]) { /** Apply `f` to each element for its side effects. * Note: [U] parameter needed to help scalac's type inference. @@ -82,7 +83,7 @@ object ArrayOps { * @return a new array resulting from applying the given function * `f` to each element of this array and collecting the results. */ - def map[B: ClassTag](f: A => B): Array[B] = { + def map[sealed B: ClassTag](f: A => B): Array[B] = { val b = ArrayBuilder.make[B] var i = 0 while (i < xs.length) { @@ -101,7 +102,7 @@ object ArrayOps { * @return a new array resulting from applying the given collection-valued function * `f` to each element of this array and concatenating the results. 
*/ - def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + def flatMap[sealed B: ClassTag](f: A => IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] var i = 0 while(i < xs.length) { @@ -112,15 +113,15 @@ object ArrayOps { b.result() } - def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = flatMap[B](x => asIterable(f(x))) /** Creates a new non-strict filter which combines this filter with the given predicate. */ - def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + def withFilter(q: A => Boolean): WithFilter[A]^{this, q} = new WithFilter[A](a => p(a) && q(a), xs) } @SerialVersionUID(3L) - private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { private[this] var pos = 0 private[this] val len = xs.length override def knownSize: Int = len - pos @@ -143,7 +144,7 @@ object ArrayOps { } @SerialVersionUID(3L) - private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private final class ReverseIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { private[this] var pos = xs.length-1 def hasNext: Boolean = pos >= 0 def next(): A = { @@ -160,7 +161,7 @@ object ArrayOps { } @SerialVersionUID(3L) - private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private final class GroupedIterator[sealed A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { private[this] var pos = 0 def hasNext: Boolean = pos < xs.length def next(): Array[A] = { @@ -196,7 +197,7 @@ object ArrayOps { * * @tparam A type of the elements contained in this array. */ -final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { +final class ArrayOps[sealed A](private val xs: Array[A]) extends AnyVal { @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) @@ -366,7 +367,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]]^{f} = Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) /** An array containing the first `n` elements of this array. */ @@ -504,7 +505,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], * and the second one made of those wrapped in [[scala.util.Right]]. 
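A pattern worth pausing on before more of the same: lazy adapters now advertise what they retain. `withFilter` above returns `ArrayOps.WithFilter[A]^{this, q}` because the adapter stores the predicate and runs it later, so anything the predicate captures stays reachable through it. A self-contained sketch of the same shape under `captureChecking`; `Channel`, `Filtered`, and `withFilterLike` are invented names, not stdlib API:

```scala
import language.experimental.captureChecking

class Channel: // stand-in capability
  def send(x: Int): Unit = ()

// Modelled on ArrayOps.WithFilter: a lazy adapter that stores its predicate.
class Filtered[A](xs: Array[A], p: A => Boolean):
  def foreach[U](f: A => U): Unit =
    var i = 0
    while i < xs.length do
      if p(xs(i)) then f(xs(i))
      i += 1

// The `^{p}` is the point: the result retains `p`, hence whatever `p` captures.
def withFilterLike[A](xs: Array[A], p: A => Boolean): Filtered[A]^{p} =
  Filtered(xs, p)

def demo(ch: Channel^): Unit =
  val notify: Int ->{ch} Boolean = x => { ch.send(x); x > 0 }
  val fil = withFilterLike(Array(1, 2, 3), notify)
  val _: Filtered[Int]^{notify} = fil // the adapter's type tracks the closure
```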
*/ - def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + def partitionMap[sealed A1: ClassTag, sealed A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { val res1 = ArrayBuilder.make[A1] val res2 = ArrayBuilder.make[A2] var i = 0 @@ -663,7 +664,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * All these operations apply to those elements of this array * which satisfy the predicate `p`. */ - def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs) + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A]^{p} = new ArrayOps.WithFilter[A](p, xs) /** Finds index of first occurrence of some value in this array after or at some start index. * @@ -776,7 +777,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Returns `z` if this array is empty. */ def foldLeft[B](z: B)(op: (B, A) => B): B = { - def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { val length = xs.length var v: Any = z var i = 0 @@ -815,7 +816,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * }}} * */ - def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = { + def scanLeft[sealed B : ClassTag](z: B)(op: (B, A) => B): Array[B] = { var v = z var i = 0 val res = new Array[B](xs.length + 1) @@ -838,7 +839,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * * @return a new array containing the prefix scan of the elements in this array */ - def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + def scan[sealed B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) /** Produces an array containing cumulative results of applying the binary * operator going right to left. @@ -854,7 +855,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * }}} * */ - def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = { + def scanRight[sealed B : ClassTag](z: B)(op: (A, B) => B): Array[B] = { var v = z var i = xs.length - 1 val res = new Array[B](xs.length + 1) @@ -882,7 +883,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Returns `z` if this array is empty. */ def foldRight[B](z: B)(op: (A, B) => B): B = { - def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { var v = z var i = xs.length - 1 while(i >= 0) { @@ -925,7 +926,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array resulting from applying the given function * `f` to each element of this array and collecting the results. */ - def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + def map[sealed B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { val len = xs.length val ys = new Array[B](len) if(len > 0) { @@ -962,7 +963,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array resulting from applying the given collection-valued function * `f` to each element of this array and concatenating the results. 
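The other recurring edit in this file is `sealed` on every type parameter that becomes an `Array` element. In this stage of the capture-checking design, a `sealed` type variable cannot be instantiated to a capturing type, which keeps capabilities from being smuggled into a mutable array and escaping their intended scope. An illustrative sketch, assuming this patch's experimental `sealed` syntax:

```scala
import language.experimental.captureChecking
import scala.reflect.ClassTag

def singleton[sealed T: ClassTag](x: T): Array[T] = Array(x)

def ok: Array[Int] = singleton(42) // fine: Int captures nothing

def leak(op: () => Unit) =
  singleton(op) // error under this design: T would be instantiated to the
                // capturing type () ->{cap} Unit
```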
*/ - def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + def flatMap[sealed B : ClassTag](f: A => IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] var i = 0 while(i < xs.length) { @@ -972,7 +973,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { b.result() } - def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = flatMap[B](x => asIterable(f(x))) /** Flattens a two-dimensional array by concatenating all its rows @@ -982,7 +983,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. * @return An array obtained by concatenating rows of this array. */ - def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + def flatten[sealed B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { val b = ArrayBuilder.make[B] val len = xs.length var size = 0 @@ -1015,7 +1016,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * `pf` to each element on which it is defined and collecting the results. * The order of the elements is preserved. */ - def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + def collect[sealed B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { val fallback: Any => Any = ArrayOps.fallback val b = ArrayBuilder.make[B] var i = 0 @@ -1049,7 +1050,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array containing pairs consisting of corresponding elements of this array and `that`. * The length of the returned array is the minimum of the lengths of this array and `that`. */ - def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + def zip[sealed B](that: IterableOnce[B]): Array[(A, B)] = { val b = new ArrayBuilder.ofRef[(A, B)]() val k = that.knownSize b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) @@ -1094,7 +1095,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * If this array is shorter than `that`, `thisElem` values are used to pad the result. * If `that` is shorter than this array, `thatElem` values are used to pad the result. */ - def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + def zipAll[sealed A1 >: A, sealed B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { val b = new ArrayBuilder.ofRef[(A1, B)]() val k = that.knownSize b.sizeHint(max(k, xs.length)) @@ -1131,26 +1132,26 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** A copy of this array with an element appended. */ - def appended[B >: A : ClassTag](x: B): Array[B] = { + def appended[sealed B >: A : ClassTag](x: B): Array[B] = { val dest = Array.copyAs[B](xs, xs.length+1) dest(xs.length) = x dest } - @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + @`inline` final def :+ [sealed B >: A : ClassTag](x: B): Array[B] = appended(x) /** A copy of this array with an element prepended. 
*/ - def prepended[B >: A : ClassTag](x: B): Array[B] = { + def prepended[sealed B >: A : ClassTag](x: B): Array[B] = { val dest = new Array[B](xs.length + 1) dest(0) = x Array.copy(xs, 0, dest, 1, xs.length) dest } - @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + @`inline` final def +: [sealed B >: A : ClassTag](x: B): Array[B] = prepended(x) /** A copy of this array with all elements of a collection prepended. */ - def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + def prependedAll[sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] val k = prefix.knownSize if(k >= 0) b.sizeHint(k + xs.length) @@ -1161,18 +1162,18 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** A copy of this array with all elements of an array prepended. */ - def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + def prependedAll[sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { val dest = Array.copyAs[B](prefix, prefix.length+xs.length) Array.copy(xs, 0, dest, prefix.length, xs.length) dest } - @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + @`inline` final def ++: [sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) - @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + @`inline` final def ++: [sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) /** A copy of this array with all elements of a collection appended. */ - def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + def appendedAll[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] val k = suffix.knownSize if(k >= 0) b.sizeHint(k + xs.length) @@ -1182,23 +1183,23 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** A copy of this array with all elements of an array appended. 
*/ - def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + def appendedAll[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { val dest = Array.copyAs[B](xs, xs.length+suffix.length) Array.copy(suffix, 0, dest, xs.length, suffix.length) dest } - @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + @`inline` final def :++ [sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + @`inline` final def :++ [sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + @`inline` final def concat[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + @`inline` final def concat[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + @`inline` final def ++[sealed B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) - @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + @`inline` final def ++[sealed B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) /** Tests whether this array contains a given value as an element. * @@ -1217,7 +1218,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param other The patch values * @param replaced The number of values in the original array that are replaced by the patch. */ - def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { + def patch[sealed B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { val b = ArrayBuilder.make[B] val k = other.knownSize val r = if(replaced < 0) 0 else replaced @@ -1243,7 +1244,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a pair of Arrays, containing, respectively, the first and second half * of each element pair of this Array. */ - def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { + def unzip[sealed A1, sealed A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { val a1 = new Array[A1](xs.length) val a2 = new Array[A2](xs.length) var i = 0 @@ -1272,7 +1273,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a triple of Arrays, containing, respectively, the first, second, and third * elements from each element triple of this Array. */ - def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], + def unzip3[sealed A1, sealed A2, sealed A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { val a1 = new Array[A1](xs.length) val a2 = new Array[A2](xs.length) @@ -1294,7 +1295,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param asArray A function that converts elements of this array to rows - arrays of type `B`. * @return An array obtained by replacing elements of this arrays with rows the represent. 
*/ - def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + def transpose[sealed B](implicit asArray: A => Array[B]): Array[Array[B]] = { val aClass = xs.getClass.getComponentType val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) if (xs.length == 0) bb.result() @@ -1345,7 +1346,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @tparam B the type of the elements after being transformed by `f` * @return a new array consisting of all the elements of this array without duplicates. */ - def distinctBy[B](f: A => B): Array[A] = + def distinctBy[B](f: A -> B): Array[A] = ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() /** A copy of this array with an element value appended until a given target length is reached. @@ -1357,7 +1358,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * all elements of this array followed by the minimal number of occurrences of `elem` so * that the resulting collection has a length of at least `len`. */ - def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = { + def padTo[sealed B >: A : ClassTag](len: Int, elem: B): Array[B] = { var i = xs.length val newlen = max(i, len) val dest = Array.copyAs[B](xs, newlen) @@ -1417,7 +1418,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @tparam K the type of keys returned by the discriminator function * @tparam B the type of values returned by the transformation function */ - def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + def groupMap[K, sealed B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { val m = mutable.Map.empty[K, ArrayBuilder[B]] val len = xs.length var i = 0 @@ -1444,7 +1445,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param xs the array to fill. * @tparam B the type of the elements of the array. */ - def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0) /** Copy elements of this array to another array. * Fills the given array `xs` starting at index `start`. @@ -1455,7 +1456,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param start the starting index within the destination array. * @tparam B the type of the elements of the array. */ - def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) /** Copy elements of this array to another array. * Fills the given array `xs` starting at index `start` with at most `len` values. @@ -1467,7 +1468,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param len the maximal number of elements to copy. * @tparam B the type of the elements of the array. */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) if (copied > 0) { Array.copy(this.xs, 0, xs, start, copied) @@ -1476,7 +1477,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** Create a copy of this array with the specified element type. 
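One signature above goes the other way: `distinctBy` now takes `f: A -> B`, the pure function arrow, rather than `A => B`, presumably because the strict `Array[A]` result could not record the captures of `f`. A sketch of the distinction; `Channel` and `byKey` are invented:

```scala
import language.experimental.captureChecking

class Channel:
  def send(x: Int): Unit = ()

// `->` is the pure arrow: arguments may not capture capabilities.
def byKey(f: Int -> String): Unit = ()

def demo(ch: Channel^): Unit =
  byKey(x => x.toString) // ok: captures nothing
  val tracked: Int ->{ch} String = x => { ch.send(x); x.toString }
  byKey(tracked) // error under this design:
                 // Int ->{ch} String is not a subtype of Int -> String
```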
*/ - def toArray[B >: A: ClassTag]: Array[B] = { + def toArray[sealed B >: A: ClassTag]: Array[B] = { val destination = new Array[B](xs.length) copyToArray(destination, 0) destination @@ -1495,7 +1496,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { // can't use a default arg because we already have another overload with a default arg /** Tests whether this array starts with the given array. */ - @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + @`inline` def startsWith[sealed B >: A](that: Array[B]): Boolean = startsWith(that, 0) /** Tests whether this array contains the given array at a given index. * @@ -1504,7 +1505,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return `true` if the array `that` is contained in this array at * index `offset`, otherwise `false`. */ - def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + def startsWith[sealed B >: A](that: Array[B], offset: Int): Boolean = { val safeOffset = offset.max(0) val thatl = that.length if(thatl > xs.length-safeOffset) thatl == 0 @@ -1523,7 +1524,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param that the array to test * @return `true` if this array has `that` as a suffix, `false` otherwise. */ - def endsWith[B >: A](that: Array[B]): Boolean = { + def endsWith[sealed B >: A](that: Array[B]): Boolean = { val thatl = that.length val off = xs.length - thatl if(off < 0) false @@ -1543,7 +1544,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. */ - def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + def updated[sealed B >: A : ClassTag](index: Int, elem: B): Array[B] = { if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") val dest = toArray[B] dest(index) = elem diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala index e8ca89806455..39c15dbe808f 100644 --- a/tests/pos-special/stdlib/collection/BitSet.scala +++ b/tests/pos-special/stdlib/collection/BitSet.scala @@ -18,7 +18,7 @@ import java.io.{ObjectInputStream, ObjectOutputStream} import scala.annotation.nowarn import scala.collection.Stepper.EfficientSplit import scala.collection.mutable.Builder - +import language.experimental.captureChecking /** Base type of bitsets. 
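The `BitSet` hunks that follow show the complementary convention for strict collections: `fromSpecific` accepts an `IterableOnce[Int]^`, a source with an arbitrary capture set, yet still returns a plain `BitSet`, because every element is copied out before the method returns. The same pattern in miniature, as a sketch:

```scala
import language.experimental.captureChecking
import scala.collection.mutable.ListBuffer

// Strict-copy pattern: accept any capturing source, return a pure result.
def drain(it: Iterator[Int]^): List[Int] =
  val buf = ListBuffer.empty[Int]
  while it.hasNext do buf += it.next()
  buf.toList // List[Int] is pure: nothing of `it` is retained
```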
* @@ -33,7 +33,7 @@ import scala.collection.mutable.Builder * @define Coll `BitSet` */ trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] { - override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder override def empty: BitSet = bitSetFactory.empty @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") @@ -48,7 +48,7 @@ object BitSet extends SpecificIterableFactory[Int, BitSet] { def empty: BitSet = immutable.BitSet.empty def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder - def fromSpecific(it: IterableOnce[Int]): BitSet = immutable.BitSet.fromSpecific(it) + def fromSpecific(it: IterableOnce[Int]^): BitSet = immutable.BitSet.fromSpecific(it) @SerialVersionUID(3L) private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable { diff --git a/tests/pos-special/stdlib/collection/BufferedIterator.scala b/tests/pos-special/stdlib/collection/BufferedIterator.scala index bc35ee0a25da..cca40dd31d40 100644 --- a/tests/pos-special/stdlib/collection/BufferedIterator.scala +++ b/tests/pos-special/stdlib/collection/BufferedIterator.scala @@ -11,7 +11,7 @@ */ package scala.collection - +import language.experimental.captureChecking /** Buffered iterators are iterators which provide a method `head` * that inspects the next element without discarding it. diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala index bc9c49d9493c..0a3cc199d4dc 100644 --- a/tests/pos-special/stdlib/collection/BuildFrom.scala +++ b/tests/pos-special/stdlib/collection/BuildFrom.scala @@ -16,6 +16,8 @@ import scala.annotation.implicitNotFound import scala.collection.mutable.Builder import scala.collection.immutable.WrappedString import scala.reflect.ClassTag +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure /** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. * Implicit instances of `BuildFrom` are available for all collection types. @@ -26,7 +28,11 @@ import scala.reflect.ClassTag */ @implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.") trait BuildFrom[-From, -A, +C] extends Any { self => - def fromSpecific(from: From)(it: IterableOnce[A]): C + def fromSpecific(from: From)(it: IterableOnce[A]^): C + // !!! this is wrong, we need two versions of fromSpecific; one mapping + // to C^{it} when C is an Iterable, and one mapping to C when C is a Seq, Map, or Set. + // But that requires a large scale refactoring of BuildFrom. The unsafeAssumePure + // calls in this file are needed to sweep that problem under the carpet. /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
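The `!!!` note above marks a known hole: with a single `fromSpecific`, the result type cannot say whether the built collection retains the source. A hypothetical split, sketched only to make the note concrete (neither trait exists in this patch):

```scala
import language.experimental.captureChecking

trait StrictBuildFrom[-From, -A, +C]:
  def fromSpecific(from: From)(it: IterableOnce[A]^): C       // Seq/Map/Set: copy, drop captures

trait LazyBuildFrom[-From, -A, +C]:
  def fromSpecific(from: From)(it: IterableOnce[A]^): C^{it}  // views, iterators: retain `it`
```

Until a refactoring along these lines lands, the instances below paper over the gap with `unsafeAssumePure`.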
*/ @@ -37,7 +43,7 @@ trait BuildFrom[-From, -A, +C] extends Any { self => /** Partially apply a BuildFrom to a Factory */ def toFactory(from: From): Factory[A, C] = new Factory[A, C] { - def fromSpecific(it: IterableOnce[A]): C = self.fromSpecific(from)(it) + def fromSpecific(it: IterableOnce[A]^): C = self.fromSpecific(from)(it) def newBuilder: Builder[A, C] = self.newBuilder(from) } } @@ -48,42 +54,42 @@ object BuildFrom extends BuildFromLowPriority1 { implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { //TODO: Reuse a prototype instance def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) } /** Build the source collection type from a SortedMapOps */ implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) } implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] = new BuildFrom[C, Int, C] { - def fromSpecific(from: C)(it: IterableOnce[Int]): C = from.bitSetFactory.fromSpecific(it) + def fromSpecific(from: C)(it: IterableOnce[Int]^): C = from.bitSetFactory.fromSpecific(it) def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder } implicit val buildFromString: BuildFrom[String, Char, String] = new BuildFrom[String, Char, String] { - def fromSpecific(from: String)(it: IterableOnce[Char]): String = Factory.stringFactory.fromSpecific(it) + def fromSpecific(from: String)(it: IterableOnce[Char]^): String = Factory.stringFactory.fromSpecific(it) def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder } implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] = new BuildFrom[WrappedString, Char, WrappedString] { - def fromSpecific(from: WrappedString)(it: IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(it) + def fromSpecific(from: WrappedString)(it: IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(it) def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder } - implicit def buildFromArray[A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = + implicit def buildFromArray[sealed A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = new BuildFrom[Array[_], A, Array[A]] { - def fromSpecific(from: Array[_])(it: IterableOnce[A]): Array[A] = Factory.arrayFactory[A].fromSpecific(it) + def fromSpecific(from: Array[_])(it: IterableOnce[A]^): Array[A] = Factory.arrayFactory[A].fromSpecific(it) def newBuilder(from: Array[_]): Builder[A, Array[A]] = 
Factory.arrayFactory[A].newBuilder } - implicit def buildFromView[A, B]: BuildFrom[View[A], B, View[B]] = + implicit def buildFromView[A, sealed B]: BuildFrom[View[A], B, View[B]] = new BuildFrom[View[A], B, View[B]] { - def fromSpecific(from: View[A])(it: IterableOnce[B]): View[B] = View.from(it) + def fromSpecific(from: View[A])(it: IterableOnce[B]^): View[B] = View.from(it).unsafeAssumePure def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder } @@ -97,12 +103,12 @@ trait BuildFromLowPriority1 extends BuildFromLowPriority2 { // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209 implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] { def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) + def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) } implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] = new BuildFrom[String, A, immutable.IndexedSeq[A]] { - def fromSpecific(from: String)(it: IterableOnce[A]): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) + def fromSpecific(from: String)(it: IterableOnce[A]^): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] } } @@ -112,11 +118,11 @@ trait BuildFromLowPriority2 { implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] { //TODO: Reuse a prototype instance def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it) + def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it).unsafeAssumePure } implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] { def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder - def fromSpecific(from: Iterator[_])(it: IterableOnce[A]): Iterator[A] = Iterator.from(it) + def fromSpecific(from: Iterator[_])(it: IterableOnce[A]^): Iterator[A] = Iterator.from(it).unsafeAssumePure } } diff --git a/tests/pos-special/stdlib/collection/DefaultMap.scala b/tests/pos-special/stdlib/collection/DefaultMap.scala index cbc61d8c0268..baa9eceadae5 100644 --- a/tests/pos-special/stdlib/collection/DefaultMap.scala +++ b/tests/pos-special/stdlib/collection/DefaultMap.scala @@ -12,7 +12,7 @@ package scala package collection - +import language.experimental.captureChecking /** A default map which builds a default `immutable.Map` implementation for all * transformations. 
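Those `unsafeAssumePure` calls deserve a gloss: the operation re-types a capturing value as pure, telling the checker to trust the author that nothing tracked escapes. In isolation, assuming only the `caps.unsafe` import already used above:

```scala
import language.experimental.captureChecking
import caps.unsafe.unsafeAssumePure

def forgetCaptures[A](it: Iterator[A]^): Iterator[A] =
  it.unsafeAssumePure // unsound in general: the captures of `it` are hidden
```

Here it is a stopgap for the `BuildFrom` limitation flagged earlier, not a general license.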
diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala index 2b15f1cc15d1..c45776b62b9c 100644 --- a/tests/pos-special/stdlib/collection/Factory.scala +++ b/tests/pos-special/stdlib/collection/Factory.scala @@ -18,6 +18,8 @@ import scala.language.implicitConversions import scala.collection.mutable.Builder import scala.annotation.unchecked.uncheckedVariance import scala.reflect.ClassTag +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure /** * A factory that builds a collection of type `C` with elements of type `A`. @@ -29,14 +31,14 @@ import scala.reflect.ClassTag * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) */ -trait Factory[-A, +C] extends Any { +trait Factory[-A, +C] extends Pure { /** * @return A collection of type `C` containing the same elements * as the source collection `it`. * @param it Source collection */ - def fromSpecific(it: IterableOnce[A]): C + def fromSpecific(it: IterableOnce[A]^): C /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ @@ -48,7 +50,7 @@ object Factory { implicit val stringFactory: Factory[Char, String] = new StringFactory @SerialVersionUID(3L) private class StringFactory extends Factory[Char, String] with Serializable { - def fromSpecific(it: IterableOnce[Char]): String = { + def fromSpecific(it: IterableOnce[Char]^): String = { val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) b ++= it b.result() @@ -56,10 +58,10 @@ object Factory { def newBuilder: Builder[Char, String] = new mutable.StringBuilder() } - implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] + implicit def arrayFactory[sealed A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] @SerialVersionUID(3L) - private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): Array[A] = { + private class ArrayFactory[sealed A: ClassTag] extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]^): Array[A] = { val b = newBuilder b.sizeHint(scala.math.max(0, it.knownSize)) b ++= it @@ -80,7 +82,7 @@ object Factory { * @define coll collection * @define Coll `Iterable` */ -trait IterableFactory[+CC[_]] extends Serializable { +trait IterableFactory[+CC[_]] extends Serializable, Pure { /** Creates a target $coll from an existing source collection * @@ -88,7 +90,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @tparam A the type of the collection’s elements * @return a new $coll with the elements of `source` */ - def from[A](source: IterableOnce[A]): CC[A] + def from[A](source: IterableOnce[A]^): CC[A]^{source} /** An empty collection * @tparam A the type of the ${coll}'s elements @@ -109,7 +111,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param f the function that's repeatedly applied * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` */ - def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + def iterate[A](start: A, len: Int)(f: A => A): CC[A]^{f} = from(new View.Iterate(start, len)(f)) /** Produces a $coll that uses a function `f` to produce elements of type `A` * and update an internal state of type `S`. 
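The heart of the `Factory.scala` diff above is the new signature `def from[A](source: IterableOnce[A]^): CC[A]^{source}`: a collection built from a capturing source is itself typed as capturing that source, while strict factories override the result to a pure type. A sketch of the intended usage, assuming the capture-checked stdlib of this commit:

```scala
import language.experimental.captureChecking

def demo(source: Iterator[Int]^): Unit =
  val lazily = Iterator.from(source)
  val _: Iterator[Int]^{source} = lazily // lazy: still tied to `source`
  val strictly = List.from(source)
  val _: List[Int] = strictly            // strict: copied out, pure
```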
@@ -121,7 +123,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @tparam S Type of the internal state * @return a $coll that produces elements using `f` until `f` returns `None` */ - def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A]^{f} = from(new View.Unfold(init)(f)) /** Produces a $coll containing a sequence of increasing integers. * * @param start the first element of the $coll * @@ -150,7 +152,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n` evaluations of `elem`. */ - def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + def fill[A](n: Int)(elem: => A): CC[A]^{elem} = from(new View.Fill(n)(elem)) /** Produces a two-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension * @param n2 the number of elements in the 2nd dimension * @param elem the element computation * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2)(elem)) /** Produces a three-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension * @param n2 the number of elements in the 2nd dimension * @param n3 the number of elements in the 3rd dimension * @param elem the element computation * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3)(elem)).unsafeAssumePure /** Produces a four-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension * @param n2 the number of elements in the 2nd dimension * @param n3 the number of elements in the 3rd dimension * @param n4 the number of elements in the 4th dimension * @param elem the element computation * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4)(elem)) + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3, n4)(elem)) /** Produces a five-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension * @param n2 the number of elements in the 2nd dimension * @param n3 the number of elements in the 3rd dimension * @param n4 the number of elements in the 4th dimension * @param n5 the number of elements in the 5th dimension * @param elem the element computation * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4, n5)(elem)) + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ???
// fill(n1)(fill(n2, n3, n4, n5)(elem)) /** Produces a $coll containing values of a given function over a range of integer values starting from 0. * @param n The number of elements in the $coll * @param f The function computing element values * @return A $coll consisting of elements `f(0), ..., f(n -1)` */ - def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + def tabulate[A](n: Int)(f: Int => A): CC[A]^{f} = from(new View.Tabulate(n)(f)) /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -206,8 +210,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2)` * for `0 <= i1 < n1` and `0 <= i2 < n2`. */ - def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -217,8 +221,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2, i3)` * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. */ - def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -229,8 +233,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2, i3, i4)` * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -242,8 +246,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? 
// tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) /** Concatenates all argument collections into a single $coll. * @@ -271,13 +275,15 @@ object IterableFactory { @SerialVersionUID(3L) private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it).unsafeAssumePure + // unsafeAssumePure is needed here but unsound, since we confuse the Seq and Iterable versions of fromSpecific def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] } implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = new BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = + factory.from(it).unsafeAssumePure // !!! see the remark in BuildFrom for why this is necessary def newBuilder(from: Any) = factory.newBuilder } @@ -285,15 +291,20 @@ object IterableFactory { class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def from[E](it: IterableOnce[E]^): CC[E]^{it} = delegate.from(it) def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] } } +// !!! Needed to add this separate trait +trait FreeSeqFactory[+CC[A]] extends IterableFactory[CC]: + def from[A](source: IterableOnce[A]^): CC[A] + override def apply[A](elems: A*): CC[A] = from(elems) + /** * @tparam CC Collection type constructor (e.g. `List`) */ -trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends FreeSeqFactory[CC] { import SeqFactory.UnapplySeqWrapper final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? } @@ -303,7 +314,7 @@ object SeqFactory { class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def from[E](it: IterableOnce[E]^): CC[E] = delegate.from(it) def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] } @@ -366,6 +377,8 @@ trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFac * @define Coll `Iterable` */ trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + this: SpecificIterableFactory[A, C] => + def empty: C def apply(xs: A*): C = fromSpecific(xs) def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) @@ -381,7 +394,7 @@ trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { * @define coll collection * @define Coll `Iterable` */ -trait MapFactory[+CC[_, _]] extends Serializable { +trait MapFactory[+CC[_, _]] extends Serializable, Pure { /** * An empty Map * @@ -391,7 +404,7 @@ trait MapFactory[+CC[_, _]] extends Serializable { /** * A collection of type Map generated from a given iterable object. */ - def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + def from[K, V](it: IterableOnce[(K, V)]^): CC[K, V] /** * A collection of type Map that contains given key/value bindings.
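A sketch (not from the patch) of what declaring these factories `Pure` buys: a factory value carries an empty capture set, so under capture checking it can be stored, passed, and returned without any `^` annotation.

  import scala.collection.MapFactory
  import scala.collection.immutable.{HashMap, Map}

  // Both companions are MapFactory values; since MapFactory is Pure,
  // the result type needs no capture annotation.
  def mapFactoryFor(small: Boolean): MapFactory[Map] =
    if (small) Map else HashMap

  // mapFactoryFor(true).from(List(1 -> "one"))  == Map(1 -> "one")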
@@ -424,20 +437,20 @@ object MapFactory { @SerialVersionUID(3L) private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] } implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new BuildFrom[Any, (K, V), CC[K, V]] { - def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) def newBuilder(from: Any) = factory.newBuilder[K, V] } @SerialVersionUID(3L) class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) - def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def from[K, V](it: IterableOnce[(K, V)]^): C[K, V] = delegate.from(it) def empty[K, V]: C[K, V] = delegate.empty def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder } @@ -454,9 +467,9 @@ object MapFactory { * @define coll collection * @define Coll `Iterable` */ -trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable, Pure { - def from[E : Ev](it: IterableOnce[E]): CC[E] + def from[E : Ev](it: IterableOnce[E]^): CC[E] def empty[A : Ev]: CC[A] @@ -517,13 +530,13 @@ object EvidenceIterableFactory { @SerialVersionUID(3L) private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it) def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] } implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory) private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = factory.from[A](it) def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A] } @@ -531,7 +544,7 @@ object EvidenceIterableFactory { class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) def empty[A : Ev]: CC[A] = delegate.empty - def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it) + def from[E : Ev](it: IterableOnce[E]^): CC[E] = delegate.from(it) def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] } } @@ -668,7 +681,7 @@ object ClassTagIterableFactory { @SerialVersionUID(3L) class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] - def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] 
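    // Editorial sketch, not in the original patch: this Any-delegate adapts a
    // ClassTag-based factory into an unconstrained IterableFactory by
    // instantiating the element type to Any and casting back. The standard
    // library obtains immutable.ArraySeq.untagged from exactly this kind of
    // delegate, roughly:
    //   val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(ArraySeq)
    //   untagged.from(List(1, 2, 3))   // an ArraySeq[Int] backed by an Array[Any]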
override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) @@ -734,10 +747,11 @@ trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extend * @define Coll `Iterable` */ trait SortedMapFactory[+CC[_, _]] extends Serializable { + this: SortedMapFactory[CC] => def empty[K : Ordering, V]: CC[K, V] - def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] + def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) @@ -764,20 +778,20 @@ object SortedMapFactory { @SerialVersionUID(3L) private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] } implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { - def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) def newBuilder(from: Any) = factory.newBuilder[K, V] } @SerialVersionUID(3L) class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) - def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] = delegate.from(it) def empty[K : Ordering, V]: CC[K, V] = delegate.empty def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder } diff --git a/tests/pos-special/stdlib/collection/Hashing.scala b/tests/pos-special/stdlib/collection/Hashing.scala index 4e1fd872b8b5..772dcf5c65da 100644 --- a/tests/pos-special/stdlib/collection/Hashing.scala +++ b/tests/pos-special/stdlib/collection/Hashing.scala @@ -12,6 +12,7 @@ package scala package collection +import language.experimental.captureChecking protected[collection] object Hashing { diff --git a/tests/pos-special/stdlib/collection/IndexedSeq.scala b/tests/pos-special/stdlib/collection/IndexedSeq.scala index a82d5384779a..a2d4cc942231 100644 --- a/tests/pos-special/stdlib/collection/IndexedSeq.scala +++ b/tests/pos-special/stdlib/collection/IndexedSeq.scala @@ -17,6 +17,9 @@ import scala.annotation.{nowarn, tailrec} import scala.collection.Searching.{Found, InsertionPoint, SearchResult} import scala.collection.Stepper.EfficientSplit import scala.math.Ordering +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + /** Base trait for indexed sequences that have efficient `apply` and `length` */ trait IndexedSeq[+A] extends Seq[A] @@ -32,7 +35,7 @@ trait IndexedSeq[+A] extends Seq[A] object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](immutable.IndexedSeq) /** Base trait for indexed Seq operations */ -trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => +trait IndexedSeqOps[+A, +CC[_], +C] extends 
Any with IndexedSeqViewOps[A, CC, C] with SeqOps[A, CC, C] { self => def iterator: Iterator[A] = view.iterator @@ -85,7 +88,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => override def dropRight(n: Int): C = fromSpecific(new IndexedSeqView.DropRight(this, n)) - override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)) + override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)).unsafeAssumePure override def reverse: C = fromSpecific(new IndexedSeqView.Reverse(this)) @@ -103,7 +106,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => override def knownSize: Int = length - override final def lengthCompare(that: Iterable[_]): Int = { + override final def lengthCompare(that: Iterable[_]^): Int = { val res = that.sizeCompare(length) // can't just invert the result, because `-Int.MinValue == Int.MinValue` if (res == Int.MinValue) 1 else -res diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala index 737f032d2060..a16e06fa707d 100644 --- a/tests/pos-special/stdlib/collection/IndexedSeqView.scala +++ b/tests/pos-special/stdlib/collection/IndexedSeqView.scala @@ -14,33 +14,38 @@ package scala package collection import scala.annotation.nowarn +import language.experimental.captureChecking +trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { + self: IndexedSeqViewOps[A, CC, C]^ => +} /** View defined in terms of indexing a range */ -trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] { self => +trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] { + self: IndexedSeqView[A]^ => - override def view: IndexedSeqView[A] = this + override def view: IndexedSeqView[A]^{this} = this @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") - override def view(from: Int, until: Int): IndexedSeqView[A] = view.slice(from, until) - - override def iterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewIterator(this) - override def reverseIterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewReverseIterator(this) - - override def appended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Appended(this, elem) - override def prepended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Prepended(elem, this) - override def take(n: Int): IndexedSeqView[A] = new IndexedSeqView.Take(this, n) - override def takeRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.TakeRight(this, n) - override def drop(n: Int): IndexedSeqView[A] = new IndexedSeqView.Drop(this, n) - override def dropRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.DropRight(this, n) - override def map[B](f: A => B): IndexedSeqView[B] = new IndexedSeqView.Map(this, f) - override def reverse: IndexedSeqView[A] = new IndexedSeqView.Reverse(this) - override def slice(from: Int, until: Int): IndexedSeqView[A] = new IndexedSeqView.Slice(this, from, until) - override def tapEach[U](f: A => U): IndexedSeqView[A] = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) - - def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) - def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) - def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new 
IndexedSeqView.Concat(prefix, this) + override def view(from: Int, until: Int): IndexedSeqView[A]^{this} = view.slice(from, until) + + override def iterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewIterator(this) + override def reverseIterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewReverseIterator(this) + + override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Prepended(elem, this) + override def take(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Take(this, n) + override def takeRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.TakeRight(this, n) + override def drop(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Drop(this, n) + override def dropRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.DropRight(this, n) + override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new IndexedSeqView.Map(this, f) + override def reverse: IndexedSeqView[A]^{this} = new IndexedSeqView.Reverse(this) + override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Slice(this, from, until) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) + + def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(prefix, this) @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix: String = "IndexedSeqView" @@ -49,7 +54,8 @@ trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] object IndexedSeqView { @SerialVersionUID(3L) - private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { + this: IndexedSeqViewIterator[A]^ => private[this] var current = 0 private[this] var remainder = self.length override def knownSize: Int = remainder @@ -63,7 +69,7 @@ object IndexedSeqView { r } else Iterator.empty.next() - override def drop(n: Int): Iterator[A] = { + override def drop(n: Int): Iterator[A]^{this} = { if (n > 0) { current += n remainder = Math.max(0, remainder - n) @@ -71,7 +77,7 @@ object IndexedSeqView { this } - override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value @@ -83,7 +89,8 @@ object IndexedSeqView { } } @SerialVersionUID(3L) - private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { + this: IndexedSeqViewReverseIterator[A]^ => private[this] var remainder = self.length private[this] var pos = remainder - 1 @inline private[this] def _hasNext: Boolean = remainder > 0 @@ -98,7 +105,7 @@ 
object IndexedSeqView { // from < 0 means don't move pos, until < 0 means don't limit remainder // - override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { if (_hasNext) { if (remainder <= from) remainder = 0 // exhausted by big skip else if (from <= 0) { // no skip, pos is same @@ -117,47 +124,47 @@ object IndexedSeqView { } } - /** An `IndexedSeqOps` whose collection type and collection type constructor are unknown */ - type SomeIndexedSeqOps[A] = IndexedSeqOps[A, AnyConstr, _] + /** An `IndexedSeqViewOps` whose collection type and collection type constructor are unknown */ + type SomeIndexedSeqOps[A] = IndexedSeqViewOps[A, AnyConstr, _] @SerialVersionUID(3L) - class Id[+A](underlying: SomeIndexedSeqOps[A]) + class Id[+A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Id(underlying) with IndexedSeqView[A] @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A) + class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A) extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^) extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] @SerialVersionUID(3L) - class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A]) + class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^) extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] @SerialVersionUID(3L) - class Take[A](underlying: SomeIndexedSeqOps[A], n: Int) + class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.Take(underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int) + class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B) + class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B) extends SeqView.Map(underlying, f) with IndexedSeqView[B] @SerialVersionUID(3L) - class Reverse[A](underlying: SomeIndexedSeqOps[A]) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { + class Reverse[A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { override def reverse: IndexedSeqView[A] = underlying match { case x: IndexedSeqView[A] => x case _ => super.reverse @@ -165,7 +172,7 @@ object IndexedSeqView { } @SerialVersionUID(3L) - class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int) extends AbstractIndexedSeqView[A] { + class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int) extends AbstractIndexedSeqView[A] { protected val lo = from max 0 protected val hi = (until max 0) min underlying.length protected val len = (hi - lo) max 0 diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala index 
04647f215963..bca80d7be108 100644 --- a/tests/pos-special/stdlib/collection/Iterable.scala +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -17,6 +17,7 @@ import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.Builder import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} +import language.experimental.captureChecking /** Base trait for generic collections. * @@ -28,6 +29,7 @@ import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} trait Iterable[+A] extends IterableOnce[A] with IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { + this: Iterable[A]^ => // The collection itself @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") @@ -94,7 +96,7 @@ trait Iterable[+A] extends IterableOnce[A] * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that) + def lazyZip[B](that: Iterable[B]^): LazyZip2[A, B, this.type]^{this, that} = new LazyZip2(this, this, that) } /** Base trait for Iterable operations @@ -132,29 +134,31 @@ trait Iterable[+A] extends IterableOnce[A] * and may be nondeterministic. */ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with IterableOnceOps[A, CC, C] { + this: IterableOps[A, CC, C]^ => + /** * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. */ // Should be `protected def asIterable`, or maybe removed altogether if it's not needed @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") - def toIterable: Iterable[A] + def toIterable: Iterable[A]^{this} /** Converts this $coll to an unspecified Iterable. Will return * the same collection if this instance is already Iterable. * @return An Iterable containing all elements of this $coll. */ @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") - final def toTraversable: Traversable[A] = toIterable + final def toTraversable: Traversable[A]^{this} = toIterable override def isTraversableAgain: Boolean = true /** * @return This collection as a `C`. */ - protected def coll: C + protected def coll: C^{this} @deprecated("Use coll instead of repr in a collection implementation, use the collection value itself from the outside", "2.13.0") - final def repr: C = coll + final def repr: C^{this} = coll /** * Defines how to turn a given `Iterable[A]` into a collection of type `C`. @@ -174,7 +178,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * `Iterable[A]` obtained from `this` collection (as it is the case in the * implementations of operations where we use a `View[A]`), it is safe. */ - protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): C + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): C^{coll} /** The companion object of this ${coll}, providing various factory methods. 
* @@ -251,7 +255,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable def lastOption: Option[A] = if (isEmpty) None else Some(last) /** A view over the elements of this collection. */ - def view: View[A] = View.fromIteratorProvider(() => iterator) + def view: View[A]^{this} = View.fromIteratorProvider(() => iterator) /** Compares the size of this $coll to a test value. * @@ -301,7 +305,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * this.sizeIs > size // this.sizeCompare(size) > 0 * }}} */ - @inline final def sizeIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this) + @inline final def sizeIs: IterableOps.SizeCompareOps^{this} = new IterableOps.SizeCompareOps(this) /** Compares the size of this $coll to the size of another `Iterable`. * @@ -317,7 +321,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * is `O(this.size min that.size)` instead of `O(this.size + that.size)`. * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. */ - def sizeCompare(that: Iterable[_]): Int = { + def sizeCompare(that: Iterable[_]^): Int = { val thatKnownSize = that.knownSize if (thatKnownSize >= 0) this sizeCompare thatKnownSize @@ -342,7 +346,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable /** A view over a slice of the elements of this collection. */ @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") - def view(from: Int, until: Int): View[A] = view.slice(from, until) + def view(from: Int, until: Int): View[A]^{this} = view.slice(from, until) /** Transposes this $coll of iterable collections into * a $coll of ${coll}s. * @@ -378,7 +382,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @throws IllegalArgumentException if all collections in this $coll * are not of the same size. */ - def transpose[B](implicit asIterable: A => /*<:<*/ Iterable[B]): CC[CC[B] @uncheckedVariance] = { + def transpose[B](implicit asIterable: A -> /*<:<*/ Iterable[B]): CC[CC[B] @uncheckedVariance] = { @@ ... @@ - def filter(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = false)) + def filter(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = false)) - def filterNot(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = true)) + def filterNot(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = true)) /** Creates a non-strict filter of this $coll. * @@ -417,7 +421,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * All these operations apply to those elements of this $coll * which satisfy the predicate `p`. */ - def withFilter(p: A => Boolean): collection.WithFilter[A, CC] = new IterableOps.WithFilter(this, p) + def withFilter(p: A => Boolean): collection.WithFilter[A, CC]^{this, p} = new IterableOps.WithFilter(this, p) /** A pair of, first, all elements that satisfy predicate `p` and, second, * all elements that do not. Interesting because it splits a collection in two. * * Strict collections have an overridden version of `partition` in `StrictOptimizedIterableOps`, * which requires only a single traversal.
*/ - def partition(p: A => Boolean): (C, C) = { + def partition(p: A => Boolean): (C^{this, p}, C^{this, p}) = { val first = new View.Filter(this, p, false) val second = new View.Filter(this, p, true) (fromSpecific(first), fromSpecific(second)) } - override def splitAt(n: Int): (C, C) = (take(n), drop(n)) + override def splitAt(n: Int): (C^{this}, C^{this}) = (take(n), drop(n)) - def take(n: Int): C = fromSpecific(new View.Take(this, n)) + def take(n: Int): C^{this} = fromSpecific(new View.Take(this, n)) /** Selects the last ''n'' elements. * $orderDependent @@ -443,7 +447,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * or else the whole $coll, if it has less than `n` elements. * If `n` is negative, returns an empty $coll. */ - def takeRight(n: Int): C = fromSpecific(new View.TakeRight(this, n)) + def takeRight(n: Int): C^{this} = fromSpecific(new View.TakeRight(this, n)) /** Takes longest prefix of elements that satisfy a predicate. * $orderDependent @@ -451,11 +455,11 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return the longest prefix of this $coll whose elements all satisfy * the predicate `p`. */ - def takeWhile(p: A => Boolean): C = fromSpecific(new View.TakeWhile(this, p)) + def takeWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.TakeWhile(this, p)) - def span(p: A => Boolean): (C, C) = (takeWhile(p), dropWhile(p)) + def span(p: A => Boolean): (C^{this, p}, C^{this, p}) = (takeWhile(p), dropWhile(p)) - def drop(n: Int): C = fromSpecific(new View.Drop(this, n)) + def drop(n: Int): C^{this} = fromSpecific(new View.Drop(this, n)) /** Selects all elements except last ''n'' ones. * $orderDependent @@ -464,9 +468,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * empty $coll, if this $coll has less than `n` elements. * If `n` is negative, don't drop any elements. */ - def dropRight(n: Int): C = fromSpecific(new View.DropRight(this, n)) + def dropRight(n: Int): C^{this} = fromSpecific(new View.DropRight(this, n)) - def dropWhile(p: A => Boolean): C = fromSpecific(new View.DropWhile(this, p)) + def dropWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.DropWhile(this, p)) /** Partitions elements in fixed size ${coll}s. * @see [[scala.collection.Iterator]], method `grouped` @@ -475,7 +479,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return An iterator producing ${coll}s of size `size`, except the * last will be less than size `size` if the elements don't divide evenly. */ - def grouped(size: Int): Iterator[C] = + def grouped(size: Int): Iterator[C^{this}]^{this} = iterator.grouped(size).map(fromSpecific) /** Groups elements in fixed size blocks by passing a "sliding window" @@ -497,7 +501,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @example `List(1, 2).sliding(2) = Iterator(List(1, 2))` * @example `List(1, 2, 3).sliding(2) = Iterator(List(1, 2), List(2, 3))` */ - def sliding(size: Int): Iterator[C] = sliding(size, 1) + def sliding(size: Int): Iterator[C^{this}]^{this} = sliding(size, 1) /** Groups elements in fixed size blocks by passing a "sliding window" * over them (as opposed to partitioning them, as is done in grouped.) @@ -516,13 +520,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * element (which may be the only element) will be smaller * if there are fewer than `size` elements remaining to be grouped. 
* @example `List(1, 2, 3, 4, 5).sliding(2, 2) = Iterator(List(1, 2), List(3, 4), List(5))` - * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` + * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` */ - def sliding(size: Int, step: Int): Iterator[C] = + def sliding(size: Int, step: Int): Iterator[C^{this}]^{this} = iterator.sliding(size, step).map(fromSpecific) /** The rest of the collection without its first element. */ - def tail: C = { + def tail: C^{this} = { if (isEmpty) throw new UnsupportedOperationException drop(1) } @@ -530,12 +534,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable /** The initial part of the collection without its last element. * $willForceEvaluation */ - def init: C = { + def init: C^{this} = { if (isEmpty) throw new UnsupportedOperationException dropRight(1) } - def slice(from: Int, until: Int): C = + def slice(from: Int, until: Int): C^{this} = fromSpecific(new View.Drop(new View.Take(this, until), from)) /** Partitions this $coll into a map of ${coll}s according to some discriminator function. @@ -645,9 +649,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * * @return a new $coll containing the prefix scan of the elements in this $coll */ - def scan[B >: A](z: B)(op: (B, B) => B): CC[B] = scanLeft(z)(op) + def scan[B >: A](z: B)(op: (B, B) => B): CC[B]^{this, op} = scanLeft(z)(op) - def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = iterableFactory.from(new View.ScanLeft(this, z, op)) + def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} = iterableFactory.from(new View.ScanLeft(this, z, op)) /** Produces a collection containing cumulative results of applying the operator going right to left. * The head of the collection is the last cumulative result. 
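For reference (not part of the patch), the strict semantics of the scan family whose results are annotated `^{this, op}` above; the annotation is needed because a lazy collection may evaluate `op` only after the call returns:

  List(1, 2, 3).scanLeft(0)(_ + _)    // List(0, 1, 3, 6)
  List(1, 2, 3).scanRight(0)(_ + _)   // List(6, 5, 3, 0)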
@@ -665,7 +669,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results */ - def scanRight[B](z: B)(op: (A, B) => B): CC[B] = { + def scanRight[B](z: B)(op: (A, B) => B): CC[B]^{this, op} = { class Scanner extends runtime.AbstractFunction1[A, Unit] { var acc = z var scanned = acc :: immutable.Nil @@ -679,13 +683,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable iterableFactory.from(scanner.scanned) } - def map[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(this, f)) + def map[B](f: A => B): CC[B]^{this, f} = iterableFactory.from(new View.Map(this, f)) - def flatMap[B](f: A => IterableOnce[B]): CC[B] = iterableFactory.from(new View.FlatMap(this, f)) + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = iterableFactory.from(new View.FlatMap(this, f)) - def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] = flatMap(asIterable) + def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} = flatMap(asIterable) - def collect[B](pf: PartialFunction[A, B]): CC[B] = + def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} = iterableFactory.from(new View.Collect(this, pf)) /** Applies a function `f` to each element of the $coll and returns a pair of ${coll}s: the first one @@ -706,12 +710,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @tparam A2 the element type of the second resulting collection * @param f the 'split function' mapping the elements of this $coll to an [[scala.util.Either]] * - * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], * and the second one made of those wrapped in [[scala.util.Right]]. */ - def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { - val left: View[A1] = new LeftPartitionMapped(this, f) - val right: View[A2] = new RightPartitionMapped(this, f) + def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1]^{this, f}, CC[A2]^{this, f}) = { + val left: View[A1]^{f, this} = new LeftPartitionMapped(this, f) + val right: View[A2]^{f, this} = new RightPartitionMapped(this, f) (iterableFactory.from(left), iterableFactory.from(right)) } @@ -724,13 +728,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a new $coll which contains all elements * of this $coll followed by all elements of `suffix`. */ - def concat[B >: A](suffix: IterableOnce[B]): CC[B] = iterableFactory.from(suffix match { + def concat[B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = iterableFactory.from(suffix match { case xs: Iterable[B] => new View.Concat(this, xs) case xs => iterator ++ suffix.iterator }) /** Alias for `concat` */ - @`inline` final def ++ [B >: A](suffix: IterableOnce[B]): CC[B] = concat(suffix) + @`inline` final def ++ [B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = concat(suffix) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. @@ -741,12 +745,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. 
* The length of the returned collection is the minimum of the lengths of this $coll and `that`. */ - def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = iterableFactory.from(that match { // sound bcs of VarianceNote + def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)]^{this, that} = iterableFactory.from(that match { // sound bcs of VarianceNote case that: Iterable[B] => new View.Zip(this, that) case _ => iterator.zip(that) }) - def zipWithIndex: CC[(A @uncheckedVariance, Int)] = iterableFactory.from(new View.ZipWithIndex(this)) + def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} = iterableFactory.from(new View.ZipWithIndex(this)) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. @@ -762,7 +766,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. */ - def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): CC[(A1, B)] = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) + def zipAll[A1 >: A, B](that: Iterable[B]^, thisElem: A1, thatElem: B): CC[(A1, B)]^{this, that} = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) /** Converts this $coll of pairs into two collections of the first and second * half of each pair. @@ -783,9 +787,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a pair of ${coll}s, containing the first, respectively second * half of each element pair of this $coll. */ - def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { - val first: View[A1] = new View.Map[A, A1](this, asPair(_)._1) - val second: View[A2] = new View.Map[A, A2](this, asPair(_)._2) + def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1]^{this}, CC[A2]^{this}) = { + val first: View[A1]^{this} = new View.Map[A, A1](this, asPair(_)._1) + val second: View[A2]^{this} = new View.Map[A, A2](this, asPair(_)._2) (iterableFactory.from(first), iterableFactory.from(second)) } @@ -810,10 +814,10 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a triple of ${coll}s, containing the first, second, respectively * third member of each element triple of this $coll. 
*/ - def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { - val first: View[A1] = new View.Map[A, A1](this, asTriple(_)._1) - val second: View[A2] = new View.Map[A, A2](this, asTriple(_)._2) - val third: View[A3] = new View.Map[A, A3](this, asTriple(_)._3) + def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1]^{this}, CC[A2]^{this}, CC[A3]^{this}) = { + val first: View[A1]^{this} = new View.Map[A, A1](this, asTriple(_)._1) + val second: View[A2]^{this} = new View.Map[A, A2](this, asTriple(_)._2) + val third: View[A3]^{this} = new View.Map[A, A3](this, asTriple(_)._3) (iterableFactory.from(first), iterableFactory.from(second), iterableFactory.from(third)) } @@ -824,7 +828,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return an iterator over all the tails of this $coll * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` */ - def tails: Iterator[C] = iterateUntilEmpty(_.tail) + def tails: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.tail) /** Iterates over the inits of this $coll. The first value will be this * $coll and the final one will be an empty $coll, with the intervening @@ -835,21 +839,24 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return an iterator over all the inits of this $coll * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` */ - def inits: Iterator[C] = iterateUntilEmpty(_.init) + def inits: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.init) - override def tapEach[U](f: A => U): C = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) + override def tapEach[U](f: A => U): C^{this, f} = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { + private[this] def iterateUntilEmpty(f: Iterable[A]^{this} => Iterable[A]^{this}): Iterator[C^{this}]^{this, f} = { // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` // `this.tail.tail` doesn't compile as `C` is unbounded // `Iterable.from(this)` would eagerly copy non-immutable collections - val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f) + .takeWhile((itble: Iterable[A]^) => itble.iterator.nonEmpty) + // CC TODO type annotation for itble needed. + // The previous code `.takeWhile(_.iterator.nonEmpty)` does not work. (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") - def ++:[B >: A](that: IterableOnce[B]): CC[B] = iterableFactory.from(that match { + def ++:[B >: A](that: IterableOnce[B]^): CC[B]^{this, that} = iterableFactory.from(that match { case xs: Iterable[B] => new View.Concat(xs, this) case _ => that.iterator ++ iterator }) @@ -862,7 +869,8 @@ object IterableOps { * These operations are implemented in terms of * [[scala.collection.IterableOps.sizeCompare(Int) `sizeCompare(Int)`]]. */ - final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]) extends AnyVal { + final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]^) extends AnyVal { + this: SizeCompareOps^{it} => /** Tests if the size of the collection is less than some value. 
*/ @inline def <(size: Int): Boolean = it.sizeCompare(size) < 0 /** Tests if the size of the collection is less than or equal to some value. */ @@ -887,22 +895,22 @@ object IterableOps { */ @SerialVersionUID(3L) class WithFilter[+A, +CC[_]]( - self: IterableOps[A, CC, _], + self: IterableOps[A, CC, _]^, p: A => Boolean ) extends collection.WithFilter[A, CC] with Serializable { - protected def filtered: Iterable[A] = + protected def filtered: Iterable[A]^{this} = new View.Filter(self, p, isFlipped = false) - def map[B](f: A => B): CC[B] = + def map[B](f: A => B): CC[B]^{this, f} = self.iterableFactory.from(new View.Map(filtered, f)) - def flatMap[B](f: A => IterableOnce[B]): CC[B] = + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = self.iterableFactory.from(new View.FlatMap(filtered, f)) def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): WithFilter[A, CC] = + def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} = new WithFilter(self, (a: A) => p(a) && q(a)) } @@ -940,7 +948,7 @@ abstract class AbstractIterable[+A] extends Iterable[A] * same as `C`. */ trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { - protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = iterableFactory.from(coll) + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = iterableFactory.from(coll) protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] // overridden for efficiency, since we know CC[A] =:= C @@ -958,7 +966,7 @@ trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends I trait EvidenceIterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]], Ev[_]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { protected def evidenceIterableFactory: EvidenceIterableFactory[CC, Ev] implicit protected def iterableEvidence: Ev[A @uncheckedVariance] - override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = evidenceIterableFactory.from(coll) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = evidenceIterableFactory.from(coll) override protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = evidenceIterableFactory.newBuilder[A] override def empty: CC[A @uncheckedVariance] = evidenceIterableFactory.empty } @@ -980,11 +988,11 @@ trait SortedSetFactoryDefaults[+A, +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { self: IterableOps[A, WithFilterCC, _] => - override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = sortedIterableFactory.from(coll)(ordering) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(ordering) override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](ordering) override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(ordering) - override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC] = + override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, 
WithFilterCC, CC]^{p} = new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) } @@ -1004,7 +1012,8 @@ trait SortedSetFactoryDefaults[+A, trait MapFactoryDefaults[K, +V, +CC[x, y] <: IterableOps[(x, y), Iterable, Iterable[(x, y)]], +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x]] extends MapOps[K, V, CC, CC[K, V @uncheckedVariance]] with IterableOps[(K, V), WithFilterCC, CC[K, V @uncheckedVariance]] { - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = mapFactory.from(coll) + this: MapFactoryDefaults[K, V, CC, WithFilterCC] => + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = mapFactory.from(coll) override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) @@ -1012,7 +1021,7 @@ trait MapFactoryDefaults[K, +V, case _ => mapFactory.empty } - override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC] = + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC]^{p} = new MapOps.WithFilter[K, V, WithFilterCC, CC](this, p) } @@ -1035,9 +1044,9 @@ trait SortedMapFactoryDefaults[K, +V, self: IterableOps[(K, V), WithFilterCC, _] => override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(ordering) - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = sortedMapFactory.from(coll)(ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(ordering) override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](ordering) - override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC] = + override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC]^{p} = new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) } diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala b/tests/pos-special/stdlib/collection/IterableOnce.scala index 65d8dce08ae4..a88be4943c58 100644 --- a/tests/pos-special/stdlib/collection/IterableOnce.scala +++ b/tests/pos-special/stdlib/collection/IterableOnce.scala @@ -14,12 +14,13 @@ package scala package collection import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance +import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import scala.collection.mutable.StringBuilder import scala.language.implicitConversions import scala.math.{Numeric, Ordering} import scala.reflect.ClassTag import scala.runtime.AbstractFunction2 +import language.experimental.captureChecking /** * A template trait for collections which can be traversed either once only @@ -42,8 +43,10 @@ import scala.runtime.AbstractFunction2 * @define coll collection */ trait IterableOnce[+A] extends Any { + this: IterableOnce[A]^ => + /** Iterator can be used only once */ - def iterator: Iterator[A] + def iterator: Iterator[A]^{this} /** Returns a 
[[scala.collection.Stepper]] for the elements of this collection. * @@ -65,9 +68,9 @@ trait IterableOnce[+A] extends Any { * allow creating parallel streams, whereas bare Steppers can be converted only to sequential * streams. */ - def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S = { + def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = { import convert.impl._ - val s = shape.shape match { + val s: Any = shape.shape match { case StepperShape.IntShape => new IntIteratorStepper (iterator.asInstanceOf[Iterator[Int]]) case StepperShape.LongShape => new LongIteratorStepper (iterator.asInstanceOf[Iterator[Long]]) case StepperShape.DoubleShape => new DoubleIteratorStepper(iterator.asInstanceOf[Iterator[Double]]) @@ -84,7 +87,7 @@ trait IterableOnce[+A] extends Any { final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) extends AnyVal { @deprecated("Use .iterator.withFilter(...) instead", "2.13.0") - def withFilter(f: A => Boolean): Iterator[A] = it.iterator.withFilter(f) + def withFilter(f: A => Boolean): Iterator[A]^{f} = it.iterator.withFilter(f) @deprecated("Use .iterator.reduceLeftOption(...) instead", "2.13.0") def reduceLeftOption(f: (A, A) => A): Option[A] = it.iterator.reduceLeftOption(f) @@ -102,7 +105,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def reduceRight(f: (A, A) => A): A = it.iterator.reduceRight(f) @deprecated("Use .iterator.maxBy(...) instead", "2.13.0") - def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) + def maxBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) @deprecated("Use .iterator.reduceLeft(...) instead", "2.13.0") def reduceLeft(f: (A, A) => A): A = it.iterator.reduceLeft(f) @@ -120,7 +123,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def reduceOption(f: (A, A) => A): Option[A] = it.iterator.reduceOption(f) @deprecated("Use .iterator.minBy(...) instead", "2.13.0") - def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) + def minBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) @deprecated("Use .iterator.size instead", "2.13.0") def size: Int = it.iterator.size @@ -132,7 +135,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def collectFirst[B](f: PartialFunction[A, B]): Option[B] = it.iterator.collectFirst(f) @deprecated("Use .iterator.filter(...) instead", "2.13.0") - def filter(f: A => Boolean): Iterator[A] = it.iterator.filter(f) + def filter(f: A => Boolean): Iterator[A]^{f} = it.iterator.filter(f) @deprecated("Use .iterator.exists(...) 
instead", "2.13.0") def exists(f: A => Boolean): Boolean = it.iterator.exists(f) @@ -159,10 +162,10 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(it) @deprecated("Use .iterator.to(ArrayBuffer) instead", "2.13.0") - def toBuffer[B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) + def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) @deprecated("Use .iterator.toArray", "2.13.0") - def toArray[B >: A: ClassTag]: Array[B] = it match { + def toArray[sealed B >: A: ClassTag]: Array[B] = it match { case it: Iterable[B] => it.toArray[B] case _ => it.iterator.toArray[B] } @@ -238,13 +241,13 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext @`inline` def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) @deprecated("Use .iterator.map instead or consider requiring an Iterable", "2.13.0") - def map[B](f: A => B): IterableOnce[B] = it match { + def map[B](f: A => B): IterableOnce[B]^{f} = it match { case it: Iterable[A] => it.map(f) case _ => it.iterator.map(f) } @deprecated("Use .iterator.flatMap instead or consider requiring an Iterable", "2.13.0") - def flatMap[B](f: A => IterableOnce[B]): IterableOnce[B] = it match { + def flatMap[B](f: A => IterableOnce[B]^): IterableOnce[B]^{f} = it match { case it: Iterable[A] => it.flatMap(f) case _ => it.iterator.flatMap(f) } @@ -269,10 +272,11 @@ object IterableOnce { math.max(math.min(math.min(len, srcLen), destLen - start), 0) /** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. */ - @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A], - xs: Array[B], - start: Int = 0, - len: Int = Int.MaxValue): Int = + @inline private[collection] def copyElemsToArray[A, sealed B >: A]( + elems: IterableOnce[A]^, + xs: Array[B], + start: Int = 0, + len: Int = Int.MaxValue): Int = elems match { case src: Iterable[A] => src.copyToArray[B](xs, start, len) case src => src.iterator.copyToArray[B](xs, start, len) @@ -315,9 +319,11 @@ object IterableOnce { * @define coll collection * */ -trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => +trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => /////////////////////////////////////////////////////////////// Abstract methods that must be implemented + import IterableOnceOps.Maximized + /** Produces a $coll containing cumulative results of applying the * operator going left to right, including the initial value. * @@ -329,7 +335,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results */ - def scanLeft[B](z: B)(op: (B, A) => B): CC[B] + def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} /** Selects all elements of this $coll which satisfy a predicate. * @@ -337,7 +343,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a new $coll consisting of all elements of this $coll that satisfy the given * predicate `p`. The order of the elements is preserved. */ - def filter(p: A => Boolean): C + def filter(p: A => Boolean): C^{this, p} /** Selects all elements of this $coll which do not satisfy a predicate. 
* @@ -345,7 +351,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a new $coll consisting of all elements of this $coll that do not satisfy the given * predicate `pred`. Their order may not be preserved. */ - def filterNot(pred: A => Boolean): C + def filterNot(p: A => Boolean): C^{this, p} /** Selects the first ''n'' elements. * $orderDependent @@ -354,7 +360,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * or else the whole $coll, if it has less than `n` elements. * If `n` is negative, returns an empty $coll. */ - def take(n: Int): C + def take(n: Int): C^{this} /** Takes longest prefix of elements that satisfy a predicate. * $orderDependent @@ -362,7 +368,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return the longest prefix of this $coll whose elements all satisfy * the predicate `p`. */ - def takeWhile(p: A => Boolean): C + def takeWhile(p: A => Boolean): C^{this, p} /** Selects all elements except first ''n'' ones. * $orderDependent @@ -371,7 +377,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * empty $coll, if this $coll has less than `n` elements. * If `n` is negative, don't drop any elements. */ - def drop(n: Int): C + def drop(n: Int): C^{this} /** Drops longest prefix of elements that satisfy a predicate. * $orderDependent @@ -379,7 +385,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return the longest suffix of this $coll whose first element * does not satisfy the predicate `p`. */ - def dropWhile(p: A => Boolean): C + def dropWhile(p: A => Boolean): C^{this, p} /** Selects an interval of elements. The returned $coll is made up * of all elements `x` which satisfy the invariant: @@ -394,7 +400,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * index `from` extending up to (but not including) index `until` * of this $coll. */ - def slice(from: Int, until: Int): C + def slice(from: Int, until: Int): C^{this} /** Builds a new $coll by applying a function to all elements of this $coll. * @@ -403,7 +409,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a new $coll resulting from applying the given function * `f` to each element of this $coll and collecting the results. */ - def map[B](f: A => B): CC[B] + def map[B](f: A => B): CC[B]^{this, f} /** Builds a new $coll by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -436,7 +442,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[B](f: A => IterableOnce[B]): CC[B] + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} /** Converts this $coll of iterable collections into * a $coll formed by the elements of these iterable @@ -464,7 +470,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * type of this $coll is an `Iterable`. * @return a new $coll resulting from concatenating all element ${coll}s. */ - def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] + def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} /** Builds a new $coll by applying a partial function to all elements of this $coll * on which the function is defined. 
@@ -475,7 +481,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * `pf` to each element on which it is defined and collecting the results. * The order of the elements is preserved. */ - def collect[B](pf: PartialFunction[A, B]): CC[B] + def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} /** Zips this $coll with its indices. * @@ -484,7 +490,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @example * `List("a", "b", "c").zipWithIndex == List(("a", 0), ("b", 1), ("c", 2))` */ - def zipWithIndex: CC[(A @uncheckedVariance, Int)] + def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} /** Splits this $coll into a prefix/suffix pair according to a predicate. * @@ -497,7 +503,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a pair consisting of the longest prefix of this $coll whose * elements all satisfy `p`, and the rest of this $coll. */ - def span(p: A => Boolean): (C, C) + def span(p: A => Boolean): (C^{this, p}, C^{this, p}) /** Splits this $coll into a prefix/suffix pair at a given position. * @@ -509,7 +515,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a pair of ${coll}s consisting of the first `n` * elements of this $coll, and the other elements. */ - def splitAt(n: Int): (C, C) = { + def splitAt(n: Int): (C^{this}, C^{this}) = { class Spanner extends runtime.AbstractFunction1[A, Boolean] { var i = 0 def apply(a: A) = i < n && { i += 1 ; true } @@ -527,7 +533,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @tparam U the return type of f * @return The same logical collection as this */ - def tapEach[U](f: A => U): C + def tapEach[U](f: A => U): C^{this, f} /////////////////////////////////////////////////////////////// Concrete methods based on iterator @@ -802,7 +808,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => case _ => Some(reduceLeft(op)) } private final def reduceLeftOptionIterator[B >: A](op: (B, A) => B): Option[B] = reduceOptionIterator[A, B](iterator)(op) - private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X])(op: (B, X) => B): Option[B] = { + private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X]^)(op: (B, X) => B): Option[B] = { if (it.hasNext) { var acc: B = it.next() while (it.hasNext) @@ -884,7 +890,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) + def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. * @@ -901,7 +907,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. 
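    * A sketch of why the element type now carries the experimental `sealed` modifier
    * (illustrative only; `firstTwo` is a made-up helper): allocating an `Array[B]`
    * is only sound when `B` cannot be instantiated to a capturing type:
    * {{{
    * import scala.reflect.ClassTag
    * def firstTwo[sealed B: ClassTag](it: Iterator[B]^): Array[B] = {
    *   val xs = new Array[B](2) // allocation is permitted because B is sealed
    *   it.copyToArray(xs, 0, 2)
    *   xs
    * }
    * }}}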
* @@ -918,7 +924,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * * @note Reuse: $consumesIterator */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val it = iterator var i = start val end = start + math.min(len, xs.length - start) @@ -1041,35 +1047,12 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return the first element of this $coll with the largest value measured by function f * with respect to the ordering `cmp`. */ - def maxBy[B](f: A => B)(implicit ord: Ordering[B]): A = + def maxBy[B](f: A -> B)(implicit ord: Ordering[B]): A = knownSize match { case 0 => throw new UnsupportedOperationException("empty.maxBy") case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).result } - private class Maximized[X, B](descriptor: String)(f: X => B)(cmp: (B, B) => Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { - var maxElem: X = null.asInstanceOf[X] - var maxF: B = null.asInstanceOf[B] - var nonEmpty = false - def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None - def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") - def apply(m: Maximized[X, B], a: X): Maximized[X, B] = - if (m.nonEmpty) { - val fa = f(a) - if (cmp(fa, maxF)) { - maxF = fa - maxElem = a - } - m - } - else { - m.nonEmpty = true - m.maxElem = a - m.maxF = f(a) - m - } - } - /** Finds the first element which yields the largest value measured by function f. * * $willNotTerminateInf @@ -1080,7 +1063,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return an option value containing the first element of this $coll with the * largest value measured by function f with respect to the ordering `cmp`. */ - def maxByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = + def maxByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = knownSize match { case 0 => None case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).toOption @@ -1097,7 +1080,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return the first element of this $coll with the smallest value measured by function f * with respect to the ordering `cmp`. */ - def minBy[B](f: A => B)(implicit ord: Ordering[B]): A = + def minBy[B](f: A -> B)(implicit ord: Ordering[B]): A = knownSize match { case 0 => throw new UnsupportedOperationException("empty.minBy") case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).result @@ -1114,7 +1097,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * with the smallest value measured by function f * with respect to the ordering `cmp`. 
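    * Note that `f` now has the pure function type `A -> B` rather than `A => B`:
    * the selector is stored in the internal `Maximized` accumulator, so it must not
    * capture capabilities. A sketch (illustrative only, given some `names: List[String]`):
    * {{{
    * names.minByOption(_.length) // accepted: the selector is pure
    * // a selector closing over a tracked capability would be rejected here
    * }}}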
*/ - def minByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = + def minByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = knownSize match { case 0 => None case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).toOption @@ -1310,7 +1293,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(this) @deprecated("Use .iterator instead of .toIterator", "2.13.0") - @`inline` final def toIterator: Iterator[A] = iterator + @`inline` final def toIterator: Iterator[A]^{this} = iterator def toList: immutable.List[A] = immutable.List.from(this) @@ -1330,13 +1313,13 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => @deprecated("Use .to(LazyList) instead of .toStream", "2.13.0") @`inline` final def toStream: immutable.Stream[A] = to(immutable.Stream) - @`inline` final def toBuffer[B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) + @`inline` final def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) /** Convert collection to array. * * Implementation note: DO NOT call [[Array.from]] from this method. */ - def toArray[B >: A: ClassTag]: Array[B] = + def toArray[sealed B >: A: ClassTag]: Array[B] = if (knownSize >= 0) { val destination = new Array[B](knownSize) copyToArray(destination, 0) @@ -1352,3 +1335,31 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => xs } } + +object IterableOnceOps: + + // Moved out of trait IterableOnceOps to here, since universal traits cannot + // have nested classes in Scala 3 + private class Maximized[X, B](descriptor: String)(f: X -> B)(cmp: (B, B) -> Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { + var maxElem: X @uncheckedCaptures = null.asInstanceOf[X] + var maxF: B @uncheckedCaptures = null.asInstanceOf[B] + var nonEmpty = false + def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None + def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") + def apply(m: Maximized[X, B], a: X): Maximized[X, B] = + if (m.nonEmpty) { + val fa = f(a) + if (cmp(fa, maxF)) { + maxF = fa + maxElem = a + } + m + } + else { + m.nonEmpty = true + m.maxElem = a + m.maxF = f(a) + m + } + } +end IterableOnceOps \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index 4b8338ed1b17..90fd387069b0 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -14,8 +14,11 @@ package scala.collection import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ImmutableBuilder} import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance +import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import scala.runtime.Statics +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures + /** Iterators are data structures that allow to iterate over a sequence * of elements. They have a `hasNext` method for checking @@ -71,7 +74,8 @@ import scala.runtime.Statics * iterators as well. * @define coll iterator */ -trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { self => +trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { + self: Iterator[A]^ => /** Check if there is a next element available. 
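    * Since the trait now takes the self type `Iterator[A]^`, an iterator value may
    * itself capture capabilities, and code that consumes one should accept the
    * tracked type. A sketch (illustrative only; `count` is a made-up helper):
    * {{{
    * def count(it: Iterator[Int]^): Int = {
    *   var n = 0
    *   while (it.hasNext) { it.next(); n += 1 }
    *   n
    * }
    * }}}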
* @@ -93,7 +97,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite @throws[NoSuchElementException] def next(): A - @inline final def iterator = this + @inline final def iterator: Iterator[A]^{this} = this /** Wraps the value of `next()` in an option. * @@ -117,7 +121,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @return a buffered iterator producing the same values as this iterator. * @note Reuse: $consumesAndProducesIterator */ - def buffered: BufferedIterator[A] = new AbstractIterator[A] with BufferedIterator[A] { + def buffered: BufferedIterator[A]^{this} = new AbstractIterator[A] with BufferedIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false @@ -153,16 +157,16 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * A `GroupedIterator` is yielded by `grouped` and by `sliding`, * where the `step` may differ from the group `size`. */ - class GroupedIterator[B >: A](self: Iterator[B], size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { + class GroupedIterator[B >: A](self: Iterator[B]^, size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - private[this] var buffer: Array[B] = null // current result - private[this] var prev: Array[B] = null // if sliding, overlap from previous result + private[this] var buffer: Array[B @uncheckedCaptures] = null // current result + private[this] var prev: Array[B @uncheckedCaptures] = null // if sliding, overlap from previous result private[this] var first = true // if !first, advancing may skip ahead private[this] var filled = false // whether the buffer is "hot" private[this] var partial = true // whether to emit partial sequence - private[this] var padding: () => B = null // what to pad short sequences with + private[this] var padding: () -> B @uncheckedCaptures = null // what to pad short sequences with private[this] def pad = padding != null // irrespective of partial flag private[this] def newBuilder = { val b = ArrayBuilder.make[Any] @@ -185,7 +189,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPartial`. * @group Configuration */ - def withPadding(x: => B): this.type = { + def withPadding(x: -> B): this.type = { padding = () => x partial = true // redundant, as padding always results in complete segment this @@ -254,7 +258,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } // segment must have data, and must be complete unless they allow partial val ok = index > 0 && (partial || index == size) - if (ok) buffer = builder.result().asInstanceOf[Array[B]] + if (ok) buffer = builder.result().asInstanceOf[Array[B @uncheckedCaptures]] else prev = null ok } @@ -291,7 +295,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * all elements of this $coll followed by the minimal number of occurrences of `elem` so * that the resulting collection has a length of at least `len`. */ - def padTo[B >: A](len: Int, elem: B): Iterator[B] = new AbstractIterator[B] { + def padTo[B >: A](len: Int, elem: B): Iterator[B]^{this} = new AbstractIterator[B] { private[this] var i = 0 override def knownSize: Int = { @@ -321,7 +325,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * is the same as in the original iterator. 
   * @note Reuse: $consumesOneAndProducesTwoIterators
   */
-  def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = {
+  def partition(p: A => Boolean): (Iterator[A]^{this, p}, Iterator[A]^{this, p}) = {
     val (a, b) = duplicate
     (a filter p, b filterNot p)
   }
@@ -341,7 +345,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
    *
    * @note Reuse: $consumesAndProducesIterator
    */
-  def grouped[B >: A](size: Int): GroupedIterator[B] =
+  def grouped[B >: A](size: Int): GroupedIterator[B]^{this} =
     new GroupedIterator[B](self, size, size)

   /** Returns an iterator which presents a "sliding window" view of
@@ -377,13 +381,13 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
    *
    * @note Reuse: $consumesAndProducesIterator
    */
-  def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B] =
+  def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B]^{this} =
     new GroupedIterator[B](self, size, step)

-  def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] {
+  def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B]^{this, op} = new AbstractIterator[B] {
     // We use an intermediate iterator that iterates through the first element `z`
     // and then that will be modified to iterate through the collection
-    private[this] var current: Iterator[B] =
+    private[this] var current: Iterator[B]^{self, op} =
       new AbstractIterator[B] {
         override def knownSize = {
           val thisSize = self.knownSize
@@ -412,7 +416,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
   }

   @deprecated("Call scanRight on an Iterable instead.", "2.13.0")
-  def scanRight[B](z: B)(op: (A, B) => B): Iterator[B] = ArrayBuffer.from(this).scanRight(z)(op).iterator
+  def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} =
+    ArrayBuffer.from[A @uncheckedCaptures](this).scanRight(z)(op).iterator
+    // @uncheckedCaptures is safe since the ArrayBuffer is local temporary storage

   def indexWhere(p: A => Boolean, from: Int = 0): Int = {
     var i = math.max(from, 0)
@@ -465,11 +471,11 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
   @deprecatedOverriding("isEmpty is defined as !hasNext; override hasNext instead", "2.13.0")
   override def isEmpty: Boolean = !hasNext

-  def filter(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = false)
+  def filter(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = false)

-  def filterNot(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = true)
+  def filterNot(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = true)

-  private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A] = new AbstractIterator[A] {
+  private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] {
     private[this] var hd: A = _
     private[this] var hdDefined: Boolean = false

@@ -479,9 +485,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
       while (p(hd) == isFlipped) {
         if (!self.hasNext) return false
         hd = self.next()
-        }
-        hdDefined = true
-        true
+      }
+      hdDefined = true
+      true
     }

     def next() =
@@ -503,9 +509,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
    * @return an iterator which produces those values of this iterator which satisfy the predicate `p`.
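    * Guards in `for` comprehensions desugar to `withFilter`, so the predicate's
    * capture set flows into the result there as well. A sketch (illustrative only,
    * not part of this patch; `evens` is a made-up helper):
    * {{{
    * def evens(it: Iterator[Int]^): Iterator[Int]^{it} =
    *   for (x <- it if x % 2 == 0) yield x // pure guard: result captures only it
    * }}}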
* @note Reuse: $consumesAndProducesIterator */ - def withFilter(p: A => Boolean): Iterator[A] = filter(p) + def withFilter(p: A => Boolean): Iterator[A]^{this, p} = filter(p) - def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] with (A => B) { + def collect[B](pf: PartialFunction[A, B]^): Iterator[B]^{this, pf} = new AbstractIterator[B] with (A -> B) { // Manually buffer to avoid extra layer of wrapping with buffered private[this] var hd: B = _ @@ -541,7 +547,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesIterator */ - def distinct: Iterator[A] = distinctBy(identity) + def distinct: Iterator[A]^{this} = distinctBy(identity) /** * Builds a new iterator from this one without any duplicated elements as determined by `==` after applying @@ -553,9 +559,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesIterator */ - def distinctBy[B](f: A => B): Iterator[A] = new AbstractIterator[A] { + def distinctBy[B](f: A -> B): Iterator[A]^{this} = new AbstractIterator[A] { - private[this] val traversedValues = mutable.HashSet.empty[B] + private[this] val traversedValues = mutable.HashSet.empty[B @uncheckedCaptures] private[this] var nextElementDefined: Boolean = false private[this] var nextElement: A = _ @@ -578,14 +584,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def map[B](f: A => B): Iterator[B] = new AbstractIterator[B] { + def map[B](f: A => B): Iterator[B]^{this, f} = new AbstractIterator[B] { override def knownSize = self.knownSize def hasNext = self.hasNext def next() = f(self.next()) } - def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = new AbstractIterator[B] { - private[this] var cur: Iterator[B] = Iterator.empty + def flatMap[B](f: A => IterableOnce[B]^): Iterator[B]^{this, f} = new AbstractIterator[B] { + private[this] var cur: Iterator[B]^{f} = Iterator.empty /** Trillium logic boolean: -1 = unknown, 0 = false, 1 = true */ private[this] var _hasNext: Int = -1 @@ -619,19 +625,19 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def flatten[B](implicit ev: A => IterableOnce[B]): Iterator[B] = + def flatten[B](implicit ev: A -> IterableOnce[B]): Iterator[B]^{this} = flatMap[B](ev) - def concat[B >: A](xs: => IterableOnce[B]): Iterator[B] = new Iterator.ConcatIterator[B](self).concat(xs) + def concat[B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = new Iterator.ConcatIterator[B](self).concat(xs) - @`inline` final def ++ [B >: A](xs: => IterableOnce[B]): Iterator[B] = concat(xs) + @`inline` final def ++ [B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = concat(xs) - def take(n: Int): Iterator[A] = sliceIterator(0, n max 0) + def take(n: Int): Iterator[A]^{this} = sliceIterator(0, n max 0) - def takeWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { + def takeWhile(p: A => Boolean): Iterator[A]^{self, p} = new AbstractIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false - private[this] var tail: Iterator[A] = self + private[this] var tail: Iterator[A]^{self} = self def hasNext = hdDefined || tail.hasNext && { hd = tail.next() @@ -642,9 +648,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def next() = if (hasNext) { hdDefined = false; hd } else Iterator.empty.next() } - def drop(n: Int): Iterator[A] = sliceIterator(n, -1) + def drop(n: Int): 
Iterator[A]^{this} = sliceIterator(n, -1) - def dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { + def dropWhile(p: A => Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] { // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator private[this] var status = -1 // Local buffering to avoid double-wrap with .buffered @@ -680,7 +686,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesOneAndProducesTwoIterators */ - def span(p: A => Boolean): (Iterator[A], Iterator[A]) = { + def span(p: A => Boolean): (Iterator[A]^{this, p}, Iterator[A]^{this, p}) = { /* * Giving a name to following iterator (as opposed to trailing) because * anonymous class is represented as a structural type that trailing @@ -698,7 +704,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ private[this] var status = 0 private def store(a: A): Unit = { - if (lookahead == null) lookahead = new mutable.Queue[A] + if (lookahead == null) lookahead = new mutable.Queue[A @uncheckedCaptures] lookahead += a } def hasNext = { @@ -779,10 +785,10 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite (leading, trailing) } - def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0) + def slice(from: Int, until: Int): Iterator[A]^{this} = sliceIterator(from, until max 0) /** Creates an optionally bounded slice, unbounded if `until` is negative. */ - protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { val lo = from max 0 val rest = if (until < 0) -1 // unbounded @@ -793,14 +799,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite else new Iterator.SliceIterator(this, lo, rest) } - def zip[B](that: IterableOnce[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { + def zip[B](that: IterableOnce[B]^): Iterator[(A, B)]^{this, that} = new AbstractIterator[(A, B)] { val thatIterator = that.iterator override def knownSize = self.knownSize min thatIterator.knownSize def hasNext = self.hasNext && thatIterator.hasNext def next() = (self.next(), thatIterator.next()) } - def zipAll[A1 >: A, B](that: IterableOnce[B], thisElem: A1, thatElem: B): Iterator[(A1, B)] = new AbstractIterator[(A1, B)] { + def zipAll[A1 >: A, B](that: IterableOnce[B]^, thisElem: A1, thatElem: B): Iterator[(A1, B)]^{this, that} = new AbstractIterator[(A1, B)] { val thatIterator = that.iterator override def knownSize = { val thisSize = self.knownSize @@ -817,7 +823,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def zipWithIndex: Iterator[(A, Int)] = new AbstractIterator[(A, Int)] { + def zipWithIndex: Iterator[(A, Int)]^{this} = new AbstractIterator[(A, Int)] { var idx = 0 override def knownSize = self.knownSize def hasNext = self.hasNext @@ -837,7 +843,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @inheritdoc */ - def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { val those = that.iterator while (hasNext && those.hasNext) if (next() != those.next()) @@ -860,9 +866,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * iterated by one iterator but not yet by the other. 
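    * Both halves capture `this`, since they share the original iterator plus a
    * queue of pending elements. A sketch (illustrative only; `minMax` is a
    * made-up helper):
    * {{{
    * def minMax(it: Iterator[Int]^): (Int, Int) = {
    *   val (a, b) = it.duplicate
    *   (a.min, b.max)
    * }
    * }}}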
* @note Reuse: $consumesOneAndProducesTwoIterators */ - def duplicate: (Iterator[A], Iterator[A]) = { - val gap = new scala.collection.mutable.Queue[A] - var ahead: Iterator[A] = null + def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = { + val gap = new scala.collection.mutable.Queue[A @uncheckedCaptures] + var ahead: Iterator[A @uncheckedCaptures] = null // ahead is captured by Partner, so A is not recognized as parametric class Partner extends AbstractIterator[A] { override def knownSize: Int = self.synchronized { val thisSize = self.knownSize @@ -904,7 +910,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @param replaced The number of values in the original iterator that are replaced by the patch. * @note Reuse: $consumesTwoAndProducesOneIterator */ - def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = + def patch[B >: A](from: Int, patchElems: Iterator[B]^, replaced: Int): Iterator[B]^{this, patchElems} = new AbstractIterator[B] { private[this] var origElems = self // > 0 => that many more elems from `origElems` before switching to `patchElems` @@ -944,7 +950,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - override def tapEach[U](f: A => U): Iterator[A] = new AbstractIterator[A] { + override def tapEach[U](f: A => U): Iterator[A]^{this, f} = new AbstractIterator[A] { override def knownSize = self.knownSize override def hasNext = self.hasNext override def next() = { @@ -981,7 +987,7 @@ object Iterator extends IterableFactory[Iterator] { * @tparam A the type of the collection’s elements * @return a new $coll with the elements of `source` */ - override def from[A](source: IterableOnce[A]): Iterator[A] = source.iterator + override def from[A](source: IterableOnce[A]^): Iterator[A]^{source} = source.iterator /** The iterator which produces no values. */ @`inline` final def empty[T]: Iterator[T] = _empty @@ -1012,7 +1018,7 @@ object Iterator extends IterableFactory[Iterator] { * @param elem the element computation * @return An iterator that produces the results of `n` evaluations of `elem`. */ - override def fill[A](len: Int)(elem: => A): Iterator[A] = new AbstractIterator[A] { + override def fill[A](len: Int)(elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] { private[this] var i = 0 override def knownSize: Int = (len - i) max 0 def hasNext: Boolean = i < len @@ -1027,7 +1033,7 @@ object Iterator extends IterableFactory[Iterator] { * @param f The function computing element values * @return An iterator that produces the values `f(0), ..., f(n -1)`. 
*/ - override def tabulate[A](end: Int)(f: Int => A): Iterator[A] = new AbstractIterator[A] { + override def tabulate[A](end: Int)(f: Int => A): Iterator[A]^{f} = new AbstractIterator[A] { private[this] var i = 0 override def knownSize: Int = (end - i) max 0 def hasNext: Boolean = i < end @@ -1100,7 +1106,7 @@ object Iterator extends IterableFactory[Iterator] { * @param f the function that's repeatedly applied * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...` */ - def iterate[T](start: T)(f: T => T): Iterator[T] = new AbstractIterator[T] { + def iterate[T](start: T)(f: T => T): Iterator[T]^{f} = new AbstractIterator[T] { private[this] var first = true private[this] var acc = start def hasNext: Boolean = true @@ -1122,7 +1128,7 @@ object Iterator extends IterableFactory[Iterator] { * @tparam S Type of the internal state * @return an Iterator that produces elements using `f` until `f` returns `None` */ - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A] = new UnfoldIterator(init)(f) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A]^{f} = new UnfoldIterator(init)(f) /** Creates an infinite-length iterator returning the results of evaluating an expression. * The expression is recomputed for every element. @@ -1130,7 +1136,7 @@ object Iterator extends IterableFactory[Iterator] { * @param elem the element computation. * @return the iterator containing an infinite number of results of evaluating `elem`. */ - def continually[A](elem: => A): Iterator[A] = new AbstractIterator[A] { + def continually[A](elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] { def hasNext = true def next() = elem } @@ -1138,9 +1144,10 @@ object Iterator extends IterableFactory[Iterator] { /** Creates an iterator to which other iterators can be appended efficiently. * Nested ConcatIterators are merged to avoid blowing the stack. 
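   *  The concatenated iterator retains both operands, as reflected in `concat`'s
   *  result type `Iterator[B]^{this, that}`. A sketch (illustrative only, not part
   *  of this patch):
   *  {{{
   *  def both(a: Iterator[Int]^, b: Iterator[Int]^): Iterator[Int]^{a, b} = a ++ b
   *  }}}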
   */
-  private final class ConcatIterator[+A](private var current: Iterator[A @uncheckedVariance]) extends AbstractIterator[A] {
-    private var tail: ConcatIteratorCell[A @uncheckedVariance] = null
-    private var last: ConcatIteratorCell[A @uncheckedVariance] = null
+  private final class ConcatIterator[+A](val from: Iterator[A]^) extends AbstractIterator[A] {
+    private var current: Iterator[A @uncheckedCaptures]^{cap[ConcatIterator]} = from
+    private var tail: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null
+    private var last: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null
     private var currentHasNextChecked = false

     def hasNext =
@@ -1194,8 +1201,8 @@ object Iterator extends IterableFactory[Iterator] {
         current.next()
       } else Iterator.empty.next()

-    override def concat[B >: A](that: => IterableOnce[B]): Iterator[B] = {
-      val c = new ConcatIteratorCell[B](that, null).asInstanceOf[ConcatIteratorCell[A]]
+    override def concat[B >: A](that: => IterableOnce[B]^): Iterator[B]^{this, that} = {
+      val c: ConcatIteratorCell[A] = new ConcatIteratorCell[B](that, null).asInstanceOf
       if (tail == null) {
         tail = c
         last = c
@@ -1209,14 +1216,14 @@ object Iterator extends IterableFactory[Iterator] {
     }
   }

-  private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A], var tail: ConcatIteratorCell[A]) {
-    def headIterator: Iterator[A] = head.iterator
+  private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A]^, var tail: ConcatIteratorCell[A @uncheckedCaptures]) {
+    def headIterator: Iterator[A]^{this} = head.iterator // CC todo: can't use {head} as capture set, gives "cannot establish a reference"
   }

   /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded.
    * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing.
    */
-  private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] {
+  private[scala] final class SliceIterator[A](val underlying: Iterator[A]^, start: Int, limit: Int) extends AbstractIterator[A] {
     private[this] var remaining = limit
     private[this] var dropping = start
     @inline private def unbounded = remaining < 0
@@ -1247,7 +1254,7 @@ object Iterator extends IterableFactory[Iterator] {
       else if (unbounded) underlying.next()
       else empty.next()
     }
-    override protected def sliceIterator(from: Int, until: Int): Iterator[A] = {
+    override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{underlying} = {
       val lo = from max 0
       def adjustedBound =
         if (unbounded) -1
@@ -1269,9 +1276,9 @@ object Iterator extends IterableFactory[Iterator] {
   /** Creates an iterator that uses a function `f` to produce elements of
    * type `A` and update an internal state of type `S`.
    */
   private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)]) extends AbstractIterator[A] {
-    private[this] var state: S = init
-    private[this] var nextResult: Option[(A, S)] = null
+    private[this] var state: S @uncheckedCaptures = init
+    private[this] var nextResult: Option[(A, S)] @uncheckedCaptures = null

     override def hasNext: Boolean = {
       if (nextResult eq null) {
@@ -1297,4 +1304,5 @@ object Iterator extends IterableFactory[Iterator] {
 }

 /** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses.
*/ -abstract class AbstractIterator[+A] extends Iterator[A] +abstract class AbstractIterator[+A] extends Iterator[A]: + this: Iterator[A]^ => diff --git a/tests/pos-special/stdlib/collection/JavaConverters.scala b/tests/pos-special/stdlib/collection/JavaConverters.scala index 569e4e8c60a7..69130eae1829 100644 --- a/tests/pos-special/stdlib/collection/JavaConverters.scala +++ b/tests/pos-special/stdlib/collection/JavaConverters.scala @@ -17,6 +17,7 @@ import java.{lang => jl, util => ju} import scala.collection.convert._ import scala.language.implicitConversions +import language.experimental.captureChecking /** A variety of decorators that enable converting between * Scala and Java collections using extension methods, `asScala` and `asJava`. diff --git a/tests/pos-special/stdlib/collection/LazyZipOps.scala b/tests/pos-special/stdlib/collection/LazyZipOps.scala index 0553eb8edf7f..1bb4173d219f 100644 --- a/tests/pos-special/stdlib/collection/LazyZipOps.scala +++ b/tests/pos-special/stdlib/collection/LazyZipOps.scala @@ -13,6 +13,7 @@ package scala.collection import scala.language.implicitConversions +import language.experimental.captureChecking /** Decorator representing lazily zipped pairs. * @@ -21,7 +22,7 @@ import scala.language.implicitConversions * * Note: will not terminate for infinite-sized collections. */ -final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1], coll2: Iterable[El2]) { +final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1]^, coll2: Iterable[El2]^) { /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator. @@ -31,7 +32,7 @@ final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterabl * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]): LazyZip3[El1, El2, B, C1] = new LazyZip3(src, coll1, coll2, that) + def lazyZip[B](that: Iterable[B]^): LazyZip3[El1, El2, B, C1]^{this, that} = new LazyZip3(src, coll1, coll2, that) def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = { bf.fromSpecific(src)(new AbstractView[B] { @@ -147,9 +148,9 @@ object LazyZip2 { * Note: will not terminate for infinite-sized collections. */ final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, - coll1: Iterable[El1], - coll2: Iterable[El2], - coll3: Iterable[El3]) { + coll1: Iterable[El1]^, + coll2: Iterable[El2]^, + coll3: Iterable[El3]^) { /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator. @@ -159,7 +160,7 @@ final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples. * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported. 
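    * A sketch of the laziness (illustrative only; assumes three `List[Int]` values
    * `xs`, `ys` and `zs`):
    * {{{
    * val zipped = xs lazyZip ys lazyZip zs // no elements are consumed yet
    * val sums = zipped.map(_ + _ + _)      // strict: traverses all three lists
    * }}}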
*/ - def lazyZip[B](that: Iterable[B]): LazyZip4[El1, El2, El3, B, C1] = new LazyZip4(src, coll1, coll2, coll3, that) + def lazyZip[B](that: Iterable[B]^): LazyZip4[El1, El2, El3, B, C1]^{this, that} = new LazyZip4(src, coll1, coll2, coll3, that) def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = { bf.fromSpecific(src)(new AbstractView[B] { @@ -288,10 +289,10 @@ object LazyZip3 { * Note: will not terminate for infinite-sized collections. */ final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, - coll1: Iterable[El1], - coll2: Iterable[El2], - coll3: Iterable[El3], - coll4: Iterable[El4]) { + coll1: Iterable[El1]^, + coll2: Iterable[El2]^, + coll3: Iterable[El3]^, + coll4: Iterable[El4]^) { def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = { bf.fromSpecific(src)(new AbstractView[B] { diff --git a/tests/pos-special/stdlib/collection/LinearSeq.scala b/tests/pos-special/stdlib/collection/LinearSeq.scala index 449d58c866e3..393f5fda4187 100644 --- a/tests/pos-special/stdlib/collection/LinearSeq.scala +++ b/tests/pos-special/stdlib/collection/LinearSeq.scala @@ -14,6 +14,7 @@ package scala package collection import scala.annotation.{nowarn, tailrec} +import language.experimental.captureChecking /** Base trait for linearly accessed sequences that have efficient `head` and * `tail` operations. @@ -32,7 +33,7 @@ trait LinearSeq[+A] extends Seq[A] object LinearSeq extends SeqFactory.Delegate[LinearSeq](immutable.LinearSeq) /** Base trait for linear Seq operations */ -trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends Any with SeqOps[A, CC, C] { +trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends AnyRef with SeqOps[A, CC, C] { /** @inheritdoc * @@ -96,7 +97,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq else loop(0, coll) } - override def lengthCompare(that: Iterable[_]): Int = { + override def lengthCompare(that: Iterable[_]^): Int = { val thatKnownSize = that.knownSize if (thatKnownSize >= 0) this lengthCompare thatKnownSize @@ -186,7 +187,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq acc } - override def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + override def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { @tailrec def linearSeqEq(a: LinearSeq[B], b: LinearSeq[B]): Boolean = (a eq b) || { if (a.nonEmpty && b.nonEmpty && a.head == b.head) { @@ -259,7 +260,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq } } -trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends Any with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { +trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends AnyRef with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { // A more efficient iterator implementation than the default LinearSeqIterator override def iterator: Iterator[A] = new AbstractIterator[A] { private[this] var current = StrictOptimizedLinearSeqOps.this diff --git a/tests/pos-special/stdlib/collection/Map.scala b/tests/pos-special/stdlib/collection/Map.scala index 0fb6df9a06dc..8ab25a3c13e0 100644 --- a/tests/pos-special/stdlib/collection/Map.scala +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -17,13 +17,16 
@@ import scala.annotation.nowarn import scala.collection.generic.DefaultSerializable import scala.collection.mutable.StringBuilder import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure /** Base Map type */ trait Map[K, +V] extends Iterable[(K, V)] with MapOps[K, V, Map, Map[K, V]] with MapFactoryDefaults[K, V, Map, Iterable] - with Equals { + with Equals + with Pure { def mapFactory: scala.collection.MapFactory[Map] = Map @@ -101,8 +104,9 @@ trait Map[K, +V] trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] extends IterableOps[(K, V), Iterable, C] with PartialFunction[K, V] { + this: MapOps[K, V, CC, C]^ => - override def view: MapView[K, V] = new MapView.Id(this) + override def view: MapView[K, V]^{this} = new MapView.Id(this) /** Returns a [[Stepper]] for the keys of this map. See method [[stepper]]. */ def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = { @@ -131,7 +135,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] /** Similar to `fromIterable`, but returns a Map collection type. * Note that the return type is now `CC[K2, V2]`. */ - @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]): CC[K2, V2] = mapFactory.from(it) + @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]^): CC[K2, V2] = mapFactory.from(it) /** The companion object of this map, providing various factory methods. * @@ -251,7 +255,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * the predicate `p`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") - def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) + def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. @@ -259,7 +263,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.mapValues(f). A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") - def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) + def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) /** Defines the default value computation for the map, * returned when a key is not found @@ -318,7 +322,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. 
The element type of the $coll is the most specific superclass encompassing @@ -328,7 +332,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll which contains all elements * of this $coll followed by all elements of `suffix`. */ - def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): CC[K, V2] = mapFactory.from(suffix match { + def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): CC[K, V2] = mapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) @@ -336,7 +340,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] // Not final because subclasses refine the result type, e.g. in SortedMap, the result type is // SortedMap's CC, while Map's CC is fixed to Map /** Alias for `concat` */ - /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = iterator.map { case (k, v) => s"$k -> $v" }.addString(sb, start, sep, end) @@ -350,14 +354,14 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] mapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) @deprecated("Consider requiring an immutable Map.", "2.13.0") - @`inline` def -- (keys: IterableOnce[K]): C = { + @`inline` def -- (keys: IterableOnce[K]^): C = { lazy val keysSet = keys.iterator.to(immutable.Set) - fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))) + fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))).unsafeAssumePure } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") - def ++: [V1 >: V](that: IterableOnce[(K,V1)]): CC[K,V1] = { - val thatIterable: Iterable[(K, V1)] = that match { + def ++: [V1 >: V](that: IterableOnce[(K,V1)]^): CC[K,V1] = { + val thatIterable: Iterable[(K, V1)]^{that} = that match { case that: Iterable[(K, V1)] => that case that => View.from(that) } @@ -373,17 +377,17 @@ object MapOps { */ @SerialVersionUID(3L) class WithFilter[K, +V, +IterableCC[_], +CC[_, _] <: IterableOps[_, AnyConstr, _]]( - self: MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _], + self: (MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _])^, p: ((K, V)) => Boolean ) extends IterableOps.WithFilter[(K, V), IterableCC](self, p) with Serializable { - def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2]^{this, f} = self.mapFactory.from(new View.Map(filtered, f)) - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2]^{this, f} = self.mapFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC] = + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{this, q} = new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) } diff --git a/tests/pos-special/stdlib/collection/MapView.scala b/tests/pos-special/stdlib/collection/MapView.scala index 7f84178a7c16..ac9e88466052 100644 --- a/tests/pos-special/stdlib/collection/MapView.scala +++ b/tests/pos-special/stdlib/collection/MapView.scala @@ -15,54 +15,57 @@ package scala.collection import scala.annotation.nowarn import 
scala.collection.MapView.SomeMapOps import scala.collection.mutable.Builder +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure trait MapView[K, +V] extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]] with View[(K, V)] { + this: MapView[K, V]^ => - override def view: MapView[K, V] = this + override def view: MapView[K, V]^{this} = this // Ideally this returns a `View`, but bincompat /** Creates a view over all keys of this map. * * @return the keys of this map as a view. */ - override def keys: Iterable[K] = new MapView.Keys(this) + override def keys: Iterable[K]^{this} = new MapView.Keys(this) // Ideally this returns a `View`, but bincompat /** Creates a view over all values of this map. * * @return the values of this map as a view. */ - override def values: Iterable[V] = new MapView.Values(this) + override def values: Iterable[V]^{this} = new MapView.Values(this) /** Filters this map by retaining only keys satisfying a predicate. * @param p the predicate used to test keys * @return an immutable map consisting only of those key value pairs of this map where the key satisfies * the predicate `p`. The resulting map wraps the original map without copying any elements. */ - override def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) + override def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. * @return a map view which maps every key of this map * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ - override def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) + override def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) - override def filter(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, false, pred) + override def filter(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, false, pred) - override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, true, pred) + override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, true, pred) - override def partition(p: ((K, V)) => Boolean): (MapView[K, V], MapView[K, V]) = (filter(p), filterNot(p)) + override def partition(p: ((K, V)) => Boolean): (MapView[K, V]^{this, p}, MapView[K, V]^{this, p}) = (filter(p), filterNot(p)) - override def tapEach[U](f: ((K, V)) => U): MapView[K, V] = new MapView.TapEach(this, f) + override def tapEach[U](f: ((K, V)) => U): MapView[K, V]^{this, f} = new MapView.TapEach(this, f) def mapFactory: MapViewFactory = MapView override def empty: MapView[K, V] = mapFactory.empty - override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l] = new MapOps.WithFilter(this, p) + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l]^{this, p} = new MapOps.WithFilter(this, p) override def toString: String = super[View].toString @@ -78,7 +81,9 @@ object MapView extends MapViewFactory { type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _] @SerialVersionUID(3L) - private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { + object EmptyMapView extends AbstractMapView[Any, Nothing] { + // !!! 
cc problem: crash when we replace the line with + // private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { override def get(key: Any): Option[Nothing] = None override def iterator: Iterator[Nothing] = Iterator.empty[Nothing] override def knownSize: Int = 0 @@ -91,48 +96,48 @@ object MapView extends MapViewFactory { } @SerialVersionUID(3L) - class Id[K, +V](underlying: SomeMapOps[K, V]) extends AbstractMapView[K, V] { + class Id[K, +V](underlying: SomeMapOps[K, V]^) extends AbstractMapView[K, V] { def get(key: K): Option[V] = underlying.get(key) - def iterator: Iterator[(K, V)] = underlying.iterator + def iterator: Iterator[(K, V)]^{this} = underlying.iterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } // Ideally this is public, but bincompat @SerialVersionUID(3L) - private class Keys[K](underlying: SomeMapOps[K, _]) extends AbstractView[K] { - def iterator: Iterator[K] = underlying.keysIterator + private class Keys[K](underlying: SomeMapOps[K, _]^) extends AbstractView[K] { + def iterator: Iterator[K]^{this} = underlying.keysIterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } // Ideally this is public, but bincompat @SerialVersionUID(3L) - private class Values[+V](underlying: SomeMapOps[_, V]) extends AbstractView[V] { - def iterator: Iterator[V] = underlying.valuesIterator + private class Values[+V](underlying: SomeMapOps[_, V]^) extends AbstractView[V] { + def iterator: Iterator[V]^{this} = underlying.valuesIterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class MapValues[K, +V, +W](underlying: SomeMapOps[K, V], f: V => W) extends AbstractMapView[K, W] { - def iterator: Iterator[(K, W)] = underlying.iterator.map(kv => (kv._1, f(kv._2))) + class MapValues[K, +V, +W](underlying: SomeMapOps[K, V]^, f: V => W) extends AbstractMapView[K, W] { + def iterator: Iterator[(K, W)]^{this} = underlying.iterator.map(kv => (kv._1, f(kv._2))) def get(key: K): Option[W] = underlying.get(key).map(f) override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class FilterKeys[K, +V](underlying: SomeMapOps[K, V], p: K => Boolean) extends AbstractMapView[K, V] { - def iterator: Iterator[(K, V)] = underlying.iterator.filter { case (k, _) => p(k) } + class FilterKeys[K, +V](underlying: SomeMapOps[K, V]^, p: K => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filter { case (k, _) => p(k) } def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } @SerialVersionUID(3L) - class Filter[K, +V](underlying: SomeMapOps[K, V], isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { - def iterator: Iterator[(K, V)] = underlying.iterator.filterImpl(p, isFlipped) + class Filter[K, +V](underlying: SomeMapOps[K, V]^, isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filterImpl(p, isFlipped) def get(key: K): Option[V] = underlying.get(key) match { case s @ Some(v) if p((key, v)) != isFlipped => s case _ => None @@ -142,7 +147,7 @@ object MapView extends MapViewFactory { } @SerialVersionUID(3L) - class TapEach[K, +V, 
+U](underlying: SomeMapOps[K, V], f: ((K, V)) => U) extends AbstractMapView[K, V] { + class TapEach[K, +V, +U](underlying: SomeMapOps[K, V]^, f: ((K, V)) => U) extends AbstractMapView[K, V] { override def get(key: K): Option[V] = { underlying.get(key) match { case s @ Some(v) => @@ -151,18 +156,21 @@ object MapView extends MapViewFactory { case None => None } } - override def iterator: Iterator[(K, V)] = underlying.iterator.tapEach(f) + override def iterator: Iterator[(K, V)]^{this} = underlying.iterator.tapEach(f) override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } - override def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) + override def newBuilder[sealed X, sealed Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]] - override def from[K, V](it: IterableOnce[(K, V)]): View[(K, V)] = View.from(it) + override def from[K, V](it: IterableOnce[(K, V)]^): View[(K, V)] = + View.from(it).unsafeAssumePure + // unsafeAssumePure needed here since MapViewFactory inherits from MapFactory, + // and the latter assumes maps are strict, so from's result captures nothing. - override def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] = it match { + override def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} = it match { case mv: MapView[K, V] => mv case other => new MapView.Id(other) } @@ -176,12 +184,13 @@ trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y) def empty[X, Y]: MapView[X, Y] - def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] + def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) } /** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. 
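
The `unsafeAssumePure` call in `MapView.from` above is the designated escape hatch from `caps.unsafe`: it asserts, without proof, that a value retains no capabilities. A minimal sketch of the pattern, with an illustrative helper name:

    import language.experimental.captureChecking
    import caps.unsafe.unsafeAssumePure

    // Hypothetical helper: we know (but cannot prove to the checker) that
    // the argument is in fact pure, so we strip its capture set by fiat.
    def assumeStrict[A](xs: Iterable[A]^): Iterable[A] =
      xs.unsafeAssumePure

As with casts, soundness rests entirely on the surrounding argument, here that `MapFactory` only ever builds strict maps.
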
*/ @SerialVersionUID(3L) -abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V] +abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V]: + this: AbstractMapView[K, V]^ => diff --git a/tests/pos-special/stdlib/collection/Searching.scala b/tests/pos-special/stdlib/collection/Searching.scala index 874a06449aa9..f5139422e24c 100644 --- a/tests/pos-special/stdlib/collection/Searching.scala +++ b/tests/pos-special/stdlib/collection/Searching.scala @@ -14,6 +14,7 @@ package scala.collection import scala.language.implicitConversions import scala.collection.generic.IsSeq +import language.experimental.captureChecking object Searching { diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala index d960838fdcb7..365a1db1b849 100644 --- a/tests/pos-special/stdlib/collection/Seq.scala +++ b/tests/pos-special/stdlib/collection/Seq.scala @@ -16,6 +16,9 @@ import scala.collection.immutable.Range import scala.util.hashing.MurmurHash3 import Searching.{Found, InsertionPoint, SearchResult} import scala.annotation.nowarn +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure +import scala.annotation.unchecked.uncheckedCaptures /** Base trait for sequence collections * @@ -27,6 +30,7 @@ trait Seq[+A] with SeqOps[A, Seq, Seq[A]] with IterableFactoryDefaults[A, Seq] with Equals { + this: Seq[A] => override def iterableFactory: SeqFactory[Seq] = Seq @@ -74,11 +78,12 @@ object Seq extends SeqFactory.Delegate[Seq](immutable.Seq) * @define coll sequence * @define Coll `Seq` */ -trait SeqOps[+A, +CC[_], +C] extends Any - with IterableOps[A, CC, C] { self => +trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => override def view: SeqView[A] = new SeqView.Id[A](this) + def iterableFactory: FreeSeqFactory[CC] + /** Get the element at the specified index. This operation is provided for convenience in `Seq`. It should * not be assumed to be efficient unless you have an `IndexedSeq`. */ @throws[IndexOutOfBoundsException] @@ -160,13 +165,13 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @return a new $coll which contains all elements of `prefix` followed * by all the elements of this $coll. */ - def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = iterableFactory.from(prefix match { + def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = iterableFactory.from(prefix match { case prefix: Iterable[B] => new View.Concat(prefix, this) case _ => prefix.iterator ++ iterator }) /** Alias for `prependedAll` */ - @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]): CC[B] = prependedAll(prefix) + @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]^): CC[B] = prependedAll(prefix) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. The element type of the $coll is the most specific superclass encompassing @@ -177,14 +182,15 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @return a new collection of type `CC[B]` which contains all elements * of this $coll followed by all elements of `suffix`. 
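
The recurring signature change in this hunk and below, `IterableOnce[B]` becoming `IterableOnce[B]^`, marks the parameter as possibly capturing capabilities, so callers may pass effectful iterators. A hedged sketch of the difference, with made-up helper names:

    import language.experimental.captureChecking

    def consume[A](xs: IterableOnce[A]^): Unit =      // accepts capturing arguments
      xs.iterator.foreach(_ => ())

    def consumePure[A](xs: IterableOnce[A]): Unit =   // pure arguments only
      xs.iterator.foreach(_ => ())

    def evens(limit: Int, log: String => Unit): IterableOnce[Int]^{log} =
      (1 to limit).iterator.filter { i => log(s"checking $i"); i % 2 == 0 }

    // consume(evens(10, someImpureLog))       // ok (someImpureLog is hypothetical)
    // consumePure(evens(10, someImpureLog))   // error: argument captures someImpureLog
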
*/ - def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = super.concat(suffix) + def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] = + super.concat(suffix).unsafeAssumePure /** Alias for `appendedAll` */ - @`inline` final def :++ [B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) + @`inline` final def :++ [B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix) // Make `concat` an alias for `appendedAll` so that it benefits from performance // overrides of this method - @`inline` final override def concat[B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) + @`inline` final override def concat[B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix) /** Produces a new sequence which contains all elements of this $coll and also all elements of * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. @@ -212,7 +218,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @tparam B the type of the elements after being transformed by `f` * @return a new $coll consisting of all the elements of this $coll without duplicates. */ - def distinctBy[B](f: A => B): C = fromSpecific(new View.DistinctBy(this, f)) + def distinctBy[B](f: A -> B): C = fromSpecific(new View.DistinctBy(this, f)) /** Returns new $coll with elements in reversed order. * @@ -231,7 +237,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * * @return an iterator yielding the elements of this $coll in reversed order */ - def reverseIterator: Iterator[A] = reversed.iterator + override def reverseIterator: Iterator[A] = reversed.iterator /** Tests whether this $coll contains the given sequence at a given index. * @@ -243,7 +249,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @return `true` if the sequence `that` is contained in this $coll at * index `offset`, otherwise `false`. */ - def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = { + def startsWith[B >: A](that: IterableOnce[B]^, offset: Int = 0): Boolean = { val i = iterator drop offset val j = that.iterator while (j.hasNext && i.hasNext) @@ -258,7 +264,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @param that the sequence to test * @return `true` if this $coll has `that` as a suffix, `false` otherwise. 
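
Note also that `distinctBy` above now takes `f: A -> B` rather than `A => B`: under capture checking `->` is the pure function arrow, so the discriminator may not close over capabilities, which is what lets the result remain an untracked `C`. A small sketch of the distinction:

    import language.experimental.captureChecking

    def twicePure(f: Int -> Int): Int = f(f(1))   // f must be capture-free
    def twice(f: Int => Int): Int = f(f(1))       // f may capture capabilities

    val log: String => Unit = println(_)
    twice { i => log(i.toString); i + 1 }         // ok
    // twicePure { i => log(i.toString); i + 1 } // error: closure captures `log`
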
*/ - def endsWith[B >: A](that: Iterable[B]): Boolean = { + def endsWith[B >: A](that: Iterable[B]^): Boolean = { if (that.isEmpty) true else { val i = iterator.drop(length - that.size) @@ -595,7 +601,8 @@ trait SeqOps[+A, +CC[_], +C] extends Any if (!hasNext) Iterator.empty.next() - val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms + val forcedElms = new mutable.ArrayBuffer[A @uncheckedCaptures](elms.size) ++= elms + // uncheckedCaptures OK since used only locally val result = (newSpecificBuilder ++= forcedElms).result() var i = idxs.length - 2 while(i >= 0 && idxs(i) >= idxs(i+1)) @@ -628,6 +635,9 @@ trait SeqOps[+A, +CC[_], +C] extends Any private[this] def init() = { val m = mutable.HashMap[A, Int]() + //val s1 = self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) + //val s2: Seq[(A, Int)] = s1 sortBy (_._2) + //val (es, is) = s2.unzip(using Predef.$conforms[(A, Int)]) val (es, is) = (self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip (es.to(mutable.ArrayBuffer), is.toArray) @@ -807,7 +817,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any */ def lengthCompare(len: Int): Int = super.sizeCompare(len) - override final def sizeCompare(that: Iterable[_]): Int = lengthCompare(that) + override final def sizeCompare(that: Iterable[_]^): Int = lengthCompare(that) /** Compares the length of this $coll to the size of another `Iterable`. * @@ -822,7 +832,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * is `O(this.length min that.size)` instead of `O(this.length + that.size)`. * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. */ - def lengthCompare(that: Iterable[_]): Int = super.sizeCompare(that) + def lengthCompare(that: Iterable[_]^): Int = super.sizeCompare(that) /** Returns a value class containing operations for comparing the length of this $coll to a test value. * @@ -845,7 +855,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any /** Are the elements of this collection the same (and in the same order) * as those of `that`? */ - def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { val thisKnownSize = knownSize val knownSizeDifference = thisKnownSize != -1 && { val thatKnownSize = that.knownSize @@ -883,7 +893,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * part of the result, but any following occurrences will. */ def diff[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) fromSpecific(iterator.filter { x => var include = false occ.updateWith(x) { @@ -908,7 +918,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * in the result, but any following occurrences will be omitted. */ def intersect[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) fromSpecific(iterator.filter { x => var include = true occ.updateWith(x) { @@ -937,7 +947,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * except that `replaced` elements starting from `from` are replaced * by all the elements of `other`. */ - def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = + def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = iterableFactory.from(new View.Patched(this, from, other, replaced)) /** A copy of this $coll with one single replaced element. 
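
Both `forcedElms` and `occCounts` above lean on `@uncheckedCaptures` to instantiate mutable structures at a possibly-capturing element type; the accompanying comment gives the justification, namely that the buffer never escapes. A sketch of the same local-only pattern:

    import language.experimental.captureChecking
    import scala.annotation.unchecked.uncheckedCaptures
    import scala.collection.mutable

    def countDistinct[A](it: Iterator[A]^): Int =
      // Mutable sets demand a capture-free element type; @uncheckedCaptures
      // waives that check, which is sound only because `seen` stays local.
      val seen = mutable.HashSet.empty[A @uncheckedCaptures]
      var n = 0
      for x <- it do if seen.add(x) then n += 1
      n
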
@@ -956,7 +966,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any iterableFactory.from(new View.Updated(this, index, elem)) } - protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { + protected[collection] def occCounts[sealed B](sq: Seq[B]): mutable.Map[B, Int] = { val occ = new mutable.HashMap[B, Int]() for (y <- sq) occ.updateWith(y) { case None => Some(1) @@ -1004,11 +1014,11 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @return a `Found` value containing the index corresponding to the element in the * sequence, or the `InsertionPoint` where the element would be inserted if * the element is not in the sequence. - * + * * @note if `to <= from`, the search space is empty, and an `InsertionPoint` at `from` * is returned */ - def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = + def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = linearSearch(view.slice(from, to), elem, math.max(0, from))(ord) private[this] def linearSearch[B >: A](c: View[A], elem: B, offset: Int) diff --git a/tests/pos-special/stdlib/collection/SeqMap.scala b/tests/pos-special/stdlib/collection/SeqMap.scala index 05bf126aba02..a7f2c629b61d 100644 --- a/tests/pos-special/stdlib/collection/SeqMap.scala +++ b/tests/pos-special/stdlib/collection/SeqMap.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking import scala.annotation.nowarn diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala index ad16f01b9184..a4ca1143f8b4 100644 --- a/tests/pos-special/stdlib/collection/SeqView.scala +++ b/tests/pos-special/stdlib/collection/SeqView.scala @@ -14,26 +14,49 @@ package scala package collection import scala.annotation.nowarn +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure +import scala.annotation.unchecked.uncheckedCaptures + +/** !!! Scala 2 difference: Need intermediate trait SeqViewOps to collect the + * necessary functionality over which SeqViews are defined, and at the same + * time allowing impure operations. Scala 2 uses SeqOps here, but SeqOps is + * pure, whereas SeqViews are Iterables which can be impure (for instance, + * mapping a SeqView with an impure function gives an impure view). 
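
The comment above motivates the whole `SeqViewOps` split: because views are lazy, a view built from an impure function must itself carry that function's captures. Under the signatures introduced below, the checker should infer, for example (a sketch, assuming the capture-checked stdlib of this patch series):

    import language.experimental.captureChecking

    def render(xs: Seq[Int], out: String => Unit): Unit =
      // Mapping with a closure over `out` yields a view capturing `out`;
      // the effect runs only when the view is traversed.
      val v: SeqView[String]^{out} =
        xs.view.map { i => out(s"mapping $i"); i.toString }
      v.take(2).foreach(out)
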
+ */ +trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { + self: SeqViewOps[A, CC, C]^ => + + def length: Int + def apply(x: Int): A + def appended[B >: A](elem: B): CC[B]^{this} + def prepended[B >: A](elem: B): CC[B]^{this} + def reverse: C^{this} + def sorted[B >: A](implicit ord: Ordering[B]): C^{this} + + def reverseIterator: Iterator[A]^{this} = reversed.iterator +} +trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] { + self: SeqView[A]^ => -trait SeqView[+A] extends SeqOps[A, View, View[A]] with View[A] { - override def view: SeqView[A] = this + override def view: SeqView[A]^{this} = this - override def map[B](f: A => B): SeqView[B] = new SeqView.Map(this, f) - override def appended[B >: A](elem: B): SeqView[B] = new SeqView.Appended(this, elem) - override def prepended[B >: A](elem: B): SeqView[B] = new SeqView.Prepended(elem, this) - override def reverse: SeqView[A] = new SeqView.Reverse(this) - override def take(n: Int): SeqView[A] = new SeqView.Take(this, n) - override def drop(n: Int): SeqView[A] = new SeqView.Drop(this, n) - override def takeRight(n: Int): SeqView[A] = new SeqView.TakeRight(this, n) - override def dropRight(n: Int): SeqView[A] = new SeqView.DropRight(this, n) - override def tapEach[U](f: A => U): SeqView[A] = new SeqView.Map(this, { (a: A) => f(a); a }) + override def map[B](f: A => B): SeqView[B]^{this, f} = new SeqView.Map(this, f) + override def appended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Prepended(elem, this) + override def reverse: SeqView[A]^{this} = new SeqView.Reverse(this) + override def take(n: Int): SeqView[A]^{this} = new SeqView.Take(this, n) + override def drop(n: Int): SeqView[A]^{this} = new SeqView.Drop(this, n) + override def takeRight(n: Int): SeqView[A]^{this} = new SeqView.TakeRight(this, n) + override def dropRight(n: Int): SeqView[A]^{this} = new SeqView.DropRight(this, n) + override def tapEach[U](f: A => U): SeqView[A]^{this, f} = new SeqView.Map(this, { (a: A) => f(a); a }) - def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) - def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) - def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(prefix, this) + def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(prefix, this) - override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A] = new SeqView.Sorted(this, ord) + override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A]^{this} = new SeqView.Sorted(this, ord) @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix: String = "SeqView" @@ -42,38 +65,38 @@ trait SeqView[+A] extends SeqOps[A, View, View[A]] with View[A] { object SeqView { /** A `SeqOps` whose collection type and collection type constructor are unknown */ - private type SomeSeqOps[+A] = SeqOps[A, AnyConstr, _] + private type SomeSeqOps[+A] = SeqViewOps[A, AnyConstr, _] /** A view that doesn’t apply any transformation to an underlying sequence */ @SerialVersionUID(3L) - class Id[+A](underlying: SomeSeqOps[A]) 
extends AbstractSeqView[A] { + class Id[+A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { def apply(idx: Int): A = underlying.apply(idx) def length: Int = underlying.length - def iterator: Iterator[A] = underlying.iterator + def iterator: Iterator[A]^{this} = underlying.iterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class Map[+A, +B](underlying: SomeSeqOps[A], f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { + class Map[+A, +B](underlying: SomeSeqOps[A]^, f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { def apply(idx: Int): B = f(underlying(idx)) def length: Int = underlying.length } @SerialVersionUID(3L) - class Appended[+A](underlying: SomeSeqOps[A], elem: A) extends View.Appended(underlying, elem) with SeqView[A] { + class Appended[+A](underlying: SomeSeqOps[A]^, elem: A) extends View.Appended(underlying, elem) with SeqView[A] { def apply(idx: Int): A = if (idx == underlying.length) elem else underlying(idx) def length: Int = underlying.length + 1 } @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeSeqOps[A]) extends View.Prepended(elem, underlying) with SeqView[A] { + class Prepended[+A](elem: A, underlying: SomeSeqOps[A]^) extends View.Prepended(elem, underlying) with SeqView[A] { def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1) def length: Int = underlying.length + 1 } @SerialVersionUID(3L) - class Concat[A](prefix: SomeSeqOps[A], suffix: SomeSeqOps[A]) extends View.Concat[A](prefix, suffix) with SeqView[A] { + class Concat[A](prefix: SomeSeqOps[A]^, suffix: SomeSeqOps[A]^) extends View.Concat[A](prefix, suffix) with SeqView[A] { def apply(idx: Int): A = { val l = prefix.length if (idx < l) prefix(idx) else suffix(idx - l) @@ -82,16 +105,16 @@ object SeqView { } @SerialVersionUID(3L) - class Reverse[A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { + class Reverse[A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { def apply(i: Int) = underlying.apply(size - 1 - i) def length = underlying.size - def iterator: Iterator[A] = underlying.reverseIterator + def iterator: Iterator[A]^{this} = underlying.reverseIterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class Take[+A](underlying: SomeSeqOps[A], n: Int) extends View.Take(underlying, n) with SeqView[A] { + class Take[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.Take(underlying, n) with SeqView[A] { def apply(idx: Int): A = if (idx < n) { underlying(idx) } else { @@ -101,7 +124,7 @@ object SeqView { } @SerialVersionUID(3L) - class TakeRight[+A](underlying: SomeSeqOps[A], n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { + class TakeRight[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { private[this] val delta = (underlying.size - (n max 0)) max 0 def length = underlying.size - delta @throws[IndexOutOfBoundsException] @@ -109,15 +132,15 @@ object SeqView { } @SerialVersionUID(3L) - class Drop[A](underlying: SomeSeqOps[A], n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { + class Drop[A](underlying: SomeSeqOps[A]^, n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { def length = (underlying.size - normN) max 0 @throws[IndexOutOfBoundsException] def apply(i: Int) = underlying.apply(i + normN) - override def drop(n: Int): SeqView[A] = new Drop(underlying, this.n + n) + override 
def drop(n: Int): SeqView[A]^{this} = new Drop(underlying, this.n + n) } @SerialVersionUID(3L) - class DropRight[A](underlying: SomeSeqOps[A], n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { + class DropRight[A](underlying: SomeSeqOps[A]^, n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { private[this] val len = (underlying.size - (n max 0)) max 0 def length = len @throws[IndexOutOfBoundsException] @@ -125,15 +148,15 @@ object SeqView { } @SerialVersionUID(3L) - class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A], + class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A]^, private[this] val len: Int, ord: Ordering[B]) extends SeqView[A] { - outer => + outer: Sorted[A, B]^ => // force evaluation immediately by calling `length` so infinite collections // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls - def this(underlying: SomeSeqOps[A], ord: Ordering[B]) = this(underlying, underlying.length, ord) + def this(underlying: SomeSeqOps[A]^, ord: Ordering[B]) = this(underlying, underlying.length, ord) @SerialVersionUID(3L) private[this] class ReverseSorted extends SeqView[A] { @@ -141,15 +164,15 @@ object SeqView { def apply(i: Int): A = _reversed.apply(i) def length: Int = len - def iterator: Iterator[A] = Iterator.empty ++ _reversed.iterator // very lazy + def iterator: Iterator[A]^{this} = Iterator.empty ++ _reversed.iterator // very lazy override def knownSize: Int = len override def isEmpty: Boolean = len == 0 override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory) - override def reverse: SeqView[A] = outer - override protected def reversed: Iterable[A] = outer + override def reverse: SeqView[A]^{outer} = outer + override protected def reversed: Iterable[A] = outer.unsafeAssumePure - override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A] = - if (ord1 == Sorted.this.ord) outer + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} = + if (ord1 == Sorted.this.ord) outer.unsafeAssumePure else if (ord1.isReverseOf(Sorted.this.ord)) this else new Sorted(elems, len, ord1) } @@ -173,7 +196,7 @@ object SeqView { // contains items of another type, we'd get a CCE anyway) // - the cast doesn't actually do anything in the runtime because the // type of A is not known and Array[_] is Array[AnyRef] - immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]]) + immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A @uncheckedCaptures]]) } } evaluated = true @@ -181,14 +204,14 @@ object SeqView { res } - private[this] def elems: SomeSeqOps[A] = { + private[this] def elems: SomeSeqOps[A]^{this} = { val orig = underlying if (evaluated) _sorted else orig } def apply(i: Int): A = _sorted.apply(i) def length: Int = len - def iterator: Iterator[A] = Iterator.empty ++ _sorted.iterator // very lazy + def iterator: Iterator[A]^{this} = Iterator.empty ++ _sorted.iterator // very lazy override def knownSize: Int = len override def isEmpty: Boolean = len == 0 override def to[C1](factory: Factory[A, C1]): C1 = _sorted.to(factory) @@ -197,7 +220,7 @@ object SeqView { // so this is acceptable for `reversed` override protected def reversed: Iterable[A] = new ReverseSorted - override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A] = + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} = if (ord1 == this.ord) this else if (ord1.isReverseOf(this.ord)) reverse else new Sorted(elems, len, ord1) diff --git 
a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala index 0ea1e5689473..a9c279b82a49 100644 --- a/tests/pos-special/stdlib/collection/Set.scala +++ b/tests/pos-special/stdlib/collection/Set.scala @@ -17,6 +17,7 @@ import scala.util.hashing.MurmurHash3 import java.lang.String import scala.annotation.nowarn +import language.experimental.captureChecking /** Base trait for set collections. */ @@ -24,7 +25,9 @@ trait Set[A] extends Iterable[A] with SetOps[A, Set, Set[A]] with Equals - with IterableFactoryDefaults[A, Set] { + with IterableFactoryDefaults[A, Set] + with Pure { + self: Set[A] => def canEqual(that: Any) = true @@ -86,8 +89,7 @@ trait Set[A] * @define Coll `Set` */ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] - extends IterableOps[A, CC, C] - with (A => Boolean) { + extends IterableOps[A, CC, C], (A -> Boolean) { self => def contains(elem: A): Boolean @@ -234,7 +236,7 @@ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] case that: collection.Iterable[A] => new View.Concat(this, that) case _ => iterator.concat(that.iterator) }) - } + } @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0") def + (elem: A): C = fromSpecific(new View.Appended(this, elem)) diff --git a/tests/pos-special/stdlib/collection/SortedMap.scala b/tests/pos-special/stdlib/collection/SortedMap.scala index 03ab0bb0dadc..7b9381ebb078 100644 --- a/tests/pos-special/stdlib/collection/SortedMap.scala +++ b/tests/pos-special/stdlib/collection/SortedMap.scala @@ -14,6 +14,7 @@ package scala package collection import scala.annotation.{implicitNotFound, nowarn} +import language.experimental.captureChecking /** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ trait SortedMap[K, +V] @@ -49,7 +50,8 @@ trait SortedMap[K, +V] trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] extends MapOps[K, V, Map, C] - with SortedOps[K, C] { + with SortedOps[K, C] + with Pure { /** The companion object of this sorted map, providing various factory methods. 
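
Marking `Set` as `Pure` and having `SetOps` extend the pure arrow `A -> Boolean` records that a fully built, strict set retains no capabilities, so it can be passed wherever a capture-free predicate is required. A sketch, assuming `Pure` resolves as in this patch series:

    import language.experimental.captureChecking

    def keepAllowed(words: List[String], allowed: Set[String]): List[String] =
      // A Set is now a pure `String -> Boolean`, so using it as the filter
      // predicate introduces no captures into the result.
      words.filter(allowed)
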
* @@ -176,13 +178,13 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(new View.Collect(this, pf)) - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) })(ordering) /** Alias for `concat` */ - @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) @@ -206,10 +208,10 @@ object SortedMapOps { def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = self.sortedMapFactory.from(new View.Map(filtered, f)) - def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = self.sortedMapFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] = + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC]^{this, q} = new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) } diff --git a/tests/pos-special/stdlib/collection/SortedOps.scala b/tests/pos-special/stdlib/collection/SortedOps.scala index 64e6376be042..16751d86d9d5 100644 --- a/tests/pos-special/stdlib/collection/SortedOps.scala +++ b/tests/pos-special/stdlib/collection/SortedOps.scala @@ -12,6 +12,7 @@ package scala.collection +import language.experimental.captureChecking /** Base trait for sorted collections */ trait SortedOps[A, +C] { diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala index c98ca9ae5523..fb2f879edcd2 100644 --- a/tests/pos-special/stdlib/collection/SortedSet.scala +++ b/tests/pos-special/stdlib/collection/SortedSet.scala @@ -14,6 +14,7 @@ package scala.collection import scala.annotation.{implicitNotFound, nowarn} import scala.annotation.unchecked.uncheckedVariance +import language.experimental.captureChecking /** Base type of sorted sets */ trait SortedSet[A] extends Set[A] @@ -68,7 +69,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] * @param start The lower-bound (inclusive) of the iterator */ def iteratorFrom(start: A): Iterator[A] - + @deprecated("Use `iteratorFrom` instead.", "2.13.0") @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) @@ -178,7 +179,7 @@ object SortedSetOps { def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] = self.sortedIterableFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC] = + override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC]^{this, q} = new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a)) } diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala index 
0eeb8a44cb72..0a0ac0075990 100644 --- a/tests/pos-special/stdlib/collection/Stepper.scala +++ b/tests/pos-special/stdlib/collection/Stepper.scala @@ -15,6 +15,7 @@ package scala.collection import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer} import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator} import java.{lang => jl} +import language.experimental.captureChecking import scala.collection.Stepper.EfficientSplit @@ -38,6 +39,8 @@ import scala.collection.Stepper.EfficientSplit * @tparam A the element type of the Stepper */ trait Stepper[@specialized(Double, Int, Long) +A] { + this: Stepper[A]^ => + /** Check if there's an element available. */ def hasStep: Boolean @@ -183,9 +186,11 @@ object Stepper { /** A Stepper for arbitrary element types. See [[Stepper]]. */ trait AnyStepper[+A] extends Stepper[A] { + this: AnyStepper[A]^ => + def trySplit(): AnyStepper[A] - def spliterator[B >: A]: Spliterator[B] = new AnyStepper.AnyStepperSpliterator(this) + def spliterator[B >: A]: Spliterator[B]^{this} = new AnyStepper.AnyStepperSpliterator(this) def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { def hasNext: Boolean = hasStep @@ -194,10 +199,10 @@ trait AnyStepper[+A] extends Stepper[A] { } object AnyStepper { - class AnyStepperSpliterator[A](s: AnyStepper[A]) extends Spliterator[A] { + class AnyStepperSpliterator[A](s: AnyStepper[A]^) extends Spliterator[A] { def tryAdvance(c: Consumer[_ >: A]): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false - def trySplit(): Spliterator[A] = { + def trySplit(): Spliterator[A]^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -253,9 +258,11 @@ object AnyStepper { /** A Stepper for Ints. See [[Stepper]]. */ trait IntStepper extends Stepper[Int] { + this: IntStepper^ => + def trySplit(): IntStepper - def spliterator[B >: Int]: Spliterator.OfInt = new IntStepper.IntStepperSpliterator(this) + def spliterator[B >: Int]: Spliterator.OfInt^{this} = new IntStepper.IntStepperSpliterator(this) def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { def hasNext: Boolean = hasStep @@ -263,7 +270,7 @@ trait IntStepper extends Stepper[Int] { } } object IntStepper { - class IntStepperSpliterator(s: IntStepper) extends Spliterator.OfInt { + class IntStepperSpliterator(s: IntStepper^) extends Spliterator.OfInt { def tryAdvance(c: IntConsumer): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false // Override for efficiency: don't wrap the function and call the `tryAdvance` overload @@ -272,7 +279,7 @@ object IntStepper { case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfInt = { + override def trySplit(): Spliterator.OfInt^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -291,18 +298,19 @@ object IntStepper { /** A Stepper for Doubles. See [[Stepper]]. 
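
The self types added to the stepper traits (`this: Stepper[A]^ =>` and friends) declare that instances may capture capabilities, which is what legitimizes result types such as `Spliterator[B]^{this}`. The same pattern on a toy trait, with all names illustrative:

    import language.experimental.captureChecking

    trait Source[+A]:
      this: Source[A]^ =>                         // instances may capture
      def next(): Option[A]
      def mapped[B](f: A => B): Source[B]^{this, f} =
        val outer = this
        new Source[B]:                            // captures {outer, f}
          def next(): Option[B] = outer.next().map(f)
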
*/ trait DoubleStepper extends Stepper[Double] { + this: DoubleStepper^ => def trySplit(): DoubleStepper - def spliterator[B >: Double]: Spliterator.OfDouble = new DoubleStepper.DoubleStepperSpliterator(this) + def spliterator[B >: Double]: Spliterator.OfDouble^{this} = new DoubleStepper.DoubleStepperSpliterator(this) - def javaIterator[B >: Double]: PrimitiveIterator.OfDouble = new PrimitiveIterator.OfDouble { + def javaIterator[B >: Double]: PrimitiveIterator.OfDouble^{this} = new PrimitiveIterator.OfDouble { def hasNext: Boolean = hasStep def nextDouble(): Double = nextStep() } } object DoubleStepper { - class DoubleStepperSpliterator(s: DoubleStepper) extends Spliterator.OfDouble { + class DoubleStepperSpliterator(s: DoubleStepper^) extends Spliterator.OfDouble { def tryAdvance(c: DoubleConsumer): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false // Override for efficiency: don't wrap the function and call the `tryAdvance` overload @@ -311,7 +319,7 @@ object DoubleStepper { case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfDouble = { + override def trySplit(): Spliterator.OfDouble^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -330,18 +338,20 @@ object DoubleStepper { /** A Stepper for Longs. See [[Stepper]]. */ trait LongStepper extends Stepper[Long] { - def trySplit(): LongStepper + this: LongStepper^ => + + def trySplit(): LongStepper^{this} - def spliterator[B >: Long]: Spliterator.OfLong = new LongStepper.LongStepperSpliterator(this) + def spliterator[B >: Long]: Spliterator.OfLong^{this} = new LongStepper.LongStepperSpliterator(this) - def javaIterator[B >: Long]: PrimitiveIterator.OfLong = new PrimitiveIterator.OfLong { + def javaIterator[B >: Long]: PrimitiveIterator.OfLong^{this} = new PrimitiveIterator.OfLong { def hasNext: Boolean = hasStep def nextLong(): Long = nextStep() } } object LongStepper { - class LongStepperSpliterator(s: LongStepper) extends Spliterator.OfLong { + class LongStepperSpliterator(s: LongStepper^) extends Spliterator.OfLong { def tryAdvance(c: LongConsumer): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false // Override for efficiency: don't wrap the function and call the `tryAdvance` overload @@ -350,7 +360,7 @@ object LongStepper { case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfLong = { + override def trySplit(): Spliterator.OfLong^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } diff --git a/tests/pos-special/stdlib/collection/StepperShape.scala b/tests/pos-special/stdlib/collection/StepperShape.scala index 6712073b09e4..c6b520400d89 100644 --- a/tests/pos-special/stdlib/collection/StepperShape.scala +++ b/tests/pos-special/stdlib/collection/StepperShape.scala @@ -14,12 +14,13 @@ package scala.collection import java.{lang => jl} +import language.experimental.captureChecking import scala.collection.Stepper.EfficientSplit /** An implicit StepperShape instance is used in the [[IterableOnce.stepper]] to return a possibly * specialized Stepper `S` according to the element type `T`. */ -sealed trait StepperShape[-T, S <: Stepper[_]] { +sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure { /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. 
*/ def shape: StepperShape.Shape diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala index a09766cfa912..5b504a2469b5 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala @@ -16,6 +16,7 @@ package collection import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics +import language.experimental.captureChecking /** * Trait that overrides iterable operations to take advantage of strict builders. @@ -27,6 +28,7 @@ import scala.runtime.Statics trait StrictOptimizedIterableOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { + this: StrictOptimizedIterableOps[A, CC, C] => // Optimized, push-based version of `partition` override def partition(p: A => Boolean): (C, C) = { @@ -55,7 +57,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] (first.result(), second.result()) } - override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1], CC[A2]) = { val first = iterableFactory.newBuilder[A1] val second = iterableFactory.newBuilder[A2] foreach { a => @@ -66,7 +68,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] (first.result(), second.result()) } - override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { val b1 = iterableFactory.newBuilder[A1] val b2 = iterableFactory.newBuilder[A2] val b3 = iterableFactory.newBuilder[A3] @@ -102,7 +104,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def flatMap[B](f: A => IterableOnce[B]): CC[B] = + override def flatMap[B](f: A => IterableOnce[B]^): CC[B] = strictOptimizedFlatMap(iterableFactory.newBuilder, f) /** @@ -112,7 +114,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[String]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]): C2 = { + @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]^): C2 = { val it = iterator while (it.hasNext) { b ++= f(it.next()) @@ -127,13 +129,13 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[Int]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B], b: mutable.Builder[B, C2]): C2 = { + @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B]^, b: mutable.Builder[B, C2]): C2 = { b ++= this b ++= that b.result() } - override def collect[B](pf: PartialFunction[A, B]): CC[B] = + override def collect[B](pf: PartialFunction[A, B]^): CC[B] = strictOptimizedCollect(iterableFactory.newBuilder, pf) /** @@ -143,7 +145,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. 
`List[String]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]): C2 = { + @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]^): C2 = { val marker = Statics.pfMarker val it = iterator while (it.hasNext) { @@ -154,7 +156,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def flatten[B](implicit toIterableOnce: A => IterableOnce[B]): CC[B] = + override def flatten[B](implicit toIterableOnce: A -> IterableOnce[B]): CC[B] = strictOptimizedFlatten(iterableFactory.newBuilder) /** @@ -164,7 +166,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[Int]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A => IterableOnce[B]): C2 = { + @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A -> IterableOnce[B]): C2 = { val it = iterator while (it.hasNext) { b ++= toIterableOnce(it.next()) @@ -172,7 +174,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = + override def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)] = strictOptimizedZip(that, iterableFactory.newBuilder[(A, B)]) /** @@ -182,7 +184,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[(Int, String)]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B], b: mutable.Builder[(A, B), C2]): C2 = { + @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B]^, b: mutable.Builder[(A, B), C2]): C2 = { val it1 = iterator val it2 = that.iterator while (it1.hasNext && it2.hasNext) { diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala index 1f5791bbb718..a9c5e0af43b3 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking /** * Trait that overrides map operations to take advantage of strict builders. 
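
A pattern worth noting in these strict-optimized overrides: the inputs are capturing (`IterableOnce[...]^`, `PartialFunction[...]^`), yet the results are plain `CC[...]` with no capture set. That is sound precisely because a strict builder drains its argument eagerly, so nothing effectful survives into the finished collection. A self-contained sketch of the reasoning, not the actual stdlib code:

    import language.experimental.captureChecking

    def materialize[A](it: IterableOnce[A]^): List[A] =
      // The capturing argument is fully consumed here; the returned List
      // holds only elements, hence needs no capture annotation.
      def loop(i: Iterator[A]^): List[A] =
        if i.hasNext then i.next() :: loop(i) else Nil
      loop(it.iterator)
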
@@ -22,15 +23,16 @@ package scala.collection */ trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] extends MapOps[K, V, CC, C] - with StrictOptimizedIterableOps[(K, V), Iterable, C] { + with StrictOptimizedIterableOps[(K, V), Iterable, C] + with Pure { override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = strictOptimizedMap(mapFactory.newBuilder, f) - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = strictOptimizedFlatMap(mapFactory.newBuilder, f) - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = strictOptimizedConcat(suffix, mapFactory.newBuilder) override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala index 396e53885081..bfea9eda8bd3 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -11,6 +11,8 @@ */ package scala.collection +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** * Trait that overrides operations on sequences in order @@ -21,9 +23,9 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] with SeqOps[A, CC, C] with StrictOptimizedIterableOps[A, CC, C] { - override def distinctBy[B](f: A => B): C = { + override def distinctBy[B](f: A -> B): C = { val builder = newSpecificBuilder - val seen = mutable.HashSet.empty[B] + val seen = mutable.HashSet.empty[B @uncheckedCaptures] val it = this.iterator while (it.hasNext) { val next = it.next() @@ -52,10 +54,10 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] b.result() } - override def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = + override def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] = strictOptimizedConcat(suffix, iterableFactory.newBuilder) - override def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = { + override def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = { val b = iterableFactory.newBuilder[B] b ++= prefix b ++= this @@ -78,7 +80,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def diff[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) coll else { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { @@ -96,7 +98,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def intersect[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) empty else { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala index 356bd2883578..8ed337fff998 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking /** * Trait that overrides set operations to take advantage of strict builders. 
diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala index 1beaf1662abe..9a9e6e367922 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala @@ -13,6 +13,7 @@ package scala.collection import scala.annotation.implicitNotFound +import language.experimental.captureChecking /** * Trait that overrides sorted map operations to take advantage of strict builders. @@ -32,7 +33,7 @@ trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOp override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) - override def concat[V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering)) override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = diff --git a/tests/pos-special/stdlib/collection/StringOps.scala b/tests/pos-special/stdlib/collection/StringOps.scala index f0be485af8ae..3e3e2f8d872e 100644 --- a/tests/pos-special/stdlib/collection/StringOps.scala +++ b/tests/pos-special/stdlib/collection/StringOps.scala @@ -22,6 +22,7 @@ import scala.collection.mutable.StringBuilder import scala.math.{ScalaNumber, max, min} import scala.reflect.ClassTag import scala.util.matching.Regex +import language.experimental.captureChecking object StringOps { // just statics for companion class. @@ -123,7 +124,7 @@ object StringOps { * @return a new collection resulting from applying the given collection-valued function * `f` to each char of this string and concatenating the results. */ - def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { + def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = { val len = s.length val b = immutable.IndexedSeq.newBuilder[B] var i = 0 @@ -155,7 +156,7 @@ object StringOps { } /** Creates a new non-strict filter which combines this filter with the given predicate. */ - def withFilter(q: Char => Boolean): WithFilter = new WithFilter(a => p(a) && q(a), s) + def withFilter(q: Char => Boolean): WithFilter^{p, q} = new WithFilter(a => p(a) && q(a), s) } /** Avoid an allocation in [[collect]]. */ @@ -238,7 +239,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new collection resulting from applying the given collection-valued function * `f` to each char of this string and concatenating the results. */ - def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { + def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = { val len = s.length val b = immutable.IndexedSeq.newBuilder[B] var i = 0 @@ -313,7 +314,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new collection which contains all chars * of this string followed by all elements of `suffix`. 
*/ - def concat[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = { + def concat[B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = { val b = immutable.IndexedSeq.newBuilder[B] val k = suffix.knownSize b.sizeHint(s.length + (if(k >= 0) k else 16)) @@ -329,7 +330,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new string which contains all chars * of this string followed by all chars of `suffix`. */ - def concat(suffix: IterableOnce[Char]): String = { + def concat(suffix: IterableOnce[Char]^): String = { val k = suffix.knownSize val sb = new JStringBuilder(s.length + (if(k >= 0) k else 16)) sb.append(s) @@ -347,10 +348,10 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def concat(suffix: String): String = s + suffix /** Alias for `concat` */ - @`inline` def ++[B >: Char](suffix: Iterable[B]): immutable.IndexedSeq[B] = concat(suffix) + @`inline` def ++[B >: Char](suffix: Iterable[B]^): immutable.IndexedSeq[B] = concat(suffix) /** Alias for `concat` */ - @`inline` def ++(suffix: IterableOnce[Char]): String = concat(suffix) + @`inline` def ++(suffix: IterableOnce[Char]^): String = concat(suffix) /** Alias for `concat` */ def ++(xs: String): String = concat(xs) @@ -422,7 +423,7 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def +: (c: Char): String = prepended(c) /** A copy of the string with all elements from a collection prepended */ - def prependedAll[B >: Char](prefix: IterableOnce[B]): immutable.IndexedSeq[B] = { + def prependedAll[B >: Char](prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = { val b = immutable.IndexedSeq.newBuilder[B] val k = prefix.knownSize b.sizeHint(s.length + (if(k >= 0) k else 16)) @@ -432,7 +433,7 @@ final class StringOps(private val s: String) extends AnyVal { } /** Alias for `prependedAll` */ - @`inline` def ++: [B >: Char] (prefix: IterableOnce[B]): immutable.IndexedSeq[B] = prependedAll(prefix) + @`inline` def ++: [B >: Char] (prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = prependedAll(prefix) /** A copy of the string with another string prepended */ def prependedAll(prefix: String): String = prefix + s @@ -460,11 +461,11 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def :+ (c: Char): String = appended(c) /** A copy of the string with all elements from a collection appended */ - @`inline` def appendedAll[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = + @`inline` def appendedAll[B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = concat(suffix) /** Alias for `appendedAll` */ - @`inline` def :++ [B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = + @`inline` def :++ [B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = concat(suffix) /** A copy of the string with another string appended */ @@ -486,7 +487,7 @@ final class StringOps(private val s: String) extends AnyVal { * except that `replaced` chars starting from `from` are replaced * by `other`. */ - def patch[B >: Char](from: Int, other: IterableOnce[B], replaced: Int): immutable.IndexedSeq[B] = { + def patch[B >: Char](from: Int, other: IterableOnce[B]^, replaced: Int): immutable.IndexedSeq[B] = { val len = s.length @`inline` def slc(off: Int, length: Int): WrappedString = new WrappedString(s.substring(off, off+length)) @@ -515,7 +516,7 @@ final class StringOps(private val s: String) extends AnyVal { * by `other`. 
* @note $unicodeunaware */ - def patch(from: Int, other: IterableOnce[Char], replaced: Int): String = + def patch(from: Int, other: IterableOnce[Char]^, replaced: Int): String = patch(from, other.iterator.mkString, replaced) /** Produces a new string where a slice of characters in this string is replaced by another string. @@ -963,7 +964,7 @@ final class StringOps(private val s: String) extends AnyVal { else if (s.equalsIgnoreCase("false")) false else throw new IllegalArgumentException("For input string: \""+s+"\"") - def toArray[B >: Char](implicit tag: ClassTag[B]): Array[B] = + def toArray[sealed B >: Char](implicit tag: ClassTag[B]): Array[B] = if (tag == ClassTag.Char) s.toCharArray.asInstanceOf[Array[B]] else new WrappedString(s).toArray[B] @@ -1195,7 +1196,7 @@ final class StringOps(private val s: String) extends AnyVal { * All these operations apply to those chars of this string * which satisfy the predicate `p`. */ - def withFilter(p: Char => Boolean): StringOps.WithFilter = new StringOps.WithFilter(p, s) + def withFilter(p: Char => Boolean): StringOps.WithFilter^{p} = new StringOps.WithFilter(p, s) /** The rest of the string without its first char. * @note $unicodeunaware @@ -1246,7 +1247,7 @@ final class StringOps(private val s: String) extends AnyVal { def inits: Iterator[String] = iterateUntilEmpty(_.init) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: String => String): Iterator[String] = + private[this] def iterateUntilEmpty(f: String => String): Iterator[String]^{f} = Iterator.iterate(s)(f).takeWhile(x => !x.isEmpty) ++ Iterator.single("") /** Selects all chars of this string which satisfy a predicate. */ @@ -1464,7 +1465,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]): LazyZip2[Char, B, String] = new LazyZip2(s, new WrappedString(s), that) + def lazyZip[B](that: Iterable[B]^): LazyZip2[Char, B, String]^{that} = new LazyZip2(s, new WrappedString(s), that) /* ************************************************************************************************************ @@ -1512,7 +1513,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new string consisting of all the chars of this string without duplicates. * @note $unicodeunaware */ - def distinctBy[B](f: Char => B): String = new WrappedString(s).distinctBy(f).unwrap + def distinctBy[B](f: Char -> B): String = new WrappedString(s).distinctBy(f).unwrap /** Sorts the characters of this string according to an Ordering. 
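
`toArray[sealed B >: Char]` above uses the patch-era `sealed` type-parameter modifier: arrays are mutable, so their element type must not smuggle capabilities in, and `sealed` forbids instantiating `B` with a capturing type. A sketch, specific to this experimental compiler version:

    import language.experimental.captureChecking
    import scala.reflect.ClassTag

    def replicate[sealed A: ClassTag](x: A, n: Int): Array[A] =
      Array.fill(n)(x)  // ok: `sealed` rules out element types that capture

    // replicate(someCapturingValue, 3)  // would be rejected if the argument's
    //                                   // type carried a non-empty capture set
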
* diff --git a/tests/pos-special/stdlib/collection/StringParsers.scala b/tests/pos-special/stdlib/collection/StringParsers.scala index 5479a58d485f..47281815da71 100644 --- a/tests/pos-special/stdlib/collection/StringParsers.scala +++ b/tests/pos-special/stdlib/collection/StringParsers.scala @@ -14,6 +14,7 @@ package scala package collection import scala.annotation.tailrec +import language.experimental.captureChecking /** A module containing the implementations of parsers from strings to numeric types, and boolean */ @@ -34,7 +35,7 @@ private[scala] object StringParsers { @inline private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = { @tailrec - def rec(i: Int, agg: Int): Option[Int] = + def rec(i: Int, agg: Int): Option[Int] = if (agg < min) None else if (i == len) { if (!isPositive) Some(agg) @@ -131,11 +132,11 @@ private[scala] object StringParsers { else None } } - + final def parseLong(from: String): Option[Long] = { //like parseInt, but Longer val len = from.length() - + @tailrec def step(i: Int, agg: Long, isPositive: Boolean): Option[Long] = { if (i == len) { @@ -166,7 +167,7 @@ private[scala] object StringParsers { else None } } - + //floating point final def checkFloatFormat(format: String): Boolean = { //indices are tracked with a start index which points *at* the first index @@ -192,7 +193,7 @@ private[scala] object StringParsers { else i rec(from) } - + def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') || @@ -231,7 +232,7 @@ private[scala] object StringParsers { val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex) (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex) } - + def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { //invariant: endIndex > startIndex @@ -278,7 +279,7 @@ private[scala] object StringParsers { //count 0x00 to 0x20 as "whitespace", and nothing else val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20) val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1 - + if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false else { //all formats can have a sign @@ -305,7 +306,7 @@ private[scala] object StringParsers { } } } - + @inline def parseFloat(from: String): Option[Float] = if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from)) diff --git a/tests/pos-special/stdlib/collection/View.scala b/tests/pos-special/stdlib/collection/View.scala index 441790c3c6e5..d91fc0c49939 100644 --- a/tests/pos-special/stdlib/collection/View.scala +++ b/tests/pos-special/stdlib/collection/View.scala @@ -15,6 +15,8 @@ package scala.collection import scala.annotation.{nowarn, tailrec} import scala.collection.mutable.{ArrayBuffer, Builder} import scala.collection.immutable.LazyList +import scala.annotation.unchecked.uncheckedCaptures +import language.experimental.captureChecking /** Views are collections whose transformation operations are non strict: the resulting elements * are evaluated only when the view is effectively traversed (e.g. 
using `foreach` or `foldLeft`), @@ -23,8 +25,9 @@ import scala.collection.immutable.LazyList * @define Coll `View` */ trait View[+A] extends Iterable[A] with IterableOps[A, View, View[A]] with IterableFactoryDefaults[A, View] with Serializable { + this: View[A]^ => - override def view: View[A] = this + override def view: View[A]^{this} = this override def iterableFactory: IterableFactory[View] = View @@ -55,8 +58,8 @@ object View extends IterableFactory[View] { * * @tparam A View element type */ - def fromIteratorProvider[A](it: () => Iterator[A]): View[A] = new AbstractView[A] { - def iterator = it() + def fromIteratorProvider[A](it: () => Iterator[A]^): View[A]^{it} = new AbstractView[A] { + def iterator: Iterator[A]^{it} = it() } /** @@ -67,7 +70,7 @@ object View extends IterableFactory[View] { * * @tparam E View element type */ - def from[E](it: IterableOnce[E]): View[E] = it match { + def from[E](it: IterableOnce[E]^): View[E]^{it} = it match { case it: View[E] => it case it: Iterable[E] => View.fromIteratorProvider(() => it.iterator) case _ => LazyList.from(it).view @@ -75,7 +78,7 @@ object View extends IterableFactory[View] { def empty[A]: View[A] = Empty - def newBuilder[A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) + def newBuilder[sealed A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) override def apply[A](xs: A*): View[A] = new Elems(xs: _*) @@ -97,7 +100,7 @@ object View extends IterableFactory[View] { /** A view with given elements */ @SerialVersionUID(3L) - class Elems[A](xs: A*) extends AbstractView[A] { + class Elems[A](xs: A*) extends AbstractView[A], Pure { def iterator = xs.iterator override def knownSize = xs.knownSize override def isEmpty: Boolean = xs.isEmpty @@ -106,7 +109,7 @@ object View extends IterableFactory[View] { /** A view containing the results of some element computation a number of times. */ @SerialVersionUID(3L) class Fill[A](n: Int)(elem: => A) extends AbstractView[A] { - def iterator = Iterator.fill(n)(elem) + def iterator: Iterator[A]^{elem} = Iterator.fill(n)(elem) override def knownSize: Int = 0 max n override def isEmpty: Boolean = n <= 0 } @@ -114,7 +117,7 @@ object View extends IterableFactory[View] { /** A view containing values of a given function over a range of integer values starting from 0. 
*/ @SerialVersionUID(3L) class Tabulate[A](n: Int)(f: Int => A) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.tabulate(n)(f) + def iterator: Iterator[A]^{f} = Iterator.tabulate(n)(f) override def knownSize: Int = 0 max n override def isEmpty: Boolean = n <= 0 } @@ -122,7 +125,7 @@ object View extends IterableFactory[View] { /** A view containing repeated applications of a function to a start value */ @SerialVersionUID(3L) class Iterate[A](start: A, len: Int)(f: A => A) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.iterate(start)(f).take(len) + def iterator: Iterator[A]^{f} = Iterator.iterate(start)(f).take(len) override def knownSize: Int = 0 max len override def isEmpty: Boolean = len <= 0 } @@ -132,7 +135,7 @@ object View extends IterableFactory[View] { */ @SerialVersionUID(3L) class Unfold[A, S](initial: S)(f: S => Option[(A, S)]) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.unfold(initial)(f) + def iterator: Iterator[A]^{f} = Iterator.unfold(initial)(f) } /** An `IterableOps` whose collection type and collection type constructor are unknown */ @@ -140,14 +143,14 @@ object View extends IterableFactory[View] { /** A view that filters an underlying collection. */ @SerialVersionUID(3L) - class Filter[A](val underlying: SomeIterableOps[A], val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { - def iterator = underlying.iterator.filterImpl(p, isFlipped) + class Filter[A](val underlying: SomeIterableOps[A]^, val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying, p} = underlying.iterator.filterImpl(p, isFlipped) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } object Filter { - def apply[A](underlying: Iterable[A], p: A => Boolean, isFlipped: Boolean): Filter[A] = + def apply[A](underlying: Iterable[A]^, p: A => Boolean, isFlipped: Boolean): Filter[A]^{underlying, p} = underlying match { case filter: Filter[A] if filter.isFlipped == isFlipped => new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) case _ => new Filter(underlying, p, isFlipped) @@ -156,15 +159,15 @@ object View extends IterableFactory[View] { /** A view that removes the duplicated elements as determined by the transformation function `f` */ @SerialVersionUID(3L) - class DistinctBy[A, B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.distinctBy(f) + class DistinctBy[A, B](underlying: SomeIterableOps[A]^, f: A -> B) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.distinctBy(f) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A1] { - def iterator = new AbstractIterator[A1] { + class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A1] { + def iterator: Iterator[A1]^{underlying, f} = new AbstractIterator[A1] { private[this] val self = underlying.iterator private[this] var hd: A1 = _ private[this] var hdDefined: Boolean = false @@ -188,8 +191,8 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) 
extends AbstractView[A2] { - def iterator = new AbstractIterator[A2] { + class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A2] { + def iterator: Iterator[A2]^{this} = new AbstractIterator[A2] { private[this] val self = underlying.iterator private[this] var hd: A2 = _ private[this] var hdDefined: Boolean = false @@ -214,8 +217,8 @@ object View extends IterableFactory[View] { /** A view that drops leading elements of the underlying collection. */ @SerialVersionUID(3L) - class Drop[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = underlying.iterator.drop(n) + class Drop[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.drop(n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -226,8 +229,8 @@ object View extends IterableFactory[View] { /** A view that drops trailing elements of the underlying collection. */ @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = dropRightIterator(underlying.iterator, n) + class DropRight[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = dropRightIterator(underlying.iterator, n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -239,16 +242,16 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class DropWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { - def iterator = underlying.iterator.dropWhile(p) + class DropWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying, p} = underlying.iterator.dropWhile(p) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } /** A view that takes leading elements of the underlying collection. */ @SerialVersionUID(3L) - class Take[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = underlying.iterator.take(n) + class Take[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.take(n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -259,8 +262,8 @@ object View extends IterableFactory[View] { /** A view that takes trailing elements of the underlying collection. 
*/ @SerialVersionUID(3L) - class TakeRight[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = takeRightIterator(underlying.iterator, n) + class TakeRight[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = takeRightIterator(underlying.iterator, n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -272,15 +275,15 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class TakeWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.takeWhile(p) + class TakeWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying, p} = underlying.iterator.takeWhile(p) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } @SerialVersionUID(3L) - class ScanLeft[+A, +B](underlying: SomeIterableOps[A], z: B, op: (B, A) => B) extends AbstractView[B] { - def iterator: Iterator[B] = underlying.iterator.scanLeft(z)(op) + class ScanLeft[+A, +B](underlying: SomeIterableOps[A]^, z: B, op: (B, A) => B) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, op} = underlying.iterator.scanLeft(z)(op) override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -290,32 +293,32 @@ object View extends IterableFactory[View] { /** A view that maps elements of the underlying collection. */ @SerialVersionUID(3L) - class Map[+A, +B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[B] { - def iterator = underlying.iterator.map(f) + class Map[+A, +B](underlying: SomeIterableOps[A]^, f: A => B) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, f} = underlying.iterator.map(f) override def knownSize = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } /** A view that flatmaps elements of the underlying collection. */ @SerialVersionUID(3L) - class FlatMap[A, B](underlying: SomeIterableOps[A], f: A => IterableOnce[B]) extends AbstractView[B] { - def iterator = underlying.iterator.flatMap(f) + class FlatMap[A, B](underlying: SomeIterableOps[A]^, f: A => IterableOnce[B]^) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, f} = underlying.iterator.flatMap(f) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } /** A view that collects elements of the underlying collection. */ @SerialVersionUID(3L) - class Collect[+A, B](underlying: SomeIterableOps[A], pf: PartialFunction[A, B]) extends AbstractView[B] { - def iterator = underlying.iterator.collect(pf) + class Collect[+A, B](underlying: SomeIterableOps[A]^, pf: PartialFunction[A, B]^) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, pf} = underlying.iterator.collect(pf) } /** A view that concatenates elements of the prefix collection or iterator with the elements * of the suffix collection or iterator. 
*/ @SerialVersionUID(3L) - class Concat[A](prefix: SomeIterableOps[A], suffix: SomeIterableOps[A]) extends AbstractView[A] { - def iterator = prefix.iterator ++ suffix.iterator + class Concat[A](prefix: SomeIterableOps[A]^, suffix: SomeIterableOps[A]^) extends AbstractView[A] { + def iterator: Iterator[A]^{prefix, suffix} = prefix.iterator ++ suffix.iterator override def knownSize = { val prefixSize = prefix.knownSize if (prefixSize >= 0) { @@ -332,8 +335,8 @@ object View extends IterableFactory[View] { * of another collection. */ @SerialVersionUID(3L) - class Zip[A, B](underlying: SomeIterableOps[A], other: Iterable[B]) extends AbstractView[(A, B)] { - def iterator = underlying.iterator.zip(other) + class Zip[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^) extends AbstractView[(A, B)] { + def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zip(other) override def knownSize = { val s1 = underlying.knownSize if (s1 == 0) 0 else { @@ -349,8 +352,8 @@ object View extends IterableFactory[View] { * placeholder elements are used to extend the shorter collection to the length of the longer. */ @SerialVersionUID(3L) - class ZipAll[A, B](underlying: SomeIterableOps[A], other: Iterable[B], thisElem: A, thatElem: B) extends AbstractView[(A, B)] { - def iterator = underlying.iterator.zipAll(other, thisElem, thatElem) + class ZipAll[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^, thisElem: A, thatElem: B) extends AbstractView[(A, B)] { + def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zipAll(other, thisElem, thatElem) override def knownSize = { val s1 = underlying.knownSize if(s1 == -1) -1 else { @@ -363,8 +366,10 @@ object View extends IterableFactory[View] { /** A view that appends an element to its elements */ @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIterableOps[A], elem: A) extends AbstractView[A] { - def iterator: Iterator[A] = new Concat(underlying, new View.Single(elem)).iterator + class Appended[+A](underlying: SomeIterableOps[A]^, elem: A) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = + val ct = new Concat(underlying, new View.Single(elem)) + ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -374,8 +379,10 @@ object View extends IterableFactory[View] { /** A view that prepends an element to its elements */ @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIterableOps[A]) extends AbstractView[A] { - def iterator: Iterator[A] = new Concat(new View.Single(elem), underlying).iterator + class Prepended[+A](elem: A, underlying: SomeIterableOps[A]^) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = + val ct = new Concat(new View.Single(elem), underlying) + ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -384,8 +391,8 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class Updated[A](underlying: SomeIterableOps[A], index: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A] = new AbstractIterator[A] { + class Updated[A](underlying: SomeIterableOps[A]^, index: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = new AbstractIterator[A] { private[this] val it = underlying.iterator private[this] var i = 
0 def next(): A = { @@ -403,28 +410,28 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - private[collection] class Patched[A](underlying: SomeIterableOps[A], from: Int, other: IterableOnce[A], replaced: Int) extends AbstractView[A] { + private[collection] class Patched[A](underlying: SomeIterableOps[A]^, from: Int, other: IterableOnce[A]^, replaced: Int) extends AbstractView[A] { // we may be unable to traverse `other` more than once, so we need to cache it if that's the case - private val _other: Iterable[A] = other match { + private val _other: Iterable[A]^{other} = other match { case other: Iterable[A] => other case other => LazyList.from(other) } - def iterator: Iterator[A] = underlying.iterator.patch(from, _other.iterator, replaced) + def iterator: Iterator[A]^{underlying, other} = underlying.iterator.patch(from, _other.iterator, replaced) override def knownSize: Int = if (underlying.knownSize == 0 && _other.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = if (knownSize == 0) true else iterator.isEmpty } @SerialVersionUID(3L) - class ZipWithIndex[A](underlying: SomeIterableOps[A]) extends AbstractView[(A, Int)] { - def iterator: Iterator[(A, Int)] = underlying.iterator.zipWithIndex + class ZipWithIndex[A](underlying: SomeIterableOps[A]^) extends AbstractView[(A, Int)] { + def iterator: Iterator[(A, Int)]^{underlying} = underlying.iterator.zipWithIndex override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class PadTo[A](underlying: SomeIterableOps[A], len: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.padTo(len, elem) + class PadTo[A](underlying: SomeIterableOps[A]^, len: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.padTo(len, elem) override def knownSize: Int = { val size = underlying.knownSize @@ -433,7 +440,7 @@ object View extends IterableFactory[View] { override def isEmpty: Boolean = underlying.isEmpty && len <= 0 } - private[collection] def takeRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { + private[collection] def takeRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { val k = it.knownSize if(k == 0 || n <= 0) Iterator.empty else if(n == Int.MaxValue) it @@ -441,22 +448,23 @@ object View extends IterableFactory[View] { else new TakeRightIterator[A](it, n) } - private final class TakeRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { + private final class TakeRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { + private[this] var current: Iterator[A @uncheckedCaptures]^{underlying} = underlying private[this] var len: Int = -1 private[this] var pos: Int = 0 private[this] var buf: ArrayBuffer[AnyRef] = _ def init(): Unit = if(buf eq null) { buf = new ArrayBuffer[AnyRef](maxlen min 256) len = 0 - while(underlying.hasNext) { - val n = underlying.next().asInstanceOf[AnyRef] + while(current.hasNext) { + val n = current.next().asInstanceOf[AnyRef] if(pos >= buf.length) buf.addOne(n) else buf(pos) = n pos += 1 if(pos == maxlen) pos = 0 len += 1 } - underlying = null + current = null if(len > maxlen) len = maxlen pos = pos - len if(pos < 0) pos += maxlen @@ -477,7 +485,7 @@ object View extends IterableFactory[View] { x } } - override def drop(n: Int): Iterator[A] = { + override def drop(n: Int): Iterator[A]^{this} = { init() if (n > 0) { len = (len - n) max 0 
@@ -487,7 +495,7 @@ object View extends IterableFactory[View] { } } - private[collection] def dropRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { + private[collection] def dropRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { if(n <= 0) it else { val k = it.knownSize @@ -496,7 +504,7 @@ object View extends IterableFactory[View] { } } - private final class DropRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { + private final class DropRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { private[this] var len: Int = -1 // known size or -1 if the end of `underlying` has not been seen yet private[this] var pos: Int = 0 private[this] var buf: ArrayBuffer[AnyRef] = _ diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala index 4699abbef5a7..0f3830e9fe25 100644 --- a/tests/pos-special/stdlib/collection/WithFilter.scala +++ b/tests/pos-special/stdlib/collection/WithFilter.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking /** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods * of trait `Iterable`. @@ -22,6 +23,7 @@ package scala.collection */ @SerialVersionUID(3L) abstract class WithFilter[+A, +CC[_]] extends Serializable { + this: WithFilter[A, CC]^ => /** Builds a new collection by applying a function to all elements of the * `filtered` outer $coll. @@ -32,7 +34,7 @@ abstract class WithFilter[+A, +CC[_]] extends Serializable { * the given function `f` to each element of the filtered outer $coll * and collecting the results. */ - def map[B](f: A => B): CC[B] + def map[B](f: A => B): CC[B]^{this, f} /** Builds a new collection by applying a function to all elements of the * `filtered` outer $coll containing this `WithFilter` instance that satisfy @@ -44,7 +46,7 @@ abstract class WithFilter[+A, +CC[_]] extends Serializable { * of the filtered outer $coll and * concatenating the results. */ - def flatMap[B](f: A => IterableOnce[B]): CC[B] + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} /** Applies a function `f` to all elements of the `filtered` outer $coll. * @@ -65,6 +67,6 @@ abstract class WithFilter[+A, +CC[_]] extends Serializable { * All these operations apply to those elements of this $coll which * also satisfy both `p` and `q` predicates. */ - def withFilter(q: A => Boolean): WithFilter[A, CC] + def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} } diff --git a/tests/pos-special/stdlib/collection/concurrent/Map.scala b/tests/pos-special/stdlib/collection/concurrent/Map.scala index c2b996b93102..d985dad2edc5 100644 --- a/tests/pos-special/stdlib/collection/concurrent/Map.scala +++ b/tests/pos-special/stdlib/collection/concurrent/Map.scala @@ -13,6 +13,7 @@ package scala package collection.concurrent +import language.experimental.captureChecking import scala.annotation.tailrec /** A template trait for mutable maps that allow concurrent access. 
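For orientation before the TrieMap changes: the `^{this, f}` result types given to `map`, `flatMap` and `withFilter` above are capture annotations, recording which capabilities a value may retain. A minimal sketch of the mechanism, assuming only the experimental captureChecking import; `FileSystem` and `Logger` are invented for illustration and are not part of this patch:

    import language.experimental.captureChecking

    class FileSystem

    class Logger(fs: FileSystem^):      // `fs` is a tracked capability
      def log(s: String): Unit = ()     // would write through `fs`

    def test(fs: FileSystem^): Unit =
      // `l` retains `fs`, so its type is Logger^{fs}, just as
      // `map[B](f: A => B): CC[B]^{this, f}` records that a mapped
      // collection retains both the filtered collection and `f`.
      val l: Logger^{fs} = Logger(fs)
      l.log("hello")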
diff --git a/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala index e4aa8c8c52a7..0824ecc44519 100644 --- a/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala +++ b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala @@ -23,8 +23,10 @@ import scala.collection.immutable.{List, Nil} import scala.collection.mutable.GrowableBuilder import scala.util.Try import scala.util.hashing.Hashing +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure -private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: Equiv[K]) extends INodeBase[K, V](g) { +private[collection] final class INode[sealed K, sealed V](bn: MainNode[K, V], g: Gen, equiv: Equiv[K]) extends INodeBase[K, V](g) { import INodeBase._ WRITE(bn) @@ -427,7 +429,7 @@ private[concurrent] object INode { final val KEY_ABSENT = new AnyRef final val KEY_PRESENT_OR_ABSENT = new AnyRef - def newRootNode[K, V](equiv: Equiv[K]) = { + def newRootNode[sealed K, sealed V](equiv: Equiv[K]) = { val gen = new Gen val cn = new CNode[K, V](0, new Array(0), gen) new INode[K, V](cn, gen, equiv) @@ -435,7 +437,7 @@ private[concurrent] object INode { } -private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] { +private[concurrent] final class FailedNode[sealed K, sealed V](p: MainNode[K, V]) extends MainNode[K, V] { WRITE_PREV(p) def string(lev: Int) = throw new UnsupportedOperationException @@ -448,12 +450,12 @@ private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends Main } -private[concurrent] trait KVNode[K, V] { +private[concurrent] trait KVNode[sealed K, sealed V] { def kvPair: (K, V) } -private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) +private[collection] final class SNode[sealed K, sealed V](final val k: K, final val v: V, final val hc: Int) extends BasicNode with KVNode[K, V] { def copy = new SNode(k, v, hc) def copyTombed = new TNode(k, v, hc) @@ -463,7 +465,7 @@ private[collection] final class SNode[K, V](final val k: K, final val v: V, fina } // Tomb Node, used to ensure proper ordering during removals -private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) +private[collection] final class TNode[sealed K, sealed V](final val k: K, final val v: V, final val hc: Int) extends MainNode[K, V] with KVNode[K, V] { def copy = new TNode(k, v, hc) def copyTombed = new TNode(k, v, hc) @@ -475,7 +477,7 @@ private[collection] final class TNode[K, V](final val k: K, final val v: V, fina } // List Node, leaf node that handles hash collisions -private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Equiv[K]) +private[collection] final class LNode[sealed K, sealed V](val entries: List[(K, V)], equiv: Equiv[K]) extends MainNode[K, V] { def this(k: K, v: V, equiv: Equiv[K]) = this((k -> v) :: Nil, equiv) @@ -517,7 +519,7 @@ private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Eq } // Ctrie Node, contains bitmap and array of references to branch nodes -private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { +private[collection] final class CNode[sealed K, sealed V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { // this should only be called from within read-only snapshots def cachedSize(ct: AnyRef): Int = { val currsz = READ_SIZE() @@ 
-653,7 +655,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba private[concurrent] object CNode { - def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { + def dual[sealed K, sealed V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { val xidx = (xhc >>> lev) & 0x1f val yidx = (yhc >>> lev) & 0x1f val bmp = (1 << xidx) | (1 << yidx) @@ -688,7 +690,7 @@ private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expected * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] */ @SerialVersionUID(-5212455458703321708L) -final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) +final class TrieMap[sealed K, sealed V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) extends scala.collection.mutable.AbstractMap[K, V] with scala.collection.concurrent.Map[K, V] with scala.collection.mutable.MapOps[K, V, TrieMap, TrieMap[K, V]] @@ -1017,10 +1019,10 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater override def view: MapView[K, V] = if (nonReadOnly) readOnlySnapshot().view else super.view @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") - override def filterKeys(p: K => Boolean): collection.MapView[K, V] = view.filterKeys(p) + override def filterKeys(p: K => Boolean): collection.MapView[K, V]^{p} = view.filterKeys(p) @deprecated("Use .view.mapValues(f). A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") - override def mapValues[W](f: V => W): collection.MapView[K, W] = view.mapValues(f) + override def mapValues[W](f: V => W): collection.MapView[K, W]^{f} = view.mapValues(f) // END extra overrides /////////////////////////////////////////////////////////////////// @@ -1041,11 +1043,11 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater @SerialVersionUID(3L) object TrieMap extends MapFactory[TrieMap] { - def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V] + def empty[sealed K, sealed V]: TrieMap[K, V] = new TrieMap[K, V] - def from[K, V](it: IterableOnce[(K, V)]): TrieMap[K, V] = new TrieMap[K, V]() ++= it + def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): TrieMap[K, V] = new TrieMap[K, V]() ++= it - def newBuilder[K, V]: mutable.GrowableBuilder[(K, V), TrieMap[K, V]] = new GrowableBuilder(empty[K, V]) + def newBuilder[sealed K, sealed V]: mutable.GrowableBuilder[(K, V), TrieMap[K, V]] = new GrowableBuilder(empty[K, V]) @transient val inodeupdater: AtomicReferenceFieldUpdater[INodeBase[_, _], MainNode[_, _]] = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") @@ -1069,7 +1071,7 @@ object TrieMap extends MapFactory[TrieMap] { } // non-final as an extension point for parallel collections -private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends AbstractIterator[(K, V)] { +private[collection] class TrieMapIterator[sealed K, sealed V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends AbstractIterator[(K, V)] { private val stack = new Array[Array[BasicNode]](7) 
private val stackpos = new Array[Int](7) private var depth = -1 @@ -1182,7 +1184,10 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: stack(d) = arr1 stackpos(d) = -1 val it = newIterator(level + 1, ct, _mustInit = false) - it.stack(0) = arr2 + val xss: Array[Array[BasicNode]] = it.stack.asInstanceOf + // !!! cc split into separate xss and asInstanceOf needed because cc gets confused with + // two-dimensional invariant arrays + xss(0) = arr2 it.stackpos(0) = -1 it.depth = 0 it.advance() // <-- fix it
diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala index 3d155337aa93..bfae792c5107 100644 --- a/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala +++ b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala @@ -18,6 +18,7 @@ import java.util.{concurrent => juc} import java.{lang => jl, util => ju} import scala.{unchecked => uc} +import language.experimental.captureChecking /** Defines converter methods from Scala to Java collections. * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object.
diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala index 16b15c513a17..14268f7aa165 100644 --- a/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala +++ b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala @@ -16,6 +16,7 @@ package convert import java.util.{concurrent => juc} import java.{lang => jl, util => ju} +import language.experimental.captureChecking /** Defines `asJava` extension methods, available through [[scala.jdk.CollectionConverters]]. */ trait AsJavaExtensions {
diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala index 30a28ae38147..6cc02b13bb06 100644 --- a/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala +++ b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala @@ -18,6 +18,7 @@ import java.util.{concurrent => juc} import java.{lang => jl, util => ju} import scala.{unchecked => uc} +import language.experimental.captureChecking /** Defines converter methods from Java to Scala collections. * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object.
diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala index 39347dde903b..d60bfc7f60a1 100644 --- a/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala +++ b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala @@ -16,6 +16,7 @@ package convert import java.util.{concurrent => juc} import java.{lang => jl, util => ju} +import language.experimental.captureChecking /** Defines `asScala` extension methods, available through [[scala.jdk.CollectionConverters]]. 
*/ trait AsScalaExtensions { diff --git a/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala index 05d63f9fdeee..1bc284462ff1 100644 --- a/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala +++ b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala @@ -19,6 +19,7 @@ import java.{lang => jl, util => ju} import scala.collection.JavaConverters._ import scala.language.implicitConversions +import language.experimental.captureChecking /** Defines implicit converter methods from Java to Scala collections. */ @deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") diff --git a/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala index 29c3dcbac5db..e826bdeb23db 100644 --- a/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala +++ b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala @@ -22,6 +22,8 @@ import scala.jdk.CollectionConverters._ import scala.util.Try import scala.util.chaining._ import scala.util.control.ControlThrowable +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** Wrappers for exposing Scala collections as Java collections and vice-versa */ @SerialVersionUID(3L) @@ -127,7 +129,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { def prepend(elem: A) = { underlying.subList(0, 0) add elem; this } def addOne(elem: A): this.type = { underlying add elem; this } def insert(idx: Int,elem: A): Unit = underlying.subList(0, idx).add(elem) - def insertAll(i: Int, elems: IterableOnce[A]) = { + def insertAll(i: Int, elems: IterableOnce[A]^) = { val ins = underlying.subList(0, i) elems.iterator.foreach(ins.add(_)) } @@ -136,7 +138,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { // Note: Clone cannot just call underlying.clone because in Java, only specific collections // expose clone methods. Generically, they're protected. 
override def clone(): JListWrapper[A] = new JListWrapper(new ju.ArrayList[A](underlying)) - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type = { remove(from, replaced) insertAll(from, patch) this @@ -254,7 +256,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { def getKey = k def getValue = v def setValue(v1 : V) = self.put(k, v1) - + // It's important that this implementation conform to the contract // specified in the javadocs of java.util.Map.Entry.hashCode // @@ -358,7 +360,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { val result = underlying.put(k, v) if (present) Some(result) else None } else { - var result: Option[V] = None + var result: Option[V @uncheckedCaptures] = None def recompute(k0: K, v0: V): V = v.tap(_ => if (v0 != null) result = Some(v0) else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) @@ -384,7 +386,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { // support Some(null) if currently bound to null override def remove(k: K): Option[V] = { - var result: Option[V] = None + var result: Option[V @uncheckedCaptures] = None def recompute(k0: K, v0: V): V = { if (v0 != null) result = Some(v0) else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) diff --git a/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala index cdeea62fb5ed..ddda95707881 100644 --- a/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala +++ b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala @@ -22,6 +22,7 @@ import scala.collection._ import scala.collection.convert.StreamExtensions.{AccumulatorFactoryInfo, StreamShape, StreamUnboxer} import scala.jdk.CollectionConverters._ import scala.jdk._ +import language.experimental.captureChecking /** Defines extension methods to create Java Streams for Scala collections, available through * [[scala.jdk.javaapi.StreamConverters]]. 
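The `Option[V @uncheckedCaptures]` rewrites in JMapWrapper above follow a recurring rule: under capture checking, a mutable variable may not hold capability-retaining values unless it is explicitly exempted. A hedged sketch of the pattern, assuming that rule; `firstDefined` is an invented helper, not part of the patch:

    import language.experimental.captureChecking
    import scala.annotation.unchecked.uncheckedCaptures

    // A may retain capabilities, so a plain `var result: Option[A]`
    // would be rejected. The exemption is sound because `result` is
    // local to the method and never escapes it.
    def firstDefined[A](it: Iterator[Option[A]]^): Option[A] =
      var result: Option[A @uncheckedCaptures] = None
      while result.isEmpty && it.hasNext do
        result = it.next()
      result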
diff --git a/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala index 845ecb4a606d..ba51c7a5a353 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala @@ -14,6 +14,7 @@ package scala.collection.convert package impl import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[collection] class ObjectArrayStepper[A <: Object](underlying: Array[A], _i0: Int, _iN: Int) extends IndexedStepperBase[AnyStepper[A], ObjectArrayStepper[A]](_i0, _iN) diff --git a/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala index 7c795aea5391..8b2f604b0977 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala @@ -18,6 +18,7 @@ import java.util.Spliterator import annotation.tailrec import scala.collection.Stepper.EfficientSplit import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[collection] object BinaryTreeStepper { @@ -125,7 +126,7 @@ extends EfficientSplit { if (!hasStep || index < 0) null else { val root = stack(0).asInstanceOf[T] - val leftStack = + val leftStack = if (index > 0) java.util.Arrays.copyOfRange(stack, 1, index+1) else BinaryTreeStepper.emptyStack val leftIndex = index - 1 diff --git a/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala index 574e7fd50f1c..16801089c39f 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala @@ -15,11 +15,12 @@ package impl import scala.collection.Stepper.EfficientSplit import scala.collection.{BitSetOps, IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable private[collection] final class BitSetStepper( - private var underlying: BitSetOps[_], - private var cache0: Long, private var cache1: Long, + private var underlying: BitSetOps[_], + private var cache0: Long, private var cache1: Long, _i0: Int, _iN: Int, private var cacheIndex: Int ) @@ -47,7 +48,7 @@ with IntStepper { findNext() } } - else if (underlying eq null) { + else if (underlying eq null) { i0 = iN found = false found @@ -96,7 +97,7 @@ with IntStepper { else scanLong(bits, from + 1) def nextStep(): Int = - if (found || findNext()) { + if (found || findNext()) { found = false val ans = i0 i0 += 1 diff --git a/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala index 466e6c440f45..12fb471ea768 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala @@ -16,6 +16,7 @@ package impl import scala.collection.Stepper.EfficientSplit import scala.collection._ import scala.collection.immutable.Node +// import language.experimental.captureChecking // TODO enable /** A stepper that is a slightly elaborated version of the ChampBaseIterator; * the main difference is that it knows when it should stop instead of running diff --git a/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala 
b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala index 2d1f88d02930..7140c7d673d0 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala @@ -16,6 +16,7 @@ package impl import java.util.Spliterator import scala.collection.Stepper.EfficientSplit +// import language.experimental.captureChecking // TODO enable /** Abstracts all the generic operations of stepping over a collection * that has an indexable ordering but may have gaps. diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala index 136ac8d2dcc3..1e2983fde50d 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala @@ -14,6 +14,7 @@ package scala.collection.convert package impl import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[collection] class AnyIndexedSeqStepper[A](underlying: collection.IndexedSeqOps[A, AnyConstr, _], _i0: Int, _iN: Int) extends IndexedStepperBase[AnyStepper[A], AnyIndexedSeqStepper[A]](_i0, _iN) diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala index 4670ccc56bfc..cae3809ab077 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala @@ -16,6 +16,7 @@ package impl import java.util.Spliterator import scala.collection.Stepper.EfficientSplit +// import language.experimental.captureChecking // TODO enable /** Abstracts all the generic operations of stepping over an indexable collection */ private[convert] abstract class IndexedStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) diff --git a/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala index 68b318c04c9c..393e988959eb 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala @@ -17,6 +17,7 @@ import java.util.Spliterator import scala.collection.{AnyStepper, DoubleStepper, IntStepper, LongStepper, Stepper} import scala.jdk.{AnyAccumulator, DoubleAccumulator, IntAccumulator, LongAccumulator} +// import language.experimental.captureChecking // TODO enable private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A]) extends IteratorStepperBase[A, AnyStepper[A], AnyIteratorStepper[A]](_underlying) diff --git a/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala index 89e17bbf467c..7c122f901839 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala @@ -15,6 +15,7 @@ package impl import scala.collection.{AnyStepper, IntStepper, LongStepper, Stepper} import scala.collection.immutable.NumericRange +// import language.experimental.captureChecking // TODO enable private[collection] class AnyNumericRangeStepper[A](underlying: NumericRange[A], _i0: Int, _iN: Int) extends IndexedStepperBase[AnyStepper[A], 
AnyNumericRangeStepper[A]](_i0, _iN) diff --git a/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala index 282ddb4aa2ad..50ab623a014e 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala @@ -14,6 +14,7 @@ package scala.collection.convert package impl import scala.collection.{IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable /** Implements Stepper on an integer Range. You don't actually need the Range to do this, * so only the relevant parts are included. Because the arguments are protected, they are @@ -27,7 +28,7 @@ with IntStepper { val ans = myNext myNext += myStep i0 += 1 - ans + ans } else Stepper.throwNSEE() protected def semiclone(half: Int): RangeStepper = new RangeStepper(myNext, myStep, i0, half) diff --git a/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala index 8990f462b4fd..fe127b857c45 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala @@ -18,6 +18,7 @@ import java.util.Spliterator import scala.collection.Stepper.EfficientSplit import scala.collection.{IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable /** Implements `Stepper` on a `String` where you step through chars packed into `Int`. */ diff --git a/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala index cac041a5237b..6329d83bc2a0 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala @@ -15,6 +15,7 @@ package impl import scala.collection.Stepper.EfficientSplit import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[collection] abstract class TableStepperBase[A, I >: Null <: AnyRef, Sub >: Null, Semi <: Sub with TableStepperBase[A, I, _, _]]( protected var maxLength: Int, protected val table: Array[I], protected var i0: Int, protected val iN: Int diff --git a/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala index 332ec65d85fd..504e0dac63ea 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala @@ -14,6 +14,7 @@ package scala.collection.convert package impl import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[convert] abstract class VectorStepperBase[Sub >: Null, Semi <: Sub]( _i0: Int, @@ -91,7 +92,7 @@ with DoubleStepper { index1 = 32 i0 = half ans - } + } } private[collection] class IntVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) @@ -109,7 +110,7 @@ with IntStepper { index1 = 32 i0 = half ans - } + } } private[collection] class LongVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) @@ -127,5 +128,5 @@ with LongStepper { index1 = 32 i0 = half ans - } + } } diff --git a/tests/pos-special/stdlib/collection/generic/BitOperations.scala b/tests/pos-special/stdlib/collection/generic/BitOperations.scala index 4c64dec9dc1f..f76619a004fa 100644 --- 
a/tests/pos-special/stdlib/collection/generic/BitOperations.scala +++ b/tests/pos-special/stdlib/collection/generic/BitOperations.scala @@ -12,6 +12,7 @@ package scala.collection package generic +import language.experimental.captureChecking /** Some bit operations. diff --git a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala index 69b4b3d96e61..7eba9433b8d5 100644 --- a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala +++ b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala @@ -16,6 +16,8 @@ import java.io.{ObjectInputStream, ObjectOutputStream} import scala.collection.{Factory, Iterable} import scala.collection.mutable.Builder +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** The default serialization proxy for collection implementations. * @@ -27,7 +29,8 @@ import scala.collection.mutable.Builder @SerialVersionUID(3L) final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { - @transient protected var builder: Builder[A, Any] = _ + @transient protected var builder: Builder[A @uncheckedCaptures, Any] = _ + // @uncheckedCaptures OK since builder is used only locally when reading objects private[this] def writeObject(out: ObjectOutputStream): Unit = { out.defaultWriteObject() diff --git a/tests/pos-special/stdlib/collection/generic/IsIterable.scala b/tests/pos-special/stdlib/collection/generic/IsIterable.scala index bf2eab6bb2a6..c309299b615b 100644 --- a/tests/pos-special/stdlib/collection/generic/IsIterable.scala +++ b/tests/pos-special/stdlib/collection/generic/IsIterable.scala @@ -12,6 +12,7 @@ package scala.collection package generic +import language.experimental.captureChecking /** A trait which can be used to avoid code duplication when defining extension * methods that should be applicable both to existing Scala collections (i.e., diff --git a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala index 7d7293037bd4..2836ca2bb520 100644 --- a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala +++ b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala @@ -13,6 +13,7 @@ package scala package collection package generic +import language.experimental.captureChecking /** Type class witnessing that a collection representation type `Repr` has * elements of type `A` and has a conversion to `IterableOnce[A]`. 
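The `[sealed K, sealed V]` parameters threaded through INode, SNode, CNode and TrieMap above restrict instantiation to pure types, which is what makes parking elements in mutable state sound without an opt-out annotation. A minimal sketch under that reading; the `Slot` class is invented for illustration:

    import language.experimental.captureChecking

    // A sealed type parameter accepts only types that retain no
    // capabilities, so the mutable field below cannot smuggle a
    // capability past its intended lifetime. The same reasoning
    // applies to the TrieMap node classes above.
    class Slot[sealed A]:
      private var content: Option[A] = None
      def put(a: A): Unit = content = Some(a)
      def get: Option[A] = content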
diff --git a/tests/pos-special/stdlib/collection/generic/IsMap.scala b/tests/pos-special/stdlib/collection/generic/IsMap.scala index 19f75cf7bced..ad7254d2dd61 100644 --- a/tests/pos-special/stdlib/collection/generic/IsMap.scala +++ b/tests/pos-special/stdlib/collection/generic/IsMap.scala @@ -15,6 +15,7 @@ package generic import IsMap.Tupled import scala.collection.immutable.{IntMap, LongMap} +import language.experimental.captureChecking /** * Type class witnessing that a collection type `Repr`
diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala index 69ea27d087d1..8ad344c4d4fc 100644 --- a/tests/pos-special/stdlib/collection/generic/IsSeq.scala +++ b/tests/pos-special/stdlib/collection/generic/IsSeq.scala @@ -14,6 +14,8 @@ package scala.collection package generic import scala.reflect.ClassTag +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** Type class witnessing that a collection representation type `Repr` has * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for @@ -51,11 +54,24 @@ object IsSeq { implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] - implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsSeq[CC0[A0]] { type A = A0; type C = View[A0] } = - new IsSeq[CC0[A0]] { + /** !!! Under cc, views are not Seqs and can't use SeqOps. + * So this should be renamed to seqViewIsIterable + */ + implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsIterable[CC0[A0]] { type A = A0; type C = View[A0] } = + new IsIterable[CC0[A0]] { type A = A0 type C = View[A] - def apply(coll: CC0[A0]): SeqOps[A0, View, View[A0]] = coll + def apply(coll: CC0[A0]): IterableOps[A0, View, View[A0]] = coll + } + + /** !!! Under cc, views are not Seqs and can't use SeqOps. 
+ * So this should be renamed to stringViewIsIterable + */ + implicit val stringViewIsSeq: IsIterable[StringView] { type A = Char; type C = View[Char] } = + new IsIterable[StringView] { + type A = Char + type C = View[Char] + def apply(coll: StringView): IterableOps[Char, View, View[Char]] = coll } implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = @@ -68,22 +84,15 @@ object IsSeq { def apply(i: Int): Char = s.charAt(i) def toIterable: Iterable[Char] = new immutable.WrappedString(s) protected[this] def coll: String = s - protected[this] def fromSpecific(coll: IterableOnce[Char]): String = coll.iterator.mkString - def iterableFactory: IterableFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged + protected[this] def fromSpecific(coll: IterableOnce[Char]^): String = coll.iterator.mkString + def iterableFactory: FreeSeqFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged override def empty: String = "" protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder def iterator: Iterator[Char] = s.iterator } } - implicit val stringViewIsSeq: IsSeq[StringView] { type A = Char; type C = View[Char] } = - new IsSeq[StringView] { - type A = Char - type C = View[Char] - def apply(coll: StringView): SeqOps[Char, View, View[Char]] = coll - } - - implicit def arrayIsSeq[A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = + implicit def arrayIsSeq[sealed A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = new IsSeq[Array[A0]] { type A = A0 type C = Array[A0] @@ -91,10 +100,10 @@ object IsSeq { new SeqOps[A, mutable.ArraySeq, Array[A]] { def apply(i: Int): A = a(i) def length: Int = a.length - def toIterable: Iterable[A] = mutable.ArraySeq.make(a) + def toIterable: Iterable[A] = mutable.ArraySeq.make[A @uncheckedCaptures](a) protected def coll: Array[A] = a - protected def fromSpecific(coll: IterableOnce[A]): Array[A] = Array.from(coll) - def iterableFactory: IterableFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged + protected def fromSpecific(coll: IterableOnce[A]^): Array[A] = Array.from(coll) + def iterableFactory: FreeSeqFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged override def empty: Array[A] = Array.empty[A] protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder def iterator: Iterator[A] = a.iterator diff --git a/tests/pos-special/stdlib/collection/generic/Subtractable.scala b/tests/pos-special/stdlib/collection/generic/Subtractable.scala index 223997f4e972..2c0967dbaf4b 100644 --- a/tests/pos-special/stdlib/collection/generic/Subtractable.scala +++ b/tests/pos-special/stdlib/collection/generic/Subtractable.scala @@ -13,6 +13,7 @@ package scala package collection package generic +import language.experimental.captureChecking /** This trait represents collection-like objects that can be reduced * using a '+' operator. 
It defines variants of `-` and `--` diff --git a/tests/pos-special/stdlib/collection/generic/package.scala b/tests/pos-special/stdlib/collection/generic/package.scala index 0c16aa04dc98..0ba67c1bf76e 100644 --- a/tests/pos-special/stdlib/collection/generic/package.scala +++ b/tests/pos-special/stdlib/collection/generic/package.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking package object generic { diff --git a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala index 978c63034f4a..3a221fc76b6c 100644 --- a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala +++ b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala @@ -23,6 +23,8 @@ import scala.reflect.ClassTag import scala.runtime.ScalaRunTime import scala.util.Sorting import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** * An immutable array. @@ -38,7 +40,8 @@ sealed abstract class ArraySeq[+A] with IndexedSeqOps[A, ArraySeq, ArraySeq[A]] with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]] with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag] - with Serializable { + with Serializable + with Pure { /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype @@ -53,8 +56,10 @@ sealed abstract class ArraySeq[+A] * array of a supertype or subtype of the element type. */ def unsafeArray: Array[_] + def unsafeArrayAsAnyArray = unsafeArray.asInstanceOf[Array[Any]] + protected def evidenceIterableFactory: ArraySeq.type = ArraySeq - protected def iterableEvidence: ClassTag[A @uncheckedVariance] = elemTag.asInstanceOf[ClassTag[A]] + protected def iterableEvidence: ClassTag[A @uncheckedVariance @uncheckedCaptures] = elemTag.asInstanceOf[ClassTag[A]] def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit @@ -79,10 +84,10 @@ sealed abstract class ArraySeq[+A] } override def prepended[B >: A](elem: B): ArraySeq[B] = - ArraySeq.unsafeWrapArray(unsafeArray.prepended[Any](elem)).asInstanceOf[ArraySeq[B]] + ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.prepended(elem)).asInstanceOf[ArraySeq[B]] override def appended[B >: A](elem: B): ArraySeq[B] = - ArraySeq.unsafeWrapArray(unsafeArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]] + ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]] /** Fast concatenation of two [[ArraySeq]]s. * @@ -104,8 +109,8 @@ sealed abstract class ArraySeq[+A] null else if (thisIsObj) { // A and B are objects - val ax = this.unsafeArray.asInstanceOf[Array[A]] - val ay = that.unsafeArray.asInstanceOf[Array[B]] + val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] + val ay = that.unsafeArray.asInstanceOf[Array[B @uncheckedCaptures]] val len = ax.length + ay.length val a = new Array[AnyRef](len) System.arraycopy(ax, 0, a, 0, ax.length) @@ -113,8 +118,8 @@ sealed abstract class ArraySeq[+A] ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] } else { // A is a primative and B = A. Use this instance's protected ClassTag. 
- val ax = this.unsafeArray.asInstanceOf[Array[A]] - val ay = that.unsafeArray.asInstanceOf[Array[A]] + val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] + val ay = that.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] val len = ax.length + ay.length val a = iterableEvidence.newArray(len) System.arraycopy(ax, 0, a, 0, ax.length) @@ -124,7 +129,7 @@ sealed abstract class ArraySeq[+A] } } - override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): ArraySeq[B] = { + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): ArraySeq[B] = { def genericResult = { val k = suffix.knownSize if (k == 0) this @@ -147,7 +152,7 @@ sealed abstract class ArraySeq[+A] } } - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): ArraySeq[B] = { + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): ArraySeq[B] = { def genericResult = { val k = prefix.knownSize if (k == 0) this @@ -171,7 +176,7 @@ sealed abstract class ArraySeq[+A] } } - override def zip[B](that: collection.IterableOnce[B]): ArraySeq[(A, B)] = + override def zip[B](that: collection.IterableOnce[B]^): ArraySeq[(A, B)] = that match { case bs: ArraySeq[B] => ArraySeq.tabulate(length min bs.length) { i => @@ -181,35 +186,37 @@ sealed abstract class ArraySeq[+A] strictOptimizedZip[B, ArraySeq[(A, B)]](that, iterableFactory.newBuilder) } + private inline def ops[A](xs: Array[A @uncheckedCaptures]): ArrayOps[A] = new ArrayOps[A @uncheckedCaptures](xs) + override def take(n: Int): ArraySeq[A] = if (unsafeArray.length <= n) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).take(n)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).take(n)).asInstanceOf[ArraySeq[A]] override def takeRight(n: Int): ArraySeq[A] = if (unsafeArray.length <= n) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).takeRight(n)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).takeRight(n)).asInstanceOf[ArraySeq[A]] override def drop(n: Int): ArraySeq[A] = if (n <= 0) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).drop(n)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).drop(n)).asInstanceOf[ArraySeq[A]] override def dropRight(n: Int): ArraySeq[A] = if (n <= 0) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).dropRight(n)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).dropRight(n)).asInstanceOf[ArraySeq[A]] override def slice(from: Int, until: Int): ArraySeq[A] = if (from <= 0 && unsafeArray.length <= until) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).slice(from, until)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).slice(from, until)).asInstanceOf[ArraySeq[A]] override def foldLeft[B](z: B)(f: (B, A) => B): B = { // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast @@ -239,13 +246,13 @@ sealed abstract class ArraySeq[+A] b } - override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).tail).asInstanceOf[ArraySeq[A]] + override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).tail).asInstanceOf[ArraySeq[A]] - override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).reverse).asInstanceOf[ArraySeq[A]] + override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).reverse).asInstanceOf[ArraySeq[A]] override protected[this] def 
className = "ArraySeq" - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if(copied > 0) { Array.copy(unsafeArray, 0, xs, start, copied) @@ -277,18 +284,18 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => def empty[A : ClassTag]: ArraySeq[A] = emptyImpl - def from[A](it: scala.collection.IterableOnce[A])(implicit tag: ClassTag[A]): ArraySeq[A] = it match { + def from[A](it: scala.collection.IterableOnce[A]^)(implicit tag: ClassTag[A]): ArraySeq[A] = it match { case as: ArraySeq[A] => as case _ => unsafeWrapArray(Array.from[A](it)) } def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = - ArrayBuffer.newBuilder[A].mapResult(b => unsafeWrapArray[A](b.toArray)) + ArrayBuffer.newBuilder[A @uncheckedCaptures].mapResult(b => unsafeWrapArray[A](b.toArray)) override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem) override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = { - val elements = Array.ofDim[A](scala.math.max(n, 0)) + val elements = Array.ofDim[A @uncheckedCaptures](scala.math.max(n, 0)) var i = 0 while (i < n) { ScalaRunTime.array_update(elements, i, f(i)) @@ -309,7 +316,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a * `ClassCastException` at runtime. */ - def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = ((x: @unchecked) match { + def unsafeWrapArray[T](x: Array[T @uncheckedCaptures]): ArraySeq[T] = ((x: @unchecked) match { case null => null case x: Array[AnyRef] => new ofRef[AnyRef](x) case x: Array[Int] => new ofInt(x) diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala index 9461264850a9..9c2bfdad54d0 100644 --- a/tests/pos-special/stdlib/collection/immutable/BitSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/BitSet.scala @@ -17,6 +17,7 @@ package immutable import BitSetOps.{LogWL, updateArray} import mutable.Builder import scala.annotation.{implicitNotFound, nowarn} +import language.experimental.captureChecking /** A class for immutable bitsets. 
* $bitsetinfo @@ -37,7 +38,7 @@ sealed abstract class BitSet override def unsorted: Set[Int] = this - override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder override def empty: BitSet = bitSetFactory.empty @@ -94,7 +95,7 @@ sealed abstract class BitSet @SerialVersionUID(3L) object BitSet extends SpecificIterableFactory[Int, BitSet] { - def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = + def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = it match { case bs: BitSet => bs case _ => (newBuilder ++= it).result() diff --git a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala index 711332567b0f..fc9bcb022874 100644 --- a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala +++ b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala @@ -16,6 +16,7 @@ package scala.collection.immutable import java.lang.Integer.bitCount import java.lang.Math.ceil import java.lang.System.arraycopy +import language.experimental.captureChecking private[collection] object Node { final val HashCodeLength = 32 @@ -112,7 +113,7 @@ private[immutable] abstract class ChampBaseIterator[T <: Node[T]] { // ChampBaseReverseIterator and in convert.impl.ChampStepperBase. // If you change this code, check those also in case they also // need to be modified. - + protected var currentValueCursor: Int = 0 protected var currentValueLength: Int = 0 protected var currentValueNode: T = _ diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala index 2e8378c4d810..c364924db3a3 100644 --- a/tests/pos-special/stdlib/collection/immutable/HashMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/HashMap.scala @@ -25,6 +25,8 @@ import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, Step import scala.runtime.AbstractFunction2 import scala.runtime.Statics.releaseFence import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree. * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. 
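Two idioms recur in this file and throughout the rest of the patch, so it is worth spelling them out once before the HashMap hunks below. A parameter that is merely consumed gains a trailing `^`, which lets callers pass a collection that captures capabilities; a `var` or an invariant type argument whose capture set the checker cannot track is escaped with `@uncheckedCaptures`. The sketch below is illustrative only: it is not part of the patch, and it assumes a capture-checked standard library such as the one under tests/pos-special/stdlib.

import language.experimental.captureChecking
import scala.annotation.unchecked.uncheckedCaptures

object CaptureIdioms:

  // `xs: IterableOnce[Int]^` admits arguments with an arbitrary capture set;
  // without the `^`, only pure collections would conform to the parameter type.
  def sum(xs: IterableOnce[Int]^): Int =
    var acc = 0
    val it = xs.iterator // the iterator's type captures {xs}
    while it.hasNext do acc += it.next()
    acc

  // The capture checker restricts `var`s whose types mention a type variable,
  // because the variable may later be instantiated to a capturing type.
  // `@uncheckedCaptures` disables the check for the annotated type; this is
  // the same escape hatch the builders and node classes in this patch use.
  final class ReverseBuilder[A]:
    private var elems: List[A @uncheckedCaptures] = Nil
    def addOne(elem: A): this.type = { elems = elem :: elems; this }
    def result(): List[A] = elems

Read mechanically, every `IterableOnce[...]^` in the hunks that follow is an instance of the first idiom, and every `@uncheckedCaptures` an instance of the second.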
@@ -161,7 +163,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0)) } - override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]): HashMap[K, V1] = that match { + override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]^): HashMap[K, V1] = that match { case hm: HashMap[K, V1] => if (isEmpty) hm else { @@ -384,7 +386,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: else new HashMap(newRootNode) } - override def removedAll(keys: IterableOnce[K]): HashMap[K, V] = { + override def removedAll(keys: IterableOnce[K]^): HashMap[K, V] = { if (isEmpty) { this } else { @@ -1766,7 +1768,7 @@ private final class BitmapIndexedMapNode[K, +V]( } else { mapOfNewNodes |= bitpos if (newNodes eq null) { - newNodes = mutable.Queue.empty + newNodes = mutable.Queue.empty[MapNode[K, V] @uncheckedCaptures] } newNodes += newSubNode } @@ -1851,7 +1853,7 @@ private final class BitmapIndexedMapNode[K, +V]( private final class HashCollisionMapNode[K, +V ]( val originalHash: Int, val hash: Int, - var content: Vector[(K, V @uV)] + var content: Vector[(K, V @uV) @uncheckedCaptures] ) extends MapNode[K, V] { import Node._ @@ -2155,7 +2157,7 @@ private final class MapKeyValueTupleReverseIterator[K, V](rootNode: MapNode[K, V private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V]) extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[Any] { private[this] var hash = 0 - private[this] var value: V = _ + private[this] var value: V @uncheckedCaptures = _ override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed) def next() = { if (!hasNext) @@ -2202,7 +2204,7 @@ object HashMap extends MapFactory[HashMap] { def empty[K, V]: HashMap[K, V] = EmptyMap.asInstanceOf[HashMap[K, V]] - def from[K, V](source: collection.IterableOnce[(K, V)]): HashMap[K, V] = + def from[K, V](source: collection.IterableOnce[(K, V)]^): HashMap[K, V] = source match { case hs: HashMap[K, V] => hs case _ => (newBuilder[K, V] ++= source).result() @@ -2227,12 +2229,12 @@ private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, /** The last given out HashMap as a return value of `result()`, if any, otherwise null. * Indicates that on next add, the elements should be copied to an identical structure, before continuing * mutations. */ - private var aliased: HashMap[K, V] = _ + private var aliased: HashMap[K, V] @uncheckedCaptures = _ private def isAliased: Boolean = aliased != null /** The root node of the partially built hashmap */ - private var rootNode: BitmapIndexedMapNode[K, V] = newEmptyRootNode + private var rootNode: BitmapIndexedMapNode[K, V] @uncheckedCaptures = newEmptyRootNode private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = if (rootNode.size == 0) value @@ -2366,7 +2368,7 @@ private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, this } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { ensureUnaliased() xs match { case hm: HashMap[K, V] => @@ -2383,7 +2385,7 @@ private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, ) currentValueCursor += 1 } - } + }.asInstanceOf // !!!
cc gets confused with representation of capture sets in invariant position case hm: collection.mutable.HashMap[K, V] => val iter = hm.nodeIterator while (iter.hasNext) { diff --git a/tests/pos-special/stdlib/collection/immutable/HashSet.scala b/tests/pos-special/stdlib/collection/immutable/HashSet.scala index 459fcf1682aa..38f394a7005f 100644 --- a/tests/pos-special/stdlib/collection/immutable/HashSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/HashSet.scala @@ -23,6 +23,8 @@ import scala.collection.generic.DefaultSerializable import scala.collection.mutable.ReusableBuilder import scala.runtime.Statics.releaseFence import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree. * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. @@ -1152,7 +1154,7 @@ private final class BitmapIndexedSetNode[A]( } else { mapOfNewNodes |= bitpos if (newNodes eq null) { - newNodes = mutable.Queue.empty + newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] } newNodes += newSubNode } @@ -1160,7 +1162,7 @@ private final class BitmapIndexedSetNode[A]( newDataMap |= bitpos nodeMigrateToDataTargetMap |= bitpos if (nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue.empty + nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] } nodesToMigrateToData += newSubNode } @@ -1267,7 +1269,7 @@ private final class BitmapIndexedSetNode[A]( } else { mapOfNewNodes |= bitpos if (newNodes eq null) { - newNodes = mutable.Queue.empty + newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] } newNodes += newSubNode } @@ -1275,7 +1277,7 @@ private final class BitmapIndexedSetNode[A]( newDataMap |= bitpos nodeMigrateToDataTargetMap |= bitpos if (nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue.empty + nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] } nodesToMigrateToData += newSubNode } @@ -1740,7 +1742,7 @@ private final class BitmapIndexedSetNode[A]( } } -private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A]) extends SetNode[A] { +private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A] @uncheckedCaptures) extends SetNode[A] { import Node._ @@ -1944,7 +1946,7 @@ object HashSet extends IterableFactory[HashSet] { def empty[A]: HashSet[A] = EmptySet.asInstanceOf[HashSet[A]] - def from[A](source: collection.IterableOnce[A]): HashSet[A] = + def from[A](source: collection.IterableOnce[A]^): HashSet[A] = source match { case hs: HashSet[A] => hs case _ if source.knownSize == 0 => empty[A] @@ -1969,12 +1971,12 @@ private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, Has /** The last given out HashSet as a return value of `result()`, if any, otherwise null. * Indicates that on next add, the elements should be copied to an identical structure, before continuing * mutations. 
*/ - private var aliased: HashSet[A] = _ + private var aliased: HashSet[A] @uncheckedCaptures = _ private def isAliased: Boolean = aliased != null /** The root node of the partially built hash set */ - private var rootNode: BitmapIndexedSetNode[A] = newEmptyRootNode + private var rootNode: BitmapIndexedSetNode[A] @uncheckedCaptures = newEmptyRootNode /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { @@ -2084,7 +2086,7 @@ private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, Has this } - override def addAll(xs: IterableOnce[A]) = { + override def addAll(xs: IterableOnce[A]^) = { ensureUnaliased() xs match { case hm: HashSet[A] => @@ -2100,7 +2102,7 @@ private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, Has ) currentValueCursor += 1 } - } + }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position case other => val it = other.iterator while(it.hasNext) addOne(it.next()) diff --git a/tests/pos-special/stdlib/collection/immutable/IntMap.scala b/tests/pos-special/stdlib/collection/immutable/IntMap.scala index 240821b11460..d7077845b845 100644 --- a/tests/pos-special/stdlib/collection/immutable/IntMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/IntMap.scala @@ -18,6 +18,8 @@ import scala.collection.mutable.{Builder, ImmutableBuilder} import scala.annotation.tailrec import scala.annotation.unchecked.uncheckedVariance import scala.language.implicitConversions +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** Utility class for integer maps. */ @@ -52,7 +54,7 @@ object IntMap { def apply[T](elems: (Int, T)*): IntMap[T] = elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - def from[V](coll: IterableOnce[(Int, V)]): IntMap[V] = + def from[V](coll: IterableOnce[(Int, V)]^): IntMap[V] = newBuilder[V].addAll(coll).result() private[immutable] case object Nil extends IntMap[Nothing] { @@ -89,13 +91,13 @@ object IntMap { @SerialVersionUID(3L) private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Int, AnyRef)]): IntMap[AnyRef] = IntMap.from[AnyRef](it) + def fromSpecific(it: IterableOnce[(Int, AnyRef)]^): IntMap[AnyRef] = IntMap.from[AnyRef](it) def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = IntMap.newBuilder[AnyRef] } implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]] private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]) = IntMap.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]^) = IntMap.from(it) def newBuilder(from: Any) = IntMap.newBuilder[AnyRef] } @@ -180,9 +182,9 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] with StrictOptimizedMapOps[Int, T, Map, IntMap[T]] with Serializable { - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]): IntMap[T] = + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]^): IntMap[T] = intMapFrom[T](coll) - protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]): IntMap[V2] = { + protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]^):
IntMap[V2] = { val b = IntMap.newBuilder[V2] b.sizeHint(coll) b.addAll(coll) @@ -196,7 +198,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] override def empty: IntMap[T] = IntMap.Nil override def toList = { - val buffer = new scala.collection.mutable.ListBuffer[(Int, T)] + val buffer = new scala.collection.mutable.ListBuffer[(Int, T) @uncheckedCaptures] foreach(buffer += _) buffer.toList } @@ -327,10 +329,10 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) - override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = + override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such - override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = concat(that) + override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = concat(that) def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] = strictOptimizedCollect(IntMap.newBuilder[V2], pf) diff --git a/tests/pos-special/stdlib/collection/immutable/Iterable.scala b/tests/pos-special/stdlib/collection/immutable/Iterable.scala index d4199ab3ab14..c4f9900eea8b 100644 --- a/tests/pos-special/stdlib/collection/immutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/immutable/Iterable.scala @@ -13,6 +13,7 @@ package scala.collection.immutable import scala.collection.{IterableFactory, IterableFactoryDefaults} +import language.experimental.captureChecking /** A trait for collections that are guaranteed immutable. * @@ -24,13 +25,14 @@ import scala.collection.{IterableFactory, IterableFactoryDefaults} trait Iterable[+A] extends collection.Iterable[A] with collection.IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { + this: Iterable[A]^ => override def iterableFactory: IterableFactory[Iterable] = Iterable } @SerialVersionUID(3L) object Iterable extends IterableFactory.Delegate[Iterable](List) { - override def from[E](it: IterableOnce[E]): Iterable[E] = it match { + override def from[E](it: IterableOnce[E]^): Iterable[E]^{it} = it match { case iterable: Iterable[E] => iterable case _ => super.from(it) } diff --git a/tests/pos-special/stdlib/collection/immutable/LazyList.scala b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala similarity index 66% rename from tests/pos-special/stdlib/collection/immutable/LazyList.scala rename to tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala index 8b7ad26dc5ae..5684130b6048 100644 --- a/tests/pos-special/stdlib/collection/immutable/LazyList.scala +++ b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala @@ -22,21 +22,29 @@ import scala.collection.generic.SerializeEnd import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder} import scala.language.implicitConversions import scala.runtime.Statics +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** This class implements an immutable linked list. We call it "lazy" * because it computes its elements only when they are needed. * + * The class extends Iterable; it is a replacement for LazyList, which + * implemented Seq. The reason is that under capture checking, we + * assume that all Seqs are strict, and LazyList broke that assumption.
+ * As a consequence, we declare LazyList deprecated and unsafe for + * capture checking, and replace it with the current class, LazyListIterable. + * * Elements are memoized; that is, the value of each element is computed at most once. * * Elements are computed in-order and are never skipped. In other words, * accessing the tail causes the head to be computed first. * - * How lazy is a `LazyList`? When you have a value of type `LazyList`, you + * How lazy is a `LazyListIterable`? When you have a value of type `LazyListIterable`, you * don't know yet whether the list is empty or not. If you learn that it is non-empty, * then you also know that the head has been computed. But the tail is itself - * a `LazyList`, whose emptiness-or-not might remain undetermined. + * a `LazyListIterable`, whose emptiness-or-not might remain undetermined. * - * A `LazyList` may be infinite. For example, `LazyList.from(0)` contains + * A `LazyListIterable` may be infinite. For example, `LazyListIterable.from(0)` contains * all of the natural numbers 0, 1, 2, and so on. For infinite sequences, * some methods (such as `count`, `sum`, `max` or `min`) will not terminate. * @@ -45,7 +53,7 @@ import scala.runtime.Statics * {{{ * import scala.math.BigInt * object Main extends App { - * val fibs: LazyList[BigInt] = + * val fibs: LazyListIterable[BigInt] = * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 } * fibs.take(5).foreach(println) * } @@ -65,7 +73,7 @@ import scala.runtime.Statics * {{{ * import scala.math.BigInt * object Main extends App { - * val fibs: LazyList[BigInt] = + * val fibs: LazyListIterable[BigInt] = * BigInt(0) #:: BigInt(1) #:: * fibs.zip(fibs.tail).map{ n => * println(s"Adding \${n._1} and \${n._2}") @@ -98,22 +106,22 @@ import scala.runtime.Statics * }}} * * Note that the definition of `fibs` uses `val` not `def`. The memoization of the - * `LazyList` requires us to have somewhere to store the information and a `val` + * `LazyListIterable` requires us to have somewhere to store the information and a `val` * allows us to do that. * - * Further remarks about the semantics of `LazyList`: + * Further remarks about the semantics of `LazyListIterable`: * - * - Though the `LazyList` changes as it is accessed, this does not + * - Though the `LazyListIterable` changes as it is accessed, this does not * contradict its immutability. Once the values are memoized they do * not change. Values that have yet to be memoized still "exist", they * simply haven't been computed yet. * * - One must be cautious of memoization; it can eat up memory if you're not - * careful. That's because memoization of the `LazyList` creates a structure much like + * careful. That's because memoization of the `LazyListIterable` creates a structure much like * [[scala.collection.immutable.List]]. As long as something is holding on to * the head, the head holds on to the tail, and so on recursively. * If, on the other hand, there is nothing holding on to the head (e.g. if we used - * `def` to define the `LazyList`) then once it is no longer being used directly, + * `def` to define the `LazyListIterable`) then once it is no longer being used directly, * it disappears.
* * - Note that some operations, including [[drop]], [[dropWhile]], @@ -133,30 +141,30 @@ import scala.runtime.Statics * } * } * - * // Our first LazyList definition will be a val definition - * val lazylist1: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * // Our first LazyListIterable definition will be a val definition + * val lazylist1: LazyListIterable[Int] = { + * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1) * loop(0) * } * * // Because lazylist1 is a val, everything that the iterator produces is held - * // by virtue of the fact that the head of the LazyList is held in lazylist1 + * // by virtue of the fact that the head of the LazyListIterable is held in lazylist1 * val it1 = lazylist1.iterator * loop("Iterator1: ", it1.next(), it1) * - * // We can redefine this LazyList such that all we have is the Iterator left - * // and allow the LazyList to be garbage collected as required. Using a def - * // to provide the LazyList ensures that no val is holding onto the head as + * // We can redefine this LazyListIterable such that all we have is the Iterator left + * // and allow the LazyListIterable to be garbage collected as required. Using a def + * // to provide the LazyListIterable ensures that no val is holding onto the head as * // is the case with lazylist1 - * def lazylist2: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * def lazylist2: LazyListIterable[Int] = { + * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1) * loop(0) * } * val it2 = lazylist2.iterator * loop("Iterator2: ", it2.next(), it2) * - * // And, of course, we don't actually need a LazyList at all for such a simple - * // problem. There's no reason to use a LazyList if you don't actually need + * // And, of course, we don't actually need a LazyListIterable at all for such a simple + * // problem. There's no reason to use a LazyListIterable if you don't actually need * // one. * val it3 = new Iterator[Int] { * var i = -1 @@ -167,7 +175,7 @@ import scala.runtime.Statics * }}} * * - In the `fibs` example earlier, the fact that `tail` works at all is of interest. - * `fibs` has an initial `(0, 1, LazyList(...))`, so `tail` is deterministic. + * `fibs` has an initial `(0, 1, LazyListIterable(...))`, so `tail` is deterministic. * If we defined `fibs` such that only `0` were concretely known, then the act * of determining `tail` would require the evaluation of `tail`, so the * computation would be unable to progress, as in this code: @@ -175,7 +183,7 @@ import scala.runtime.Statics * // The first time we try to access the tail we're going to need more * // information which will require us to recurse, which will require us to * // recurse, which... - * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } + * lazy val sov: LazyListIterable[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } * }}} * * The definition of `fibs` above creates a larger number of objects than @@ -184,8 +192,8 @@ import scala.runtime.Statics * fact that it has a more direct route to the numbers themselves: * * {{{ - * lazy val fib: LazyList[Int] = { - * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n) + * lazy val fib: LazyListIterable[Int] = { + * def loop(h: Int, n: Int): LazyListIterable[Int] = h #:: loop(n, h + n) * loop(1, 1) * } * }}} @@ -196,8 +204,8 @@ import scala.runtime.Statics * the tails content is deferred until the tails empty status, head or tail is * evaluated. 
* - * Delaying the evaluation of whether a LazyList is empty or not until it's needed - * allows LazyList to not eagerly evaluate any elements on a call to `filter`. + * Delaying the evaluation of whether a LazyListIterable is empty or not until it's needed + * allows LazyListIterable to not eagerly evaluate any elements on a call to `filter`. * * Only when it's further evaluated (which may be never!) any of the elements gets * forced. @@ -205,24 +213,24 @@ import scala.runtime.Statics * for example: * * {{{ - * def tailWithSideEffect: LazyList[Nothing] = { - * println("getting empty LazyList") - * LazyList.empty + * def tailWithSideEffect: LazyListIterable[Nothing] = { + * println("getting empty LazyListIterable") + * LazyListIterable.empty * } * - * val emptyTail = tailWithSideEffect // prints "getting empty LazyList" + * val emptyTail = tailWithSideEffect // prints "getting empty LazyListIterable" * * val suspended = 1 #:: tailWithSideEffect // doesn't print anything * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed * val filtered = tail.filter(_ => false) // still nothing is printed - * filtered.isEmpty // prints "getting empty LazyList" + * filtered.isEmpty // prints "getting empty LazyListIterable" * }}} * * @tparam A the type of the elements contained in this lazy list. * * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] * section on `LazyLists` for more information. - * @define Coll `LazyList` + * @define Coll `LazyListIterable` * @define coll lazy list * @define orderDependent * @define orderDependentFold @@ -237,23 +245,24 @@ import scala.runtime.Statics * @define evaluatesAllElements This method evaluates all elements of the collection. 
*/ @SerialVersionUID(3L) -final class LazyList[+A] private(private[this] var lazyState: () => LazyList.State[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with LinearSeqOps[A, LazyList, LazyList[A]] - with IterableFactoryDefaults[A, LazyList] +final class LazyListIterable[+A] private(private[this] var lazyState: () => LazyListIterable.State[A]^) + extends AbstractIterable[A] + with Iterable[A] + with IterableOps[A, LazyListIterable, LazyListIterable[A]] + with IterableFactoryDefaults[A, LazyListIterable] with Serializable { - import LazyList._ + this: LazyListIterable[A]^ => + import LazyListIterable._ @volatile private[this] var stateEvaluated: Boolean = false @inline private def stateDefined: Boolean = stateEvaluated private[this] var midEvaluation = false - private lazy val state: State[A] = { + private lazy val state: State[A]^ = { // if it's already mid-evaluation, we're stuck in an infinite // self-referential loop (also it's empty) if (midEvaluation) { - throw new RuntimeException("self-referential LazyList or a derivation thereof has no more elements") + throw new RuntimeException("self-referential LazyListIterable or a derivation thereof has no more elements") } midEvaluation = true val res = try lazyState() finally midEvaluation = false @@ -264,7 +273,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta res } - override def iterableFactory: SeqFactory[LazyList] = LazyList + override def iterableFactory: IterableFactory[LazyListIterable] = LazyListIterable override def isEmpty: Boolean = state eq State.Empty @@ -276,7 +285,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta override def head: A = state.head - override def tail: LazyList[A] = state.tail + override def tail: LazyListIterable[A]^{this} = state.tail @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) @@ -287,13 +296,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * elements of the cycle are evaluated. For example: * * {{{ - * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring + * val ring: LazyListIterable[Int] = 1 #:: 2 #:: 3 #:: ring * ring.force * ring.toString * * // prints * // - * // LazyList(1, 2, 3, ...) + * // LazyListIterable(1, 2, 3, ...) * }}} * * This method will *not* terminate for non-cyclic infinite-sized collections. @@ -302,7 +311,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta */ def force: this.type = { // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: LazyList[A] = this + var these, those: LazyListIterable[A]^{this} = this if (!these.isEmpty) { these = these.tail } @@ -322,7 +331,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The iterator returned by this method preserves laziness; elements are * only evaluated individually as needed. */ - override def iterator: Iterator[A] = + override def iterator: Iterator[A]^{this} = if (knownIsEmpty) Iterator.empty else new LazyIterator(this) @@ -332,9 +341,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * @param f The treatment to apply to each element. * @note Overridden here as final to trigger tail-call optimization, which * replaces 'this' with 'tail' at each iteration. 
This is absolutely - * necessary for allowing the GC to collect the underlying LazyList as elements + * necessary for allowing the GC to collect the underlying LazyListIterable as elements * are consumed. - * @note This function will force the realization of the entire LazyList + * @note This function will force the realization of the entire LazyListIterable * unless the `f` throws an exception. */ @tailrec @@ -345,12 +354,12 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } } - /** LazyList specialization of foldLeft which allows GC to collect along the + /** LazyListIterable specialization of foldLeft which allows GC to collect along the * way. * * @tparam B The type of value being accumulated. * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `LazyList`. + * @param op The operation to perform on successive elements of the `LazyListIterable`. * @return The accumulated value from successive applications of `op`. */ @tailrec @@ -359,10 +368,10 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta else tail.foldLeft(op(z, head))(op) // State.Empty doesn't use the SerializationProxy - protected[this] def writeReplace(): AnyRef = - if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this + protected[this] def writeReplace(): AnyRef^{this} = + if (knownNonEmpty) new LazyListIterable.SerializationProxy[A](this) else this - override protected[this] def className = "LazyList" + override protected[this] def className = "LazyListIterable" /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. * @@ -373,10 +382,10 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * @param suffix The collection that gets appended to this lazy list * @return The lazy list containing elements of this lazy list and the iterable object. 
*/ - def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): LazyList[B] = + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = newLL { if (isEmpty) suffix match { - case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList + case lazyList: LazyListIterable[B] => lazyList.state // don't recompute the LazyListIterable case coll if coll.knownSize == 0 => State.Empty case coll => stateFromIterator(coll.iterator) } @@ -389,8 +398,8 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $appendStackSafety */ - override def appendedAll[B >: A](suffix: IterableOnce[B]): LazyList[B] = - if (knownIsEmpty) LazyList.from(suffix) + def appendedAll[B >: A](suffix: IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = + if (knownIsEmpty) LazyListIterable.from(suffix) else lazyAppendedAll(suffix) /** @inheritdoc @@ -399,19 +408,19 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $appendStackSafety */ - override def appended[B >: A](elem: B): LazyList[B] = - if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) + def appended[B >: A](elem: B): LazyListIterable[B]^{this} = + if (knownIsEmpty) newLL(sCons(elem, LazyListIterable.empty)) else lazyAppendedAll(Iterator.single(elem)) /** @inheritdoc * * $preservesLaziness */ - override def scanLeft[B](z: B)(op: (B, A) => B): LazyList[B] = - if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) + override def scanLeft[B](z: B)(op: (B, A) => B): LazyListIterable[B]^{this, op} = + if (knownIsEmpty) newLL(sCons(z, LazyListIterable.empty)) else newLL(scanLeftState(z)(op)) - private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = + private def scanLeftState[B](z: B)(op: (B, A) => B): State[B]^{this, op} = sCons( z, newLL { @@ -420,18 +429,18 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } ) - /** LazyList specialization of reduceLeft which allows GC to collect + /** LazyListIterable specialization of reduceLeft which allows GC to collect * along the way. * * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `LazyList`. + * @param f The operation to perform on successive elements of the `LazyListIterable`. * @return The accumulated value from successive applications of `f`. 
*/ override def reduceLeft[B >: A](f: (B, A) => B): B = { if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") else { var reducedRes: B = this.head - var left: LazyList[A] = this.tail + var left: LazyListIterable[A]^{this} = this.tail while (!left.isEmpty) { reducedRes = f(reducedRes, left.head) left = left.tail @@ -444,13 +453,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) + override def partition(p: A => Boolean): (LazyListIterable[A]^{this, p}, LazyListIterable[A]^{this, p}) = (filter(p), filterNot(p)) /** @inheritdoc * * $preservesLaziness */ - override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyListIterable[A1]^{this, f}, LazyListIterable[A2]^{this, f}) = { val (left, right) = map(f).partition(_.isLeft) (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) } @@ -459,17 +468,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def filter(pred: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = false) + override def filter(pred: A => Boolean): LazyListIterable[A]^{this, pred} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.filterImpl(this, pred, isFlipped = false) /** @inheritdoc * * $preservesLaziness */ - override def filterNot(pred: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = true) + override def filterNot(pred: A => Boolean): LazyListIterable[A]^{this, pred} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.filterImpl(this, pred, isFlipped = true) /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. * @@ -479,21 +488,21 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The `collection.WithFilter` returned by this method preserves laziness; elements are * only evaluated individually as needed. 
*/ - override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyList] = - new LazyList.WithFilter(coll, p) + override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, p} = + new LazyListIterable.WithFilter(coll, p) /** @inheritdoc * * $preservesLaziness */ - override def prepended[B >: A](elem: B): LazyList[B] = newLL(sCons(elem, this)) + def prepended[B >: A](elem: B): LazyListIterable[B] = newLL(sCons(elem, this)) /** @inheritdoc * * $preservesLaziness */ - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): LazyList[B] = - if (knownIsEmpty) LazyList.from(prefix) + def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): LazyListIterable[B]^{this, prefix} = + if (knownIsEmpty) LazyListIterable.from(prefix) else if (prefix.knownSize == 0) this else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) @@ -501,17 +510,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def map[B](f: A => B): LazyList[B] = - if (knownIsEmpty) LazyList.empty + override def map[B](f: A => B): LazyListIterable[B]^{this, f} = + if (knownIsEmpty) LazyListIterable.empty else (mapImpl(f): @inline) /** @inheritdoc * * $preservesLaziness */ - override def tapEach[U](f: A => U): LazyList[A] = map { a => f(a); a } + override def tapEach[U](f: A => U): LazyListIterable[A]^{this, f} = map { a => f(a); a } - private def mapImpl[B](f: A => B): LazyList[B] = + private def mapImpl[B](f: A => B): LazyListIterable[B]^{this, f} = newLL { if (isEmpty) State.Empty else sCons(f(head), tail.mapImpl(f)) @@ -521,9 +530,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def collect[B](pf: PartialFunction[A, B]): LazyList[B] = - if (knownIsEmpty) LazyList.empty - else LazyList.collectImpl(this, pf) + override def collect[B](pf: PartialFunction[A, B]^): LazyListIterable[B]^{this, pf} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.collectImpl(this, pf) /** @inheritdoc * @@ -534,7 +543,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = if (isEmpty) None else { - val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) + val res = pf.applyOrElse(head, LazyListIterable.anyToMarker.asInstanceOf[A => B]) if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) else Some(res) } @@ -559,25 +568,25 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta */ // optimisations are not for speed, but for functionality // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - override def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = - if (knownIsEmpty) LazyList.empty - else LazyList.flatMapImpl(this, f) + override def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.flatMapImpl(this, f) /** @inheritdoc * * $preservesLaziness */ - override def flatten[B](implicit asIterable: A => IterableOnce[B]): LazyList[B] = flatMap(asIterable) + override def flatten[B](implicit asIterable: A -> IterableOnce[B]): LazyListIterable[B]^{this} = flatMap(asIterable) /** @inheritdoc * * $preservesLaziness */ - override def zip[B](that: collection.IterableOnce[B]): LazyList[(A, B)] = - if (this.knownIsEmpty || that.knownSize == 
0) LazyList.empty + override def zip[B](that: collection.IterableOnce[B]^): LazyListIterable[(A, B)]^{this, that} = + if (this.knownIsEmpty || that.knownSize == 0) LazyListIterable.empty else newLL(zipState(that.iterator)) - private def zipState[B](it: Iterator[B]): State[(A, B)] = + private def zipState[B](it: Iterator[B]^): State[(A, B)]^{this, it} = if (this.isEmpty || !it.hasNext) State.Empty else sCons((head, it.next()), newLL { tail zipState it }) @@ -585,29 +594,29 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def zipWithIndex: LazyList[(A, Int)] = this zip LazyList.from(0) + override def zipWithIndex: LazyListIterable[(A, Int)]^{this} = this zip LazyListIterable.from(0) /** @inheritdoc * * $preservesLaziness */ - override def zipAll[A1 >: A, B](that: collection.Iterable[B], thisElem: A1, thatElem: B): LazyList[(A1, B)] = { + override def zipAll[A1 >: A, B](that: collection.Iterable[B]^, thisElem: A1, thatElem: B): LazyListIterable[(A1, B)]^{this, that} = { if (this.knownIsEmpty) { - if (that.knownSize == 0) LazyList.empty - else LazyList.continually(thisElem) zip that + if (that.knownSize == 0) LazyListIterable.empty + else LazyListIterable.continually(thisElem) zip that } else { - if (that.knownSize == 0) zip(LazyList.continually(thatElem)) + if (that.knownSize == 0) zip(LazyListIterable.continually(thatElem)) else newLL(zipAllState(that.iterator, thisElem, thatElem)) } } - private def zipAllState[A1 >: A, B](it: Iterator[B], thisElem: A1, thatElem: B): State[(A1, B)] = { + private def zipAllState[A1 >: A, B](it: Iterator[B]^, thisElem: A1, thatElem: B): State[(A1, B)]^{this, it} = { if (it.hasNext) { - if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyList.continually(thisElem) zipState it }) + if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyListIterable.continually(thisElem) zipState it }) else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) } else { if (this.isEmpty) State.Empty - else sCons((this.head, thatElem), this.tail zip LazyList.continually(thatElem)) + else sCons((this.head, thatElem), this.tail zip LazyListIterable.continually(thatElem)) } } @@ -620,21 +629,21 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * only evaluated individually as needed. 
*/ // just in case it can be meaningfully overridden at some point - override def lazyZip[B](that: collection.Iterable[B]): LazyZip2[A, B, LazyList.this.type] = + override def lazyZip[B](that: collection.Iterable[B]^): LazyZip2[A, B, LazyListIterable.this.type]^{this, that} = super.lazyZip(that) /** @inheritdoc * * $preservesLaziness */ - override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = + override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}) = (map(asPair(_)._1), map(asPair(_)._2)) /** @inheritdoc * * $preservesLaziness */ - override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = + override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}, LazyListIterable[A3]^{this}) = (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) /** @inheritdoc @@ -642,27 +651,27 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * $initiallyLazy * Additionally, it preserves laziness for all except the first `n` elements. */ - override def drop(n: Int): LazyList[A] = + override def drop(n: Int): LazyListIterable[A]^{this} = if (n <= 0) this - else if (knownIsEmpty) LazyList.empty - else LazyList.dropImpl(this, n) + else if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.dropImpl(this, n) /** @inheritdoc * * $initiallyLazy * Additionally, it preserves laziness for all elements after the predicate returns `false`. */ - override def dropWhile(p: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.dropWhileImpl(this, p) + override def dropWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.dropWhileImpl(this, p) /** @inheritdoc * * $initiallyLazy */ - override def dropRight(n: Int): LazyList[A] = { + override def dropRight(n: Int): LazyListIterable[A]^{this} = { if (n <= 0) this - else if (knownIsEmpty) LazyList.empty + else if (knownIsEmpty) LazyListIterable.empty else newLL { var scout = this var remaining = n @@ -675,7 +684,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } } - private def dropRightState(scout: LazyList[_]): State[A] = + private def dropRightState(scout: LazyListIterable[_]^): State[A]^{this, scout} = if (scout.isEmpty) State.Empty else sCons(head, newLL(tail.dropRightState(scout.tail))) @@ -683,12 +692,12 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def take(n: Int): LazyList[A] = - if (knownIsEmpty) LazyList.empty + override def take(n: Int): LazyListIterable[A] = + if (knownIsEmpty) LazyListIterable.empty else (takeImpl(n): @inline) - private def takeImpl(n: Int): LazyList[A] = { - if (n <= 0) LazyList.empty + private def takeImpl(n: Int): LazyListIterable[A] = { + if (n <= 0) LazyListIterable.empty else newLL { if (isEmpty) State.Empty else sCons(head, tail.takeImpl(n - 1)) @@ -699,11 +708,11 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def takeWhile(p: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty + override def takeWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = + if (knownIsEmpty) LazyListIterable.empty else (takeWhileImpl(p): @inline) - private def takeWhileImpl(p: A => Boolean): LazyList[A] 
= + private def takeWhileImpl(p: A => Boolean): LazyListIterable[A]^{this, p} = newLL { if (isEmpty || !p(head)) State.Empty else sCons(head, tail.takeWhileImpl(p)) @@ -713,45 +722,29 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $initiallyLazy */ - override def takeRight(n: Int): LazyList[A] = - if (n <= 0 || knownIsEmpty) LazyList.empty - else LazyList.takeRightImpl(this, n) + override def takeRight(n: Int): LazyListIterable[A]^{this} = + if (n <= 0 || knownIsEmpty) LazyListIterable.empty + else LazyListIterable.takeRightImpl(this, n) /** @inheritdoc * * $initiallyLazy * Additionally, it preserves laziness for all but the first `from` elements. */ - override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) + override def slice(from: Int, until: Int): LazyListIterable[A]^{this} = take(until).drop(from) /** @inheritdoc * * $evaluatesAllElements */ - override def reverse: LazyList[A] = reverseOnto(LazyList.empty) + def reverse: LazyListIterable[A] = reverseOnto(LazyListIterable.empty) - // need contravariant type B to make the compiler happy - still returns LazyList[A] + // need contravariant type B to make the compiler happy - still returns LazyListIterable[A] @tailrec - private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = + private def reverseOnto[B >: A](tl: LazyListIterable[B]): LazyListIterable[B] = if (isEmpty) tl else tail.reverseOnto(newLL(sCons(head, tl))) - /** @inheritdoc - * - * $preservesLaziness - */ - override def diff[B >: A](that: collection.Seq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else super.diff(that) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def intersect[B >: A](that: collection.Seq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else super.intersect(that) - @tailrec private def lengthGt(len: Int): Boolean = if (len < 0) true @@ -763,7 +756,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The iterator returned by this method mostly preserves laziness; * a single element ahead of the iterator is evaluated. */ - override def grouped(size: Int): Iterator[LazyList[A]] = { + override def grouped(size: Int): Iterator[LazyListIterable[A]] = { require(size > 0, "size must be positive, but was " + size) slidingImpl(size = size, step = size) } @@ -773,12 +766,12 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The iterator returned by this method mostly preserves laziness; * `size - step max 1` elements ahead of the iterator are evaluated. 
*/ - override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { + override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]] = { require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") slidingImpl(size = size, step = step) } - @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]] = if (knownIsEmpty) Iterator.empty else new SlidingIterator[A](this, size = size, step = step) @@ -786,10 +779,10 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def padTo[B >: A](len: Int, elem: B): LazyList[B] = { + def padTo[B >: A](len: Int, elem: B): LazyListIterable[B]^{this} = { if (len <= 0) this else newLL { - if (isEmpty) LazyList.fill(len)(elem).state + if (isEmpty) LazyListIterable.fill(len)(elem).state else sCons(head, tail.padTo(len - 1, elem)) } } @@ -798,13 +791,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = - if (knownIsEmpty) LazyList from other + def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = + if (knownIsEmpty) LazyListIterable from other else patchImpl(from, other, replaced) - private def patchImpl[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + private def patchImpl[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = newLL { - if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyList.dropImpl(this, replaced).state) + if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyListIterable.dropImpl(this, replaced).state) else if (isEmpty) stateFromIterator(other.iterator) else sCons(head, tail.patchImpl(from - 1, other, replaced)) } @@ -814,17 +807,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * $evaluatesAllElements */ // overridden just in case a lazy implementation is developed at some point - override def transpose[B](implicit asIterable: A => collection.Iterable[B]): LazyList[LazyList[B]] = super.transpose + override def transpose[B](implicit asIterable: A -> collection.Iterable[B]): LazyListIterable[LazyListIterable[B]]^{this} = super.transpose /** @inheritdoc * * $preservesLaziness */ - override def updated[B >: A](index: Int, elem: B): LazyList[B] = + def updated[B >: A](index: Int, elem: B): LazyListIterable[B]^{this} = if (index < 0) throw new IndexOutOfBoundsException(s"$index") else updatedImpl(index, elem, index) - private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = { + private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyListIterable[B]^{this} = { newLL { if (index <= 0) sCons(elem, tail) else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) @@ -859,9 +852,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta else if (!isEmpty) { b.append(head) var cursor = this - @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) + inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) var scout = tail - @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty + inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty if ((cursor 
ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) { cursor = scout if (scoutNonEmpty) { @@ -883,7 +876,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta // if cursor (eq scout) has state defined, it is empty; else unknown state if (!cursor.stateDefined) b.append(sep).append("") } else { - @inline def same(a: LazyList[A], b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state) + @inline def same(a: LazyListIterable[A]^, b: LazyListIterable[A]^): Boolean = (a eq b) || (a.state eq b.state) // Cycle. // If we have a prefix of length P followed by a cycle of length C, // the scout will be at position (P%C) in the cycle when the cursor @@ -926,9 +919,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * Examples: * - * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ; - * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; - * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains + * - `"LazyListIterable(4, <not computed>)"`, a non-empty lazy list ; + * - `"LazyListIterable(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; + * - `"LazyListIterable(1, 2, 3, <cycle>)"`, an infinite lazy list that contains * a cycle at the fourth element. */ override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString @@ -963,48 +956,49 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta /** * $factoryInfo * @define coll lazy list - * @define Coll `LazyList` + * @define Coll `LazyListIterable` */ @SerialVersionUID(3L) -object LazyList extends SeqFactory[LazyList] { +object LazyListIterable extends IterableFactory[LazyListIterable] { // Eagerly evaluate cached empty instance private[this] val _empty = newLL(State.Empty).force private sealed trait State[+A] extends Serializable { + this: State[A]^ => def head: A - def tail: LazyList[A] + def tail: LazyListIterable[A]^ } private object State { @SerialVersionUID(3L) object Empty extends State[Nothing] { def head: Nothing = throw new NoSuchElementException("head of empty lazy list") - def tail: LazyList[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") + def tail: LazyListIterable[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") } @SerialVersionUID(3L) - final class Cons[A](val head: A, val tail: LazyList[A]) extends State[A] + final class Cons[A](val head: A, val tail: LazyListIterable[A]^) extends State[A] } - /** Creates a new LazyList. */ - @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) + /** Creates a new LazyListIterable. */ + @inline private def newLL[A](state: => State[A]^): LazyListIterable[A]^{state} = new LazyListIterable[A](() => state) /** Creates a new State.Cons. */ - @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) + @inline private def sCons[A](hd: A, tl: LazyListIterable[A]^): State[A]^{tl} = new State.Cons[A](hd, tl) private val anyToMarker: Any => Any = _ => Statics.pfMarker /* All of the following `Impl` methods are carefully written so as not to - * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into + * leak the beginning of the `LazyListIterable`. They copy the initial `LazyListIterable` (`ll`) into * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently - * leaking the head of the `LazyList`. 
Additionally, the methods are written so that, should - * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they + * leaking the head of the `LazyListIterable`. Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyListIterable` or any supplied function, they * can continue their execution where they left off. */ - private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { + private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[filterImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { var elem: A = null.asInstanceOf[A] var found = false @@ -1019,9 +1013,9 @@ object LazyList extends SeqFactory[LazyList] { } } - private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { + private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[collectImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { val marker = Statics.pfMarker val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased @@ -1038,11 +1032,11 @@ object LazyList extends SeqFactory[LazyList] { } } - private def flatMapImpl[A, B](ll: LazyList[A], f: A => IterableOnce[B]): LazyList[B] = { + private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[flatMapImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { - var it: Iterator[B] = null + var it: Iterator[B @uncheckedCaptures]^{ll, f} = null var itHasNext = false var rest = restRef // var rest = restRef.elem while (!itHasNext && !rest.isEmpty) { @@ -1062,9 +1056,9 @@ object LazyList extends SeqFactory[LazyList] { } } - private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric var iRef = n // val iRef = new IntRef(n) newLL { var rest = restRef // var rest = restRef.elem @@ -1079,9 +1073,9 @@ object LazyList extends SeqFactory[LazyList] { } } - private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { + private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropWhileImpl]} = ll // restRef is captured by closure arg to newLL, so A is not 
recognized as parametric newLL { var rest = restRef // var rest = restRef.elem while (!rest.isEmpty && p(rest.head)) { @@ -1092,10 +1086,10 @@ object LazyList extends SeqFactory[LazyList] { } } - private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + private def takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - var scoutRef = ll // val scoutRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var scoutRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // same situation var remainingRef = n // val remainingRef = new IntRef(n) newLL { var scout = scoutRef // var scout = scoutRef.elem @@ -1120,117 +1114,115 @@ object LazyList extends SeqFactory[LazyList] { } } - /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). + /** An alternative way of building and matching lazy lists using LazyListIterable.cons(hd, tl). */ object cons { /** A lazy list consisting of a given first element and remaining elements * @param hd The first element of the result lazy list * @param tl The remaining elements of the result lazy list */ - def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) + def apply[A](hd: => A, tl: => LazyListIterable[A]^): LazyListIterable[A]^{hd, tl} = newLL(sCons(hd, newLL(tl.state))) /** Maps a lazy list to its head and tail */ - def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) + def unapply[A](xs: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{xs})] = #::.unapply(xs) } - implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) - - final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { - /** Construct a LazyList consisting of a given first element followed by elements - * from another LazyList. + extension [A](l: => LazyListIterable[A]) + /** Construct a LazyListIterable consisting of a given first element followed by elements + * from another LazyListIterable. */ - def #:: [B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) - /** Construct a LazyList consisting of the concatenation of the given LazyList and - * another LazyList. + def #:: [B >: A](elem: => B): LazyListIterable[B]^{elem, l} = newLL(sCons(elem, newLL(l.state))) + + /** Construct a LazyListIterable consisting of the concatenation of the given LazyListIterable and + * another LazyListIterable. 
*/ - def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() - } + def #:::[B >: A](prefix: LazyListIterable[B]^): LazyListIterable[B]^{prefix, l} = prefix lazyAppendedAll l object #:: { - def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + def unapply[A](s: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{s})] = if (!s.isEmpty) Some((s.head, s.tail)) else None } - def from[A](coll: collection.IterableOnce[A]): LazyList[A] = coll match { - case lazyList: LazyList[A] => lazyList + def from[A](coll: collection.IterableOnce[A]^): LazyListIterable[A]^{coll} = coll match { + case lazyList: LazyListIterable[A] => lazyList case _ if coll.knownSize == 0 => empty[A] case _ => newLL(stateFromIterator(coll.iterator)) } - def empty[A]: LazyList[A] = _empty + def empty[A]: LazyListIterable[A] = _empty /** Creates a State from an Iterator, with another State appended after the Iterator * is empty. */ - private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = + private def stateFromIteratorConcatSuffix[A](it: Iterator[A]^)(suffix: => State[A]^): State[A]^{it, suffix} = if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) else suffix /** Creates a State from an IterableOnce. */ - private def stateFromIterator[A](it: Iterator[A]): State[A] = + private def stateFromIterator[A](it: Iterator[A]^): State[A]^{it} = if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) else State.Empty - override def concat[A](xss: collection.Iterable[A]*): LazyList[A] = + override def concat[A](xss: collection.Iterable[A]*): LazyListIterable[A] = if (xss.knownSize == 0) empty else newLL(concatIterator(xss.iterator)) - private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = + private def concatIterator[A](it: Iterator[collection.Iterable[A]]^): State[A]^{it} = if (!it.hasNext) State.Empty else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) - /** An infinite LazyList that repeatedly applies a given function to a start value. + /** An infinite LazyListIterable that repeatedly applies a given function to a start value. * - * @param start the start value of the LazyList + * @param start the start value of the LazyListIterable * @param f the function that's repeatedly applied - * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` + * @return the LazyListIterable returning the infinite sequence of values `start, f(start), f(f(start)), ...` */ - def iterate[A](start: => A)(f: A => A): LazyList[A] = + def iterate[A](start: => A)(f: A => A): LazyListIterable[A]^{start, f} = newLL { val head = start sCons(head, iterate(f(head))(f)) } /** - * Create an infinite LazyList starting at `start` and incrementing by + * Create an infinite LazyListIterable starting at `start` and incrementing by * step `step`. * - * @param start the start value of the LazyList - * @param step the increment value of the LazyList - * @return the LazyList starting at value `start`. + * @param start the start value of the LazyListIterable + * @param step the increment value of the LazyListIterable + * @return the LazyListIterable starting at value `start`. */ - def from(start: Int, step: Int): LazyList[Int] = + def from(start: Int, step: Int): LazyListIterable[Int] = newLL(sCons(start, from(start + step, step))) /** - * Create an infinite LazyList starting at `start` and incrementing by `1`. 
+ * Create an infinite LazyListIterable starting at `start` and incrementing by `1`. * - * @param start the start value of the LazyList - * @return the LazyList starting at value `start`. + * @param start the start value of the LazyListIterable + * @return the LazyListIterable starting at value `start`. */ - def from(start: Int): LazyList[Int] = from(start, 1) + def from(start: Int): LazyListIterable[Int] = from(start, 1) /** - * Create an infinite LazyList containing the given element expression (which + * Create an infinite LazyListIterable containing the given element expression (which * is computed for each occurrence). * - * @param elem the element composing the resulting LazyList - * @return the LazyList containing an infinite number of elem + * @param elem the element composing the resulting LazyListIterable + * @return the LazyListIterable containing an infinite number of elem */ - def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) + def continually[A](elem: => A): LazyListIterable[A]^{elem} = newLL(sCons(elem, continually(elem))) - override def fill[A](n: Int)(elem: => A): LazyList[A] = + override def fill[A](n: Int)(elem: => A): LazyListIterable[A]^{elem} = if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty - override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { - def at(index: Int): LazyList[A] = + override def tabulate[A](n: Int)(f: Int => A): LazyListIterable[A]^{f} = { + def at(index: Int): LazyListIterable[A]^{f} = if (index < n) newLL(sCons(f(index), at(index + 1))) else empty at(0) } // significantly simpler than the iterator returned by Iterator.unfold - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyListIterable[A]^{f} = newLL { f(init) match { case Some((elem, state)) => sCons(elem, unfold(state)(f)) @@ -1244,9 +1236,9 @@ object LazyList extends SeqFactory[LazyList] { * @tparam A the type of the ${coll}’s elements * @return A builder for $Coll objects. 
*/ - def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] + def newBuilder[A]: Builder[A, LazyListIterable[A]] = new LazyBuilder[A] - private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) extends AbstractIterator[A] { + private class LazyIterator[+A](private[this] var lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { override def hasNext: Boolean = !lazyList.isEmpty override def next(): A = @@ -1258,8 +1250,9 @@ object LazyList extends SeqFactory[LazyList] { } } - private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) - extends AbstractIterator[LazyList[A]] { + private class SlidingIterator[A](private[this] var lazyList: LazyListIterable[A]^, size: Int, step: Int) + extends AbstractIterator[LazyListIterable[A]] { + this: SlidingIterator[A]^ => private val minLen = size - step max 0 private var first = true @@ -1267,7 +1260,7 @@ object LazyList extends SeqFactory[LazyList] { if (first) !lazyList.isEmpty else lazyList.lengthGt(minLen) - def next(): LazyList[A] = { + def next(): LazyListIterable[A] = { if (!hasNext) Iterator.empty.next() else { first = false @@ -1278,20 +1271,21 @@ object LazyList extends SeqFactory[LazyList] { } } - private final class WithFilter[A] private[LazyList](lazyList: LazyList[A], p: A => Boolean) - extends collection.WithFilter[A, LazyList] { + private final class WithFilter[A] private[LazyListIterable](lazyList: LazyListIterable[A]^, p: A => Boolean) + extends collection.WithFilter[A, LazyListIterable] { + this: WithFilter[A]^ => private[this] val filtered = lazyList.filter(p) - def map[B](f: A => B): LazyList[B] = filtered.map(f) - def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = filtered.flatMap(f) + def map[B](f: A => B): LazyListIterable[B]^{this, f} = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = filtered.flatMap(f) def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): collection.WithFilter[A, LazyList] = new WithFilter(filtered, q) + def withFilter(q: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, q} = new WithFilter(filtered, q) } - private final class LazyBuilder[A] extends ReusableBuilder[A, LazyList[A]] { + private final class LazyBuilder[A] extends ReusableBuilder[A, LazyListIterable[A]] { import LazyBuilder._ - private[this] var next: DeferredState[A] = _ - private[this] var list: LazyList[A] = _ + private[this] var next: DeferredState[A @uncheckedCaptures] = _ + private[this] var list: LazyListIterable[A @uncheckedCaptures] = _ clear() @@ -1301,7 +1295,7 @@ object LazyList extends SeqFactory[LazyList] { next = deferred } - override def result(): LazyList[A] = { + override def result(): LazyListIterable[A] = { next init State.Empty list } @@ -1314,10 +1308,10 @@ object LazyList extends SeqFactory[LazyList] { } // lazy implementation which doesn't evaluate the collection being added - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { if (xs.knownSize != 0) { val deferred = new DeferredState[A] - next init stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval()) + next.init(stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval())) next = deferred } this @@ -1326,16 +1320,17 @@ object LazyList extends SeqFactory[LazyList] { private object LazyBuilder { final class DeferredState[A] { - private[this] var _state: () => State[A] = _ + this: DeferredState[A]^ => + private[this] var _state: (() => State[A]^) 
@uncheckedCaptures = _ - def eval(): State[A] = { + def eval(): State[A]^ = { val state = _state if (state == null) throw new IllegalStateException("uninitialized") state() } // racy - def init(state: => State[A]): Unit = { + def init(state: => State[A]^): Unit = { if (_state != null) throw new IllegalStateException("already initialized") _state = () => state } @@ -1348,7 +1343,7 @@ object LazyList extends SeqFactory[LazyList] { * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. */ @SerialVersionUID(3L) - final class SerializationProxy[A](@transient protected var coll: LazyList[A]) extends Serializable { + final class SerializationProxy[A](@transient protected var coll: LazyListIterable[A]^) extends Serializable { private[this] def writeObject(out: ObjectOutputStream): Unit = { out.defaultWriteObject() @@ -1363,15 +1358,15 @@ object LazyList extends SeqFactory[LazyList] { private[this] def readObject(in: ObjectInputStream): Unit = { in.defaultReadObject() - val init = new mutable.ListBuffer[A] + val init = new mutable.ListBuffer[A @uncheckedCaptures] var initRead = false while (!initRead) in.readObject match { case SerializeEnd => initRead = true case a => init += a.asInstanceOf[A] } - val tail = in.readObject().asInstanceOf[LazyList[A]] + val tail = in.readObject().asInstanceOf[LazyListIterable[A]] // scala/scala#10118: caution that no code path can evaluate `tail.state` - // before the resulting LazyList is returned + // before the resulting LazyListIterable is returned val it = init.toList.iterator coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state)) } diff --git a/tests/pos-special/stdlib/collection/immutable/List.scala b/tests/pos-special/stdlib/collection/immutable/List.scala index 5358922752fb..6a305f4ebdec 100644 --- a/tests/pos-special/stdlib/collection/immutable/List.scala +++ b/tests/pos-special/stdlib/collection/immutable/List.scala @@ -14,11 +14,12 @@ package scala package collection package immutable -import scala.annotation.unchecked.uncheckedVariance +import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import scala.annotation.tailrec import mutable.{Builder, ListBuffer} import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence +import language.experimental.captureChecking /** A class for immutable linked lists representing ordered collections * of elements of type `A`. 
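The List.scala hunks that follow apply the same recipe seen above in LazyListIterable.scala: consumed collections become `IterableOnce[B]^` (the trailing `^` lets the argument capture capabilities), and local mutable buffers annotate their element type with `@uncheckedCaptures`, because under capture checking a mutable container may not otherwise hold values of a possibly-capturing type parameter. A minimal sketch of that buffer pattern, assuming the experimental imports this patch adds; the `firstN` helper itself is hypothetical and not part of the patch:

    import language.experimental.captureChecking
    import scala.annotation.unchecked.uncheckedCaptures
    import scala.collection.mutable.ListBuffer

    // Hypothetical helper illustrating the pattern used by splitAt/takeWhile/span
    // below: without @uncheckedCaptures the capture checker rejects a mutable
    // buffer whose element type is a plain (possibly capturing) type parameter.
    def firstN[A](xs: List[A], n: Int): List[A] = {
      val b = new ListBuffer[A @uncheckedCaptures]
      var these = xs
      var i = 0
      while (!these.isEmpty && i < n) {
        b += these.head
        these = these.tail
        i += 1
      }
      b.toList
    }
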
@@ -143,7 +144,7 @@ sealed abstract class List[+A] override def prepended[B >: A](elem: B): List[B] = elem :: this - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): List[B] = prefix match { + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): List[B] = prefix match { case xs: List[B] => xs ::: this case _ if prefix.knownSize == 0 => this case b: ListBuffer[B] if this.isEmpty => b.toList @@ -165,7 +166,7 @@ sealed abstract class List[+A] } // When calling appendAll with another list `suffix`, avoid copying `suffix` - override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): List[B] = suffix match { + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): List[B] = suffix match { case xs: List[B] => this ::: xs case _ => super.appendedAll(suffix) } @@ -214,7 +215,7 @@ sealed abstract class List[+A] // dropRight is inherited from LinearSeq override def splitAt(n: Int): (List[A], List[A]) = { - val b = new ListBuffer[A] + val b = new ListBuffer[A @uncheckedCaptures] var i = 0 var these = this while (!these.isEmpty && i < n) { @@ -257,7 +258,7 @@ sealed abstract class List[+A] } } - final override def collect[B](pf: PartialFunction[A, B]): List[B] = { + final override def collect[B](pf: PartialFunction[A, B]^): List[B] = { if (this eq Nil) Nil else { var rest = this var h: ::[B] = null @@ -285,7 +286,7 @@ sealed abstract class List[+A] } } - final override def flatMap[B](f: A => IterableOnce[B]): List[B] = { + final override def flatMap[B](f: A => IterableOnce[B]^): List[B] = { var rest = this var h: ::[B] = null var t: ::[B] = null @@ -306,7 +307,7 @@ sealed abstract class List[+A] } @inline final override def takeWhile(p: A => Boolean): List[A] = { - val b = new ListBuffer[A] + val b = new ListBuffer[A @uncheckedCaptures] var these = this while (!these.isEmpty && p(these.head)) { b += these.head @@ -316,7 +317,7 @@ sealed abstract class List[+A] } @inline final override def span(p: A => Boolean): (List[A], List[A]) = { - val b = new ListBuffer[A] + val b = new ListBuffer[A @uncheckedCaptures] var these = this while (!these.isEmpty && p(these.head)) { b += these.head @@ -651,7 +652,7 @@ sealed abstract class List[+A] // Internal code that mutates `next` _must_ call `Statics.releaseFence()` if either immediately, or // before a newly-allocated, thread-local :: instance is aliased (e.g. 
in ListBuffer.toList) -final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally +final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance @uncheckedCaptures]) // sound because `next` is used only locally extends List[A] { releaseFence() override def headOption: Some[A] = Some(head) @@ -666,7 +667,7 @@ case object Nil extends List[Nothing] { override def init: Nothing = throw new UnsupportedOperationException("init of empty list") override def knownSize: Int = 0 override def iterator: Iterator[Nothing] = Iterator.empty - override def unzip[A1, A2](implicit asPair: Nothing => (A1, A2)): (List[A1], List[A2]) = EmptyUnzip + override def unzip[A1, A2](implicit asPair: Nothing -> (A1, A2)): (List[A1], List[A2]) = EmptyUnzip @transient private[this] val EmptyUnzip = (Nil, Nil) @@ -681,9 +682,9 @@ case object Nil extends List[Nothing] { object List extends StrictOptimizedSeqFactory[List] { private val TupleOfNil = (Nil, Nil) - def from[B](coll: collection.IterableOnce[B]): List[B] = Nil.prependedAll(coll) + def from[B](coll: collection.IterableOnce[B]^): List[B] = Nil.prependedAll(coll) - def newBuilder[A]: Builder[A, List[A]] = new ListBuffer() + def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A @uncheckedCaptures]() def empty[A]: List[A] = Nil diff --git a/tests/pos-special/stdlib/collection/immutable/ListMap.scala b/tests/pos-special/stdlib/collection/immutable/ListMap.scala index 4a2b8dbd807c..c5000d785144 100644 --- a/tests/pos-special/stdlib/collection/immutable/ListMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/ListMap.scala @@ -19,6 +19,8 @@ import scala.collection.mutable.ReusableBuilder import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** * This class implements immutable maps using a list-based data structure. 
List map iterators and @@ -131,8 +133,8 @@ object ListMap extends MapFactory[ListMap] { */ private[immutable] final class Node[K, V]( override private[immutable] val key: K, - private[immutable] var _value: V, - private[immutable] var _init: ListMap[K, V] + private[immutable] var _value: V @uncheckedCaptures, + private[immutable] var _init: ListMap[K, V] @uncheckedCaptures ) extends ListMap[K, V] { releaseFence() @@ -239,7 +241,7 @@ object ListMap extends MapFactory[ListMap] { private object EmptyListMap extends ListMap[Any, Nothing] - def from[K, V](it: collection.IterableOnce[(K, V)]): ListMap[K, V] = + def from[K, V](it: collection.IterableOnce[(K, V)]^): ListMap[K, V] = it match { case lm: ListMap[K, V] => lm case lhm: collection.mutable.LinkedHashMap[K, V] => @@ -285,7 +287,7 @@ object ListMap extends MapFactory[ListMap] { */ private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] { private[this] var isAliased: Boolean = false - private[this] var underlying: ListMap[K, V] = ListMap.empty + private[this] var underlying: ListMap[K, V] @uncheckedCaptures = ListMap.empty override def clear(): Unit = { underlying = ListMap.empty @@ -322,7 +324,7 @@ private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuil } this } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { if (isAliased) { super.addAll(xs) } else if (underlying.nonEmpty) { diff --git a/tests/pos-special/stdlib/collection/immutable/ListSet.scala b/tests/pos-special/stdlib/collection/immutable/ListSet.scala index e2ab0de858da..719abd78e1e6 100644 --- a/tests/pos-special/stdlib/collection/immutable/ListSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/ListSet.scala @@ -17,6 +17,8 @@ package immutable import mutable.{Builder, ImmutableBuilder} import scala.annotation.tailrec import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** * This class implements immutable sets using a list-based data structure. List set iterators and @@ -117,7 +119,7 @@ sealed class ListSet[A] @SerialVersionUID(3L) object ListSet extends IterableFactory[ListSet] { - def from[E](it: scala.collection.IterableOnce[E]): ListSet[E] = + def from[E](it: scala.collection.IterableOnce[E]^): ListSet[E] = it match { case ls: ListSet[E] => ls case _ if it.knownSize == 0 => empty[E] diff --git a/tests/pos-special/stdlib/collection/immutable/LongMap.scala b/tests/pos-special/stdlib/collection/immutable/LongMap.scala index c418dc7616ac..4abf433273f2 100644 --- a/tests/pos-special/stdlib/collection/immutable/LongMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/LongMap.scala @@ -20,6 +20,8 @@ import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer} import scala.annotation.tailrec import scala.annotation.unchecked.uncheckedVariance import scala.language.implicitConversions +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** Utility class for long maps. 
*/ @@ -52,7 +54,7 @@ object LongMap { def apply[T](elems: (Long, T)*): LongMap[T] = elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - def from[V](coll: IterableOnce[(Long, V)]): LongMap[V] = + def from[V](coll: IterableOnce[(Long, V)]^): LongMap[V] = newBuilder[V].addAll(coll).result() def newBuilder[V]: Builder[(Long, V), LongMap[V]] = @@ -86,13 +88,13 @@ object LongMap { @SerialVersionUID(3L) private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] } implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] } @@ -176,7 +178,7 @@ sealed abstract class LongMap[+T] extends AbstractMap[Long, T] with StrictOptimizedMapOps[Long, T, Map, LongMap[T]] with Serializable { - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T)] @uncheckedVariance): LongMap[T] = { + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T) @uncheckedVariance]^): LongMap[T] = { //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? val b = newSpecificBuilder b.sizeHint(coll) @@ -191,7 +193,7 @@ sealed abstract class LongMap[+T] extends AbstractMap[Long, T] override def empty: LongMap[T] = LongMap.Nil override def toList = { - val buffer = new ListBuffer[(Long, T)] + val buffer = new ListBuffer[(Long, T) @uncheckedCaptures] foreach(buffer += _) buffer.toList } @@ -478,10 +480,10 @@ sealed abstract class LongMap[+T] extends AbstractMap[Long, T] def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) - override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = + override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such - override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(that) + override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(that) def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] = strictOptimizedCollect(LongMap.newBuilder[V2], pf) diff --git a/tests/pos-special/stdlib/collection/immutable/Map.scala b/tests/pos-special/stdlib/collection/immutable/Map.scala index 9d334893b8cc..6daad829bf55 100644 --- a/tests/pos-special/stdlib/collection/immutable/Map.scala +++ b/tests/pos-special/stdlib/collection/immutable/Map.scala @@ -18,6 +18,8 @@ import scala.annotation.unchecked.uncheckedVariance import scala.collection.generic.DefaultSerializable import scala.collection.immutable.Map.Map4 import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** Base type of 
immutable Maps */ trait Map[K, +V] @@ -39,7 +41,7 @@ trait Map[K, +V] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default value */ - def withDefault[V1 >: V](d: K => V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d) + def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d) /** The same map with a given default value. * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. @@ -86,10 +88,10 @@ trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C] * @return a new $coll that contains all elements of the current $coll * except one less occurrence of each of the elements of `elems`. */ - def removedAll(keys: IterableOnce[K]): C = keys.iterator.foldLeft[C](coll)(_ - _) + def removedAll(keys: IterableOnce[K]^): C = keys.iterator.foldLeft[C](coll)(_ - _) /** Alias for `removedAll` */ - @`inline` final override def -- (keys: IterableOnce[K]): C = removedAll(keys) + @`inline` final override def -- (keys: IterableOnce[K]^): C = removedAll(keys) /** Creates a new map obtained by updating this map with a given key/value pair. * @param key the key @@ -153,7 +155,7 @@ trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapO with collection.StrictOptimizedMapOps[K, V, CC, C] with StrictOptimizedIterableOps[(K, V), Iterable, C] { - override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]): CC[K, V1] = { + override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]^): CC[K, V1] = { var result: CC[K, V1] = coll val it = that.iterator while (it.hasNext) result = result + it.next() @@ -171,7 +173,7 @@ trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapO object Map extends MapFactory[Map] { @SerialVersionUID(3L) - class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K => V) + class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K -> V) extends AbstractMap[K, V] with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { @@ -187,7 +189,7 @@ object Map extends MapFactory[Map] { override def mapFactory: MapFactory[Map] = underlying.mapFactory - override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = new WithDefault(underlying.concat(xs), defaultValue) def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) @@ -197,7 +199,7 @@ object Map extends MapFactory[Map] { override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - override protected def fromSpecific(coll: collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + override protected def fromSpecific(coll: collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = new WithDefault[K, V](mapFactory.from(coll), defaultValue) override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = @@ -206,7 +208,7 @@ object Map extends MapFactory[Map] { def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] - def from[K, V](it: collection.IterableOnce[(K, V)]): Map[K, V] = + def from[K, V](it: collection.IterableOnce[(K, V)]^): Map[K, V] = it match { case it: Iterable[_] if it.isEmpty => empty[K, V] case m: Map[K, V] => m @@ -229,7 +231,7 @@ object Map extends MapFactory[Map] { override def valuesIterator: 
Iterator[Nothing] = Iterator.empty def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) def removed(key: Any): Map[Any, Nothing] = this - override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]): Map[Any, V2] = suffix match { + override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]^): Map[Any, V2] = suffix match { case m: immutable.Map[Any, V2] => m case _ => super.concat(suffix) } @@ -313,7 +315,7 @@ object Map extends MapFactory[Map] { override protected def nextResult(k: K, v: V): V = v } - private abstract class Map2Iterator[A] extends AbstractIterator[A] { + private abstract class Map2Iterator[A] extends AbstractIterator[A], Pure { private[this] var i = 0 override def hasNext: Boolean = i < 2 override def next(): A = { @@ -416,7 +418,7 @@ object Map extends MapFactory[Map] { override protected def nextResult(k: K, v: V): V = v } - private abstract class Map3Iterator[A] extends AbstractIterator[A] { + private abstract class Map3Iterator[A] extends AbstractIterator[A], Pure { private[this] var i = 0 override def hasNext: Boolean = i < 3 override def next(): A = { @@ -536,7 +538,7 @@ object Map extends MapFactory[Map] { override protected def nextResult(k: K, v: V): V = v } - private abstract class Map4Iterator[A] extends AbstractIterator[A] { + private abstract class Map4Iterator[A] extends AbstractIterator[A], Pure { private[this] var i = 0 override def hasNext: Boolean = i < 4 override def next(): A = { @@ -639,9 +641,9 @@ object Map extends MapFactory[Map] { abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { - private[this] var elems: Map[K, V] = Map.empty + private[this] var elems: Map[K, V] @uncheckedCaptures = Map.empty private[this] var switchedToHashMapBuilder: Boolean = false - private[this] var hashMapBuilder: HashMapBuilder[K, V] = _ + private[this] var hashMapBuilder: HashMapBuilder[K, V] @uncheckedCaptures = _ private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) @@ -682,7 +684,7 @@ private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, def addOne(elem: (K, V)) = addOne(elem._1, elem._2) - override def addAll(xs: IterableOnce[(K, V)]): this.type = + override def addAll(xs: IterableOnce[(K, V)]^): this.type = if (switchedToHashMapBuilder) { hashMapBuilder.addAll(xs) this diff --git a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala index d1ee494711a7..f26d9728e5ad 100644 --- a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala +++ b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala @@ -14,6 +14,8 @@ package scala.collection.immutable import scala.collection.Stepper.EfficientSplit import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** `NumericRange` is a more generic version of the * `Range` class which works with arbitrary types. 
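In the Map.scala hunks above, the small fixed-size iterators (`Map2Iterator`, `Map3Iterator`, `Map4Iterator`) also mix in `Pure`, telling the capture checker that instances retain no capabilities and can be used as plain, capture-free iterators. Roughly, under the same experimental setting (the `TwoIterator` class is hypothetical, and it assumes the `Pure` marker trait is in scope just as it is in those hunks):

    import language.experimental.captureChecking

    // Hypothetical sketch: mixing in Pure asserts the class captures nothing,
    // so instances can be used where an Iterator with an empty capture set is
    // expected; the capture checker verifies that assertion.
    final class TwoIterator[A](a0: A, a1: A) extends Iterator[A], Pure {
      private var i = 0
      def hasNext: Boolean = i < 2
      def next(): A = { val r = if (i == 0) a0 else a1; i += 1; r }
    }
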
@@ -492,7 +494,7 @@ object NumericRange { import num.mkNumericOps private[this] var _hasNext = !self.isEmpty - private[this] var _next: T = self.start + private[this] var _next: T @uncheckedCaptures = self.start private[this] val lastElement: T = if (_hasNext) self.last else self.start override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0 def hasNext: Boolean = _hasNext diff --git a/tests/pos-special/stdlib/collection/immutable/Queue.scala b/tests/pos-special/stdlib/collection/immutable/Queue.scala index 3d0f8206b6a9..929c79ce588a 100644 --- a/tests/pos-special/stdlib/collection/immutable/Queue.scala +++ b/tests/pos-special/stdlib/collection/immutable/Queue.scala @@ -15,6 +15,7 @@ package immutable import scala.collection.generic.DefaultSerializable import scala.collection.mutable.{Builder, ListBuffer} +import language.experimental.captureChecking /** `Queue` objects implement data structures that allow to * insert and retrieve elements in a first-in-first-out (FIFO) manner. @@ -119,7 +120,7 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L override def appended[B >: A](elem: B): Queue[B] = enqueue(elem) - override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]): Queue[B] = { + override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]^): Queue[B] = { val newIn = that match { case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) case that: List[B] => that reverse_::: this.in @@ -200,9 +201,9 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L */ @SerialVersionUID(3L) object Queue extends StrictOptimizedSeqFactory[Queue] { - def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) + def newBuilder[sealed A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) - def from[A](source: IterableOnce[A]): Queue[A] = source match { + def from[A](source: IterableOnce[A]^): Queue[A] = source match { case q: Queue[A] => q case _ => val list = List.from(source) diff --git a/tests/pos-special/stdlib/collection/immutable/Range.scala b/tests/pos-special/stdlib/collection/immutable/Range.scala index 66a149840488..459591d1a9cb 100644 --- a/tests/pos-special/stdlib/collection/immutable/Range.scala +++ b/tests/pos-special/stdlib/collection/immutable/Range.scala @@ -17,6 +17,7 @@ import scala.collection.Stepper.EfficientSplit import scala.collection.convert.impl.RangeStepper import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** The `Range` class represents integer values in range * ''[start;end)'' with non-zero step value `step`. @@ -213,7 +214,7 @@ sealed abstract class Range( private[this] def posOf(i: Int): Int = if (contains(i)) (i - start) / step else -1 - override def sameElements[B >: Int](that: IterableOnce[B]): Boolean = that match { + override def sameElements[B >: Int](that: IterableOnce[B]^): Boolean = that match { case other: Range => (this.length : @annotation.switch) match { case 0 => other.isEmpty @@ -613,7 +614,7 @@ object Range { // As there is no appealing default step size for not-really-integral ranges, // we offer a partially constructed object. 
- class Partial[T, U](private val f: T => U) extends AnyVal { + class Partial[T, U](private val f: T -> U) extends AnyVal { def by(x: T): U = f(x) override def toString = "Range requires step" } diff --git a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala index 2e7aa7b472ad..5fbc927d7a21 100644 --- a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala +++ b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala @@ -17,6 +17,8 @@ package immutable import scala.annotation.meta.{getter, setter} import scala.annotation.tailrec import scala.runtime.Statics.releaseFence +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. * @@ -834,10 +836,11 @@ private[collection] object RedBlackTree { * we potentially do so in `startFrom`. */ val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 - new Array[Tree[A, B]](maximumHeight) + new Array[Tree[A, B] @uncheckedCaptures](maximumHeight) } private[this] var index = 0 - protected var lookahead: Tree[A, B] = if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) + protected var lookahead: Tree[A, B] @uncheckedCaptures = + if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) /** * Find the leftmost subtree whose key is equal to the given key, or if no such thing, diff --git a/tests/pos-special/stdlib/collection/immutable/Seq.scala b/tests/pos-special/stdlib/collection/immutable/Seq.scala index 925fd648c70c..d575c3aaf14a 100644 --- a/tests/pos-special/stdlib/collection/immutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/immutable/Seq.scala @@ -14,6 +14,8 @@ package scala package collection package immutable +import language.experimental.captureChecking + trait Seq[+A] extends Iterable[A] with collection.Seq[A] with SeqOps[A, Seq, Seq[A]] @@ -37,7 +39,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C] */ @SerialVersionUID(3L) object Seq extends SeqFactory.Delegate[Seq](List) { - override def from[E](it: IterableOnce[E]): Seq[E] = it match { + override def from[E](it: IterableOnce[E]^): Seq[E] = it match { case s: Seq[E] => s case _ => super.from(it) } @@ -57,7 +59,7 @@ trait IndexedSeq[+A] extends Seq[A] } - override def sameElements[B >: A](o: IterableOnce[B]): Boolean = o match { + override def sameElements[B >: A](o: IterableOnce[B]^): Boolean = o match { case that: IndexedSeq[_] => (this eq that) || { val length = this.length @@ -110,7 +112,7 @@ object IndexedSeqDefaults { @SerialVersionUID(3L) object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](Vector) { - override def from[E](it: IterableOnce[E]): IndexedSeq[E] = it match { + override def from[E](it: IterableOnce[E]^): IndexedSeq[E] = it match { case is: IndexedSeq[E] => is case _ => super.from(it) } @@ -141,14 +143,14 @@ trait LinearSeq[+A] @SerialVersionUID(3L) object LinearSeq extends SeqFactory.Delegate[LinearSeq](List) { - override def from[E](it: IterableOnce[E]): LinearSeq[E] = it match { + override def from[E](it: IterableOnce[E]^): LinearSeq[E] = it match { case ls: LinearSeq[E] => ls case _ => super.from(it) } } trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] - extends Any with SeqOps[A, CC, C] + extends AnyRef with SeqOps[A, CC, C] with collection.LinearSeqOps[A, CC, C] /** Explicit 
instantiation of the `Seq` trait to reduce class file size in subclasses. */ diff --git a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala index aca9e139165e..6c955fd52fc2 100644 --- a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala @@ -15,6 +15,8 @@ package collection package immutable import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** A base trait for ordered, immutable maps. * @@ -44,7 +46,7 @@ trait SeqMap[K, +V] object SeqMap extends MapFactory[SeqMap] { def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]] - def from[K, V](it: collection.IterableOnce[(K, V)]): SeqMap[K, V] = + def from[K, V](it: collection.IterableOnce[(K, V)]^): SeqMap[K, V] = it match { case sm: SeqMap[K, V] => sm case _ => (newBuilder[K, V] ++= it).result() @@ -228,9 +230,9 @@ object SeqMap extends MapFactory[SeqMap] { } private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { - private[this] var elems: SeqMap[K, V] = SeqMap.empty + private[this] var elems: SeqMap[K, V] @uncheckedCaptures = SeqMap.empty private[this] var switchedToVectorMapBuilder: Boolean = false - private[this] var vectorMapBuilder: VectorMapBuilder[K, V] = _ + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] @uncheckedCaptures = _ override def clear(): Unit = { elems = SeqMap.empty @@ -265,7 +267,7 @@ object SeqMap extends MapFactory[SeqMap] { this } - override def addAll(xs: IterableOnce[(K, V)]): this.type = + override def addAll(xs: IterableOnce[(K, V)]^): this.type = if (switchedToVectorMapBuilder) { vectorMapBuilder.addAll(xs) this diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala index f07eb66991c8..ac92f81b2013 100644 --- a/tests/pos-special/stdlib/collection/immutable/Set.scala +++ b/tests/pos-special/stdlib/collection/immutable/Set.scala @@ -16,6 +16,8 @@ package immutable import scala.collection.immutable.Set.Set4 import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** Base trait for immutable set collections */ trait Set[A] extends Iterable[A] @@ -94,7 +96,7 @@ object Set extends IterableFactory[Set] { def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]] - def from[E](it: collection.IterableOnce[E]): Set[E] = + def from[E](it: collection.IterableOnce[E]^): Set[E] = it match { // We want `SortedSet` (and subclasses, such as `BitSet`) to // rebuild themselves to avoid element type widening issues @@ -128,7 +130,7 @@ object Set extends IterableFactory[Set] { private[collection] def emptyInstance: Set[Any] = EmptySet @SerialVersionUID(3L) - private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A] with Serializable { + private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A], Serializable, Pure { private[this] var current = 0 private[this] var remainder = n override def knownSize: Int = remainder @@ -351,9 +353,9 @@ abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A * $multipleResults */ private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { - private[this] var elems: Set[A] = Set.empty + private[this] var elems: Set[A @uncheckedCaptures] = Set.empty private[this] 
var switchedToHashSetBuilder: Boolean = false - private[this] var hashSetBuilder: HashSetBuilder[A] = _ + private[this] var hashSetBuilder: HashSetBuilder[A @uncheckedCaptures] = _ override def clear(): Unit = { elems = Set.empty @@ -388,7 +390,7 @@ private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { this } - override def addAll(xs: IterableOnce[A]): this.type = + override def addAll(xs: IterableOnce[A]^): this.type = if (switchedToHashSetBuilder) { hashSetBuilder.addAll(xs) this diff --git a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala index 666d8c55bfb0..9587502fd908 100644 --- a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala @@ -16,6 +16,7 @@ package immutable import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.Builder +import language.experimental.captureChecking /** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys. * @@ -69,7 +70,7 @@ trait SortedMap[K, +V] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default value */ - override def withDefault[V1 >: V](d: K => V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) + override def withDefault[V1 >: V](d: K -> V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) /** The same map with a given default value. * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. @@ -123,7 +124,7 @@ trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapO with collection.StrictOptimizedSortedMapOps[K, V, CC, C] with StrictOptimizedMapOps[K, V, Map, C] { - override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = { + override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = { var result: CC[K, V2] = coll val it = xs.iterator while (it.hasNext) result = result + it.next() @@ -134,12 +135,12 @@ trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapO @SerialVersionUID(3L) object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { - override def from[K: Ordering, V](it: IterableOnce[(K, V)]): SortedMap[K, V] = it match { + override def from[K: Ordering, V](it: IterableOnce[(K, V)]^): SortedMap[K, V] = it match { case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm case _ => super.from(it) } - final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K => V) + final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K -> V) extends Map.WithDefault[K, V](underlying, defaultValue) with SortedMap[K, V] with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable { @@ -161,14 +162,14 @@ object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated(key, value), defaultValue) - override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = new WithDefault( underlying.concat(xs) , defaultValue) override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) override def empty: WithDefault[K, V] = new WithDefault[K, 
V](underlying.empty, defaultValue) - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala index 303e5ea9658c..874abcaecda1 100644 --- a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala @@ -13,6 +13,7 @@ package scala package collection package immutable +import language.experimental.captureChecking /** Base trait for sorted sets */ trait SortedSet[A] @@ -50,7 +51,7 @@ trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[ */ @SerialVersionUID(3L) object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { - override def from[E: Ordering](it: IterableOnce[E]): SortedSet[E] = it match { + override def from[E: Ordering](it: IterableOnce[E]^): SortedSet[E] = it match { case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss case _ => super.from(it) } diff --git a/tests/pos-special/stdlib/collection/immutable/Stream.scala b/tests/pos-special/stdlib/collection/immutable/Stream.scala deleted file mode 100644 index ae03641e97dd..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Stream.scala +++ /dev/null @@ -1,568 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import java.io.{ObjectInputStream, ObjectOutputStream} -import java.lang.{StringBuilder => JStringBuilder} - -import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.generic.SerializeEnd -import scala.collection.mutable.{ArrayBuffer, StringBuilder} -import scala.language.implicitConversions -import Stream.cons - -@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") -@SerialVersionUID(3L) -sealed abstract class Stream[+A] extends AbstractSeq[A] - with LinearSeq[A] - with LinearSeqOps[A, Stream, Stream[A]] - with IterableFactoryDefaults[A, Stream] - with Serializable { - def tail: Stream[A] - - /** Forces evaluation of the whole `Stream` and returns it. - * - * @note Often we use `Stream`s to represent an infinite set or series. If - * that's the case for your particular `Stream` then this function will never - * return and will probably crash the VM with an `OutOfMemory` exception. - * This function will not hang on a finite cycle, however. - * - * @return The fully realized `Stream`. - */ - def force: this.type - - override def iterableFactory: SeqFactory[Stream] = Stream - - override protected[this] def className: String = "Stream" - - /** Apply the given function `f` to each element of this linear sequence - * (while respecting the order of the elements). - * - * @param f The treatment to apply to each element. 
- * @note Overridden here as final to trigger tail-call optimization, which - * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying Stream as elements - * are consumed. - * @note This function will force the realization of the entire Stream - * unless the `f` throws an exception. - */ - @tailrec - override final def foreach[U](f: A => U): Unit = { - if (!this.isEmpty) { - f(head) - tail.foreach(f) - } - } - - @tailrec - override final def find(p: A => Boolean): Option[A] = { - if(isEmpty) None - else if(p(head)) Some(head) - else tail.find(p) - } - - override def take(n: Int): Stream[A] = { - if (n <= 0 || isEmpty) Stream.empty - else if (n == 1) new Stream.Cons(head, Stream.empty) - else new Stream.Cons(head, tail.take(n - 1)) - } - - /** Stream specialization of foldLeft which allows GC to collect along the - * way. - * - * @tparam B The type of value being accumulated. - * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `Stream`. - * @return The accumulated value from successive applications of `op`. - */ - @tailrec - override final def foldLeft[B](z: B)(op: (B, A) => B): B = { - if (this.isEmpty) z - else tail.foldLeft(op(z, head))(op) - } - - /** The stream resulting from the concatenation of this stream with the argument stream. - * @param rest The collection that gets appended to this stream - * @return The stream containing elements of this stream and the iterable object. - */ - @deprecated("The `append` operation has been renamed `lazyAppendedAll`", "2.13.0") - @inline final def append[B >: A](rest: => IterableOnce[B]): Stream[B] = lazyAppendedAll(rest) - - protected[this] def writeReplace(): AnyRef = - if(nonEmpty && tailDefined) new Stream.SerializationProxy[A](this) else this - - /** Prints elements of this stream one by one, separated by commas. */ - @deprecated(message = """Use print(stream.force.mkString(", ")) instead""", since = "2.13.0") - @inline def print(): Unit = Console.print(this.force.mkString(", ")) - - /** Prints elements of this stream one by one, separated by `sep`. - * @param sep The separator string printed between consecutive elements. - */ - @deprecated(message = "Use print(stream.force.mkString(sep)) instead", since = "2.13.0") - @inline def print(sep: String): Unit = Console.print(this.force.mkString(sep)) - - /** The stream resulting from the concatenation of this stream with the argument stream. - * - * @param suffix The collection that gets appended to this stream - * @return The stream containing elements of this stream and the iterable object. - */ - def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): Stream[B] = - if (isEmpty) iterableFactory.from(suffix) else cons[B](head, tail.lazyAppendedAll(suffix)) - - override def scanLeft[B](z: B)(op: (B, A) => B): Stream[B] = - if (isEmpty) z +: iterableFactory.empty - else cons(z, tail.scanLeft(op(z, head))(op)) - - /** Stream specialization of reduceLeft which allows GC to collect - * along the way. - * - * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `Stream`. - * @return The accumulated value from successive applications of `f`. 
- */ - override final def reduceLeft[B >: A](f: (B, A) => B): B = { - if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") - else { - var reducedRes: B = this.head - var left: Stream[A] = this.tail - while (!left.isEmpty) { - reducedRes = f(reducedRes, left.head) - left = left.tail - } - reducedRes - } - } - - override def partition(p: A => Boolean): (Stream[A], Stream[A]) = (filter(p(_)), filterNot(p(_))) - - override def filter(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = false) - - override def filterNot(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = true) - - private[immutable] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = { - // optimization: drop leading prefix of elems for which f returns false - // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise - var rest: Stream[A] = coll - while (rest.nonEmpty && p(rest.head) == isFlipped) rest = rest.tail - // private utility func to avoid `this` on stack (would be needed for the lazy arg) - if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped) - else iterableFactory.empty - } - - /** A `collection.WithFilter` which allows GC of the head of stream during processing */ - override final def withFilter(p: A => Boolean): collection.WithFilter[A, Stream] = - Stream.withFilter(coll, p) - - override final def prepended[B >: A](elem: B): Stream[B] = cons(elem, coll) - - override final def map[B](f: A => B): Stream[B] = - if (isEmpty) iterableFactory.empty - else cons(f(head), tail.map(f)) - - @tailrec override final def collect[B](pf: PartialFunction[A, B]): Stream[B] = - if(isEmpty) Stream.empty - else { - var newHead: B = null.asInstanceOf[B] - val runWith = pf.runWith((b: B) => newHead = b) - if(runWith(head)) Stream.collectedTail(newHead, this, pf) - else tail.collect(pf) - } - - @tailrec override final def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = - if(isEmpty) None - else { - var newHead: B = null.asInstanceOf[B] - val runWith = pf.runWith((b: B) => newHead = b) - if(runWith(head)) Some(newHead) - else tail.collectFirst(pf) - } - - // optimisations are not for speed, but for functionality - // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - override final def flatMap[B](f: A => IterableOnce[B]): Stream[B] = - if (isEmpty) iterableFactory.empty - else { - // establish !prefix.isEmpty || nonEmptyPrefix.isEmpty - var nonEmptyPrefix: Stream[A] = coll - var prefix = iterableFactory.from(f(nonEmptyPrefix.head)) - while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) { - nonEmptyPrefix = nonEmptyPrefix.tail - if(!nonEmptyPrefix.isEmpty) - prefix = iterableFactory.from(f(nonEmptyPrefix.head)) - } - - if (nonEmptyPrefix.isEmpty) iterableFactory.empty - else prefix.lazyAppendedAll(nonEmptyPrefix.tail.flatMap(f)) - } - - override final def zip[B](that: collection.IterableOnce[B]): Stream[(A, B)] = - if (this.isEmpty || that.isEmpty) iterableFactory.empty - else { - val thatIterable = that match { - case that: collection.Iterable[B] => that - case _ => LazyList.from(that) - } - cons[(A, B)]((this.head, thatIterable.head), this.tail.zip(thatIterable.tail)) - } - - override final def zipWithIndex: Stream[(A, Int)] = this.zip(LazyList.from(0)) - - protected def tailDefined: Boolean - - /** Appends all elements of this $coll to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string `end`. 
-   * Inside, the string representations (w.r.t. the method `toString`)
-   * of all elements of this $coll are separated by the string `sep`.
-   *
-   * Undefined elements are represented with `"_"`, an undefined tail is represented with `"<not computed>"`,
-   * and cycles are represented with `"<cycle>"`.
-   *
-   * @param sb    the string builder to which elements are appended.
-   * @param start the starting string.
-   * @param sep   the separator string.
-   * @param end   the ending string.
-   * @return      the string builder `sb` to which elements were appended.
-   */
-  override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = {
-    force
-    addStringNoForce(sb.underlying, start, sep, end)
-    sb
-  }
-
-  private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = {
-    b.append(start)
-    if (nonEmpty) {
-      b.append(head)
-      var cursor = this
-      def appendCursorElement(): Unit = b.append(sep).append(cursor.head)
-      if (tailDefined) { // If tailDefined, also !isEmpty
-        var scout = tail
-        if (cursor ne scout) {
-          cursor = scout
-          if (scout.tailDefined) {
-            scout = scout.tail
-            // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings
-            while ((cursor ne scout) && scout.tailDefined) {
-              appendCursorElement()
-              cursor = cursor.tail
-              scout = scout.tail
-              if (scout.tailDefined) scout = scout.tail
-            }
-          }
-        }
-        if (!scout.tailDefined) { // Not a cycle, scout hit an end
-          while (cursor ne scout) {
-            appendCursorElement()
-            cursor = cursor.tail
-          }
-          if (cursor.nonEmpty) {
-            appendCursorElement()
-          }
-        }
-        else {
-          // Cycle.
-          // If we have a prefix of length P followed by a cycle of length C,
-          // the scout will be at position (P%C) in the cycle when the cursor
-          // enters it at P. They'll then collide when the scout advances another
-          // C - (P%C) ahead of the cursor.
-          // If we run the scout P farther, then it will be at the start of
-          // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner
-          // starts at the beginning of the prefix, they'll collide exactly at
-          // the start of the loop.
-          var runner = this
-          var k = 0
-          while (runner ne scout) {
-            runner = runner.tail
-            scout = scout.tail
-            k += 1
-          }
-          // Now runner and scout are at the beginning of the cycle. Advance
-          // cursor, adding to string, until it hits; then we'll have covered
-          // everything once. If cursor is already at beginning, we'd better
-          // advance one first unless runner didn't go anywhere (in which case
-          // we've already looped once).
-          if ((cursor eq scout) && (k > 0)) {
-            appendCursorElement()
-            cursor = cursor.tail
-          }
-          while (cursor ne scout) {
-            appendCursorElement()
-            cursor = cursor.tail
-          }
-        }
-      }
-      if (cursor.nonEmpty) {
-        // Either undefined or cyclic; we can check with tailDefined
-        if (!cursor.tailDefined) b.append(sep).append("<not computed>")
-        else b.append(sep).append("<cycle>")
-      }
-    }
-    b.append(end)
-  }
-
-  /**
-   * @return a string representation of this collection. Undefined elements are
-   *         represented with `"_"`, an undefined tail is represented with `"<not computed>"`,
-   *         and cycles are represented with `"<cycle>"`
-   *
-   * Examples:
-   *
-   *   - `"Stream(_, <not computed>)"`, a non-empty stream, whose head has not been
-   *     evaluated;
-   *   - `"Stream(_, 1, _, <not computed>)"`, a stream with at least three elements,
-   *     the second one has been evaluated;
-   *   - `"Stream(1, 2, 3, <cycle>)"`, an infinite stream that contains
-   *     a cycle at the fourth element.
- */ - override def toString = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString - - @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") - override def hasDefiniteSize: Boolean = isEmpty || { - if (!tailDefined) false - else { - // Two-iterator trick (2x & 1x speed) for cycle detection. - var those = this - var these = tail - while (those ne these) { - if (these.isEmpty) return true - if (!these.tailDefined) return false - these = these.tail - if (these.isEmpty) return true - if (!these.tailDefined) return false - these = these.tail - if (those eq these) return false - those = those.tail - } - false // Cycle detected - } - } -} - -@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") -@SerialVersionUID(3L) -object Stream extends SeqFactory[Stream] { - - /* !!! #11997 This `object cons` must be defined lexically *before* `class Cons` below. - * Otherwise it prevents Scala.js from building on Windows. - */ - /** An alternative way of building and matching Streams using Stream.cons(hd, tl). - */ - object cons { - /** A stream consisting of a given first element and remaining elements - * @param hd The first element of the result stream - * @param tl The remaining elements of the result stream - */ - def apply[A](hd: A, tl: => Stream[A]): Stream[A] = new Cons(hd, tl) - - /** Maps a stream to its head and tail */ - def unapply[A](xs: Stream[A]): Option[(A, Stream[A])] = #::.unapply(xs) - } - - //@SerialVersionUID(3L) //TODO Putting an annotation on Stream.empty causes a cyclic dependency in unpickling - object Empty extends Stream[Nothing] { - override def isEmpty: Boolean = true - override def head: Nothing = throw new NoSuchElementException("head of empty stream") - override def tail: Stream[Nothing] = throw new UnsupportedOperationException("tail of empty stream") - /** Forces evaluation of the whole `Stream` and returns it. - * - * @note Often we use `Stream`s to represent an infinite set or series. If - * that's the case for your particular `Stream` then this function will never - * return and will probably crash the VM with an `OutOfMemory` exception. - * This function will not hang on a finite cycle, however. - * - * @return The fully realized `Stream`. - */ - def force: this.type = this - override def knownSize: Int = 0 - protected def tailDefined: Boolean = false - } - - @SerialVersionUID(3L) - final class Cons[A](override val head: A, tl: => Stream[A]) extends Stream[A] { - override def isEmpty: Boolean = false - @volatile private[this] var tlVal: Stream[A] = _ - @volatile private[this] var tlGen = () => tl - protected def tailDefined: Boolean = tlGen eq null - override def tail: Stream[A] = { - if (!tailDefined) - synchronized { - if (!tailDefined) { - tlVal = tlGen() - tlGen = null - } - } - tlVal - } - - /** Forces evaluation of the whole `Stream` and returns it. - * - * @note Often we use `Stream`s to represent an infinite set or series. If - * that's the case for your particular `Stream` then this function will never - * return and will probably crash the VM with an `OutOfMemory` exception. - * This function will not hang on a finite cycle, however. - * - * @return The fully realized `Stream`. 
- */ - def force: this.type = { - // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: Stream[A] = this - if (!these.isEmpty) these = these.tail - while (those ne these) { - if (these.isEmpty) return this - these = these.tail - if (these.isEmpty) return this - these = these.tail - if (these eq those) return this - those = those.tail - } - this - } - - } - - implicit def toDeferrer[A](l: => Stream[A]): Deferrer[A] = new Deferrer[A](() => l) - - final class Deferrer[A] private[Stream] (private val l: () => Stream[A]) extends AnyVal { - /** Construct a Stream consisting of a given first element followed by elements - * from another Stream. - */ - def #:: [B >: A](elem: B): Stream[B] = new Cons(elem, l()) - /** Construct a Stream consisting of the concatenation of the given Stream and - * another Stream. - */ - def #:::[B >: A](prefix: Stream[B]): Stream[B] = prefix lazyAppendedAll l() - } - - object #:: { - def unapply[A](s: Stream[A]): Option[(A, Stream[A])] = - if (s.nonEmpty) Some((s.head, s.tail)) else None - } - - def from[A](coll: collection.IterableOnce[A]): Stream[A] = coll match { - case coll: Stream[A] => coll - case _ => fromIterator(coll.iterator) - } - - /** - * @return A `Stream[A]` that gets its elements from the given `Iterator`. - * - * @param it Source iterator - * @tparam A type of elements - */ - // Note that the resulting `Stream` will be effectively iterable more than once because - // `Stream` memoizes its elements - def fromIterator[A](it: Iterator[A]): Stream[A] = - if (it.hasNext) { - new Stream.Cons(it.next(), fromIterator(it)) - } else Stream.Empty - - def empty[A]: Stream[A] = Empty - - override def newBuilder[A]: mutable.Builder[A, Stream[A]] = ArrayBuffer.newBuilder[A].mapResult(array => from(array)) - - private[immutable] def withFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean): collection.WithFilter[A, Stream] = - new WithFilter[A](l, p) - - private[this] final class WithFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean) extends collection.WithFilter[A, Stream] { - private[this] var s = l // set to null to allow GC after filtered - private[this] lazy val filtered: Stream[A] = { val f = s.filter(p); s = null.asInstanceOf[Stream[A]]; f } // don't set to null if throw during filter - def map[B](f: A => B): Stream[B] = filtered.map(f) - def flatMap[B](f: A => IterableOnce[B]): Stream[B] = filtered.flatMap(f) - def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): collection.WithFilter[A, Stream] = new WithFilter(filtered, q) - } - - /** An infinite Stream that repeatedly applies a given function to a start value. - * - * @param start the start value of the Stream - * @param f the function that's repeatedly applied - * @return the Stream returning the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A)(f: A => A): Stream[A] = { - cons(start, iterate(f(start))(f)) - } - - /** - * Create an infinite Stream starting at `start` and incrementing by - * step `step`. - * - * @param start the start value of the Stream - * @param step the increment value of the Stream - * @return the Stream starting at value `start`. - */ - def from(start: Int, step: Int): Stream[Int] = - cons(start, from(start + step, step)) - - /** - * Create an infinite Stream starting at `start` and incrementing by `1`. - * - * @param start the start value of the Stream - * @return the Stream starting at value `start`. 
- */ - def from(start: Int): Stream[Int] = from(start, 1) - - /** - * Create an infinite Stream containing the given element expression (which - * is computed for each occurrence). - * - * @param elem the element composing the resulting Stream - * @return the Stream containing an infinite number of elem - */ - def continually[A](elem: => A): Stream[A] = cons(elem, continually(elem)) - - - private[Stream] def filteredTail[A](stream: Stream[A] @uncheckedVariance, p: A => Boolean, isFlipped: Boolean) = { - cons(stream.head, stream.tail.filterImpl(p, isFlipped)) - } - - private[Stream] def collectedTail[A, B](head: B, stream: Stream[A] @uncheckedVariance, pf: PartialFunction[A, B]) = { - cons(head, stream.tail.collect(pf)) - } - - /** This serialization proxy is used for Streams which start with a sequence of evaluated cons cells. - * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses - * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization - * of long evaluated streams without exhausting the stack through recursive serialization of cons cells. - */ - @SerialVersionUID(3L) - class SerializationProxy[A](@transient protected var coll: Stream[A]) extends Serializable { - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - var these = coll - while(these.nonEmpty && these.tailDefined) { - out.writeObject(these.head) - these = these.tail - } - out.writeObject(SerializeEnd) - out.writeObject(these) - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - val init = new ArrayBuffer[A] - var initRead = false - while (!initRead) in.readObject match { - case SerializeEnd => initRead = true - case a => init += a.asInstanceOf[A] - } - val tail = in.readObject().asInstanceOf[Stream[A]] - coll = (init ++: tail) - } - - protected[this] def readResolve(): Any = coll - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala index db5192edc36c..b1e4622971fb 100644 --- a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala @@ -13,6 +13,8 @@ package scala package collection package immutable +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** * Trait that overrides operations to take advantage of strict builders. 
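The two imports added above recur in nearly every file touched by this part of the series: `language.experimental.captureChecking` switches the experimental capture checker on for the compilation unit, and `annotation.unchecked.uncheckedCaptures` annotates mutable state whose type may be instantiated to a capturing type. A minimal sketch of the pattern, assuming the experimental checker is enabled; the `describe` method and `Cell` class are illustrative names, not part of the patch:

    import language.experimental.captureChecking
    import scala.annotation.unchecked.uncheckedCaptures

    // `A -> B` is the pure function type: unlike the regular `A => B`,
    // the argument may not capture capabilities.
    def describe[A, B](f: A -> B, x: A): String = f(x).toString

    // Mutable state whose type mentions a type parameter trips the
    // checker; `@uncheckedCaptures` asserts the store is nevertheless safe.
    class Cell[A](init: A) {
      private var value: A @uncheckedCaptures = init
      def get: A = value
      def set(x: A): Unit = value = x
    }

This appears to be why `distinctBy` in the next hunk takes `f: A -> B` and keeps its `seen` set as `mutable.HashSet.empty[B @uncheckedCaptures]`: the key function is required to be pure, and the local mutable set is asserted capture-safe.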
@@ -23,11 +25,11 @@ trait StrictOptimizedSeqOps[+A, +CC[_], +C] with collection.StrictOptimizedSeqOps[A, CC, C] with StrictOptimizedIterableOps[A, CC, C] { - override def distinctBy[B](f: A => B): C = { + override def distinctBy[B](f: A -> B): C = { if (lengthCompare(1) <= 0) coll else { val builder = newSpecificBuilder - val seen = mutable.HashSet.empty[B] + val seen = mutable.HashSet.empty[B @uncheckedCaptures] val it = this.iterator var different = false while (it.hasNext) { @@ -57,7 +59,7 @@ trait StrictOptimizedSeqOps[+A, +CC[_], +C] b.result() } - override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = { + override def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = { val b = iterableFactory.newBuilder[B] var i = 0 val it = iterator diff --git a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala index a51c7b9e7bf6..ff01ad7806ec 100644 --- a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala @@ -20,6 +20,8 @@ import scala.collection.generic.DefaultSerializable import scala.collection.immutable.{RedBlackTree => RB} import scala.collection.mutable.ReusableBuilder import scala.runtime.AbstractFunction2 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** An immutable SortedMap whose values are stored in a red-black tree. * @@ -138,7 +140,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] = newMapOrSelf(RB.update(tree, key, value, overwrite = true)) - override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]): TreeMap[K, V1] = + override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]^): TreeMap[K, V1] = newMapOrSelf(that match { case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => RB.union(tree, tm.tree) @@ -158,7 +160,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va adder.finalTree }) - override def removedAll(keys: IterableOnce[K]): TreeMap[K, V] = keys match { + override def removedAll(keys: IterableOnce[K]^): TreeMap[K, V] = keys match { case ts: TreeSet[K] if ordering == ts.ordering => newMapOrSelf(RB.difference(tree, ts.tree)) case _ => super.removedAll(keys) @@ -269,7 +271,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va private final class Adder[B1 >: V] extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] { - private var currentMutableTree: RB.Tree[K,B1] = tree0 + private var currentMutableTree: RB.Tree[K,B1] @uncheckedCaptures = tree0 def finalTree = beforePublish(currentMutableTree) override def apply(kv: (K, B1)): Unit = { currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) @@ -299,7 +301,7 @@ object TreeMap extends SortedMapFactory[TreeMap] { def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap() - def from[K, V](it: IterableOnce[(K, V)])(implicit ordering: Ordering[K]): TreeMap[K, V] = + def from[K, V](it: IterableOnce[(K, V)]^)(implicit ordering: Ordering[K]): TreeMap[K, V] = it match { case tm: TreeMap[K, V] if ordering == tm.ordering => tm case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering => @@ -320,7 +322,7 @@ object TreeMap extends SortedMapFactory[TreeMap] { extends RB.MapHelper[K, V] with ReusableBuilder[(K, V), TreeMap[K, V]] { type Tree = RB.Tree[K, V] - private var tree:Tree = null + 
private var tree:Tree @uncheckedCaptures = null def addOne(elem: (K, V)): this.type = { tree = mutableUpd(tree, elem._1, elem._2) @@ -329,7 +331,7 @@ object TreeMap extends SortedMapFactory[TreeMap] { private object adder extends AbstractFunction2[K, V, Unit] { // we cache tree to avoid the outer access to tree // in the hot path (apply) - private[this] var accumulator :Tree = null + private[this] var accumulator: Tree @uncheckedCaptures = null def addForEach(hasForEach: collection.Map[K, V]): Unit = { accumulator = tree hasForEach.foreachEntry(this) @@ -343,7 +345,7 @@ object TreeMap extends SortedMapFactory[TreeMap] { } } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { xs match { // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= // for the moment we have to force immutability before the union diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala index 80bafb1cf3be..91233669e5ca 100644 --- a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala @@ -15,6 +15,8 @@ package collection package immutable import scala.annotation.tailrec +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements an immutable map that preserves order using * a hash map for the key to value mapping to provide efficient lookup, @@ -204,7 +206,7 @@ final class TreeSeqMap[K, +V] private ( new TreeSeqMap(ong, mng, ordinal, orderedBy) } else { // Populate with builder otherwise - val bdr = newBuilder[K, V](orderedBy) + val bdr = newBuilder[K @uncheckedCaptures, V @uncheckedCaptures](orderedBy) val iter = ordering.iterator var i = 0 while (i < f) { @@ -222,7 +224,7 @@ final class TreeSeqMap[K, +V] private ( } override def map[K2, V2](f: ((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2, V2](orderedBy) + val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) val iter = ordering.iterator while (iter.hasNext) { val k = iter.next() @@ -233,8 +235,8 @@ final class TreeSeqMap[K, +V] private ( bdr.result() } - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2, V2](orderedBy) + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) val iter = ordering.iterator while (iter.hasNext) { val k = iter.next() @@ -249,7 +251,7 @@ final class TreeSeqMap[K, +V] private ( } override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2, V2](orderedBy) + val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) val iter = ordering.iterator while (iter.hasNext) { val k = iter.next() @@ -259,7 +261,7 @@ final class TreeSeqMap[K, +V] private ( bdr.result() } - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): TreeSeqMap[K, V2] = { + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): TreeSeqMap[K, V2] = { var ong: Ordering[K] = ordering var mng: Mapping[K, V2] = mapping var ord = increment(ordinal) @@ -302,7 +304,7 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { else EmptyByInsertion }.asInstanceOf[TreeSeqMap[K, V]] - def from[K, V](it: collection.IterableOnce[(K, V)]): TreeSeqMap[K, 
V] = + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): TreeSeqMap[K, V] = it match { case om: TreeSeqMap[K, V] => om case _ => (newBuilder[K, V] ++= it).result() @@ -310,10 +312,10 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1 - def newBuilder[K, V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) - def newBuilder[K, V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) + def newBuilder[sealed K, sealed V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) + def newBuilder[sealed K, sealed V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) - final class Builder[K, V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { + final class Builder[sealed K, sealed V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { private[this] val bdr = new MapBuilderImpl[K, (Int, V)] private[this] var ong = Ordering.empty[K] private[this] var ord = 0 @@ -435,7 +437,7 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n" } - final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { + final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T @uncheckedCaptures] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = { if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]] else Bin[S](prefix, mask, left, right) @@ -607,7 +609,7 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { } final def splitAt(n: Int): (Ordering[T], Ordering[T]) = { - var rear = Ordering.empty[T] + var rear: Ordering[T @uncheckedCaptures] = Ordering.empty[T] var i = n (modifyOrRemove { (o, v) => i -= 1 diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala index f0be91b72acc..c4241b818c38 100644 --- a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala @@ -19,7 +19,8 @@ import scala.collection.generic.DefaultSerializable import scala.collection.mutable.ReusableBuilder import scala.collection.immutable.{RedBlackTree => RB} import scala.runtime.AbstractFunction1 - +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements immutable sorted sets using a tree. 
* @@ -239,7 +240,7 @@ object TreeSet extends SortedIterableFactory[TreeSet] { def empty[A: Ordering]: TreeSet[A] = new TreeSet[A] - def from[E](it: scala.collection.IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = + def from[E](it: scala.collection.IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = it match { case ts: TreeSet[E] if ordering == ts.ordering => ts case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => @@ -262,14 +263,14 @@ object TreeSet extends SortedIterableFactory[TreeSet] { extends RB.SetHelper[A] with ReusableBuilder[A, TreeSet[A]] { type Tree = RB.Tree[A, Any] - private [this] var tree:RB.Tree[A, Any] = null + private [this] var tree:RB.Tree[A @uncheckedCaptures, Any] = null override def addOne(elem: A): this.type = { tree = mutableUpd(tree, elem) this } - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { xs match { // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= // for the moment we have to force immutability before the union diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala index aa3fac5acd69..d9d33add512d 100644 --- a/tests/pos-special/stdlib/collection/immutable/Vector.scala +++ b/tests/pos-special/stdlib/collection/immutable/Vector.scala @@ -24,6 +24,8 @@ import scala.collection.generic.DefaultSerializable import scala.collection.immutable.VectorInline._ import scala.collection.immutable.VectorStatics._ import scala.collection.mutable.ReusableBuilder +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** $factoryInfo @@ -35,7 +37,7 @@ object Vector extends StrictOptimizedSeqFactory[Vector] { def empty[A]: Vector[A] = Vector0 - def from[E](it: collection.IterableOnce[E]): Vector[E] = + def from[E](it: collection.IterableOnce[E]^): Vector[E] = it match { case v: Vector[E] => v case _ => @@ -191,21 +193,21 @@ sealed abstract class Vector[+A] private[immutable] (private[immutable] final va override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem) override def appended[B >: A](elem: B): Vector[B] = super.appended(elem) override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem) - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): Vector[B] = { + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): Vector[B] = { val k = prefix.knownSize if (k == 0) this else if (k < 0) super.prependedAll(prefix) else prependedAll0(prefix, k) } - override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]): Vector[B] = { + override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): Vector[B] = { val k = suffix.knownSize if (k == 0) this else if (k < 0) super.appendedAll(suffix) else appendedAll0(suffix, k) } - protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = { + protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = { // k >= 0, k = prefix.knownSize val tinyAppendLimit = 4 + vectorSliceCount if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) { @@ -223,11 +225,11 @@ sealed abstract class Vector[+A] private[immutable] (private[immutable] final va } else super.prependedAll(prefix) } - protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + protected[this] def 
appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { // k >= 0, k = suffix.knownSize val tinyAppendLimit = 4 + vectorSliceCount if (k < tinyAppendLimit) { - var v: Vector[B] = this + var v: Vector[B @uncheckedCaptures] = this suffix match { case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) case _ => suffix.iterator.foreach(x => v = v.appended(x)) @@ -263,7 +265,7 @@ sealed abstract class Vector[+A] private[immutable] (private[immutable] final va /** Length of all slices up to and including index */ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) override def toVector: Vector[A] = this @@ -369,10 +371,10 @@ private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) { } } - override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = Vector.from(prefix) - override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = Vector.from(suffix) override protected[this] def ioob(index: Int): IndexOutOfBoundsException = @@ -423,13 +425,13 @@ private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) { protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1 protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case data1b => new Vector1(data1b) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val data1b = append1IfSpace(prefix1, suffix) if(data1b ne null) new Vector1(data1b) else super.appendedAll0(suffix, k) @@ -518,7 +520,7 @@ private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int case 2 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => @@ -529,7 +531,7 @@ private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -640,7 +642,7 @@ private final class Vector3[+A](_prefix1: Arr1, 
private[immutable] val len1: Int case 4 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => @@ -652,7 +654,7 @@ private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -783,7 +785,7 @@ private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int case 6 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => @@ -796,7 +798,7 @@ private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -947,7 +949,7 @@ private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int case 8 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => @@ -961,7 +963,7 @@ private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -1132,7 +1134,7 @@ private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int case 10 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => @@ -1147,7 +1149,7 @@ private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] 
= { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -1814,7 +1816,7 @@ final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { this } - override def addAll(xs: IterableOnce[A]): this.type = xs match { + override def addAll(xs: IterableOnce[A]^): this.type = xs match { case v: Vector[_] => if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v) else addVector(v.asInstanceOf[Vector[A]]) @@ -2183,7 +2185,7 @@ private object VectorStatics { ac.asInstanceOf[Array[T]] } - final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { case it: Iterable[_] => if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { it.size match { @@ -2206,7 +2208,7 @@ private object VectorStatics { } else null } - final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { case it: Iterable[_] => if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { it.size match { @@ -2391,7 +2393,7 @@ private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLe take(_until) } - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val xsLen = xs.length val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len) var copied = 0 @@ -2466,7 +2468,7 @@ private class LongVectorStepper(it: NewVectorIterator[Long]) // The following definitions are needed for binary compatibility with ParVector private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] { - private[immutable] var it: NewVectorIterator[A @uncheckedVariance] = _ + private[immutable] var it: NewVectorIterator[A @uncheckedVariance @uncheckedCaptures] = _ def hasNext: Boolean = it.hasNext def next(): A = it.next() private[collection] def remainingElementCount: Int = it.size diff --git a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala index cd8cf06c5c68..0860a0b47f28 100644 --- a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala @@ -15,6 +15,8 @@ package collection package immutable import scala.annotation.tailrec +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements immutable maps using a vector/map-based data structure, which preserves insertion order. 
* @@ -58,7 +60,7 @@ final class VectorMap[K, +V] private ( } } - override def withDefault[V1 >: V](d: K => V1): Map[K, V1] = + override def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = new Map.WithDefault(this, d) override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = @@ -229,7 +231,7 @@ object VectorMap extends MapFactory[VectorMap] { def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]] - def from[K, V](it: collection.IterableOnce[(K, V)]): VectorMap[K, V] = + def from[K, V](it: collection.IterableOnce[(K, V)]^): VectorMap[K, V] = it match { case vm: VectorMap[K, V] => vm case _ => (newBuilder[K, V] ++= it).result() @@ -241,7 +243,7 @@ object VectorMap extends MapFactory[VectorMap] { private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { private[this] val vectorBuilder = new VectorBuilder[K] private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] - private[this] var aliased: VectorMap[K, V] = _ + private[this] var aliased: VectorMap[K, V] @uncheckedCaptures = _ // OK since VectorMapBuilder is private override def clear(): Unit = { vectorBuilder.clear() diff --git a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala index f2fdb8e3c32e..47fe769c81ef 100644 --- a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala +++ b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala @@ -17,6 +17,7 @@ import scala.Predef.{wrapString => _, assert} import scala.collection.Stepper.EfficientSplit import scala.collection.convert.impl.CharStringStepper import scala.collection.mutable.{Builder, StringBuilder} +import language.experimental.captureChecking /** * This class serves as a wrapper augmenting `String`s with all the operations @@ -34,11 +35,12 @@ import scala.collection.mutable.{Builder, StringBuilder} @SerialVersionUID(3L) final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] with IndexedSeqOps[Char, IndexedSeq, WrappedString] - with Serializable { + with Serializable + with Pure { def apply(i: Int): Char = self.charAt(i) - override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(coll) + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(coll) override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder override def empty: WrappedString = WrappedString.empty @@ -65,13 +67,13 @@ final class WrappedString(private val self: String) extends AbstractSeq[Char] wi r.asInstanceOf[S with EfficientSplit] } - override def startsWith[B >: Char](that: IterableOnce[B], offset: Int = 0): Boolean = + override def startsWith[B >: Char](that: IterableOnce[B]^, offset: Int = 0): Boolean = that match { case s: WrappedString => self.startsWith(s.self, offset) case _ => super.startsWith(that, offset) } - override def endsWith[B >: Char](that: collection.Iterable[B]): Boolean = + override def endsWith[B >: Char](that: collection.Iterable[B]^): Boolean = that match { case s: WrappedString => self.endsWith(s.self) case _ => super.endsWith(that) @@ -88,7 +90,7 @@ final class WrappedString(private val self: String) extends AbstractSeq[Char] wi case _ => super.lastIndexOf(elem, end) } - override def copyToArray[B >: Char](xs: Array[B], start: Int, len: Int): Int = + override def copyToArray[sealed B >: Char](xs: 
Array[B], start: Int, len: Int): Int = (xs: Any) match { case chs: Array[Char] => val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len) @@ -97,13 +99,13 @@ final class WrappedString(private val self: String) extends AbstractSeq[Char] wi case _ => super.copyToArray(xs, start, len) } - override def appendedAll[B >: Char](suffix: IterableOnce[B]): IndexedSeq[B] = + override def appendedAll[B >: Char](suffix: IterableOnce[B]^): IndexedSeq[B] = suffix match { case s: WrappedString => new WrappedString(self concat s.self) case _ => super.appendedAll(suffix) } - override def sameElements[B >: Char](o: IterableOnce[B]) = o match { + override def sameElements[B >: Char](o: IterableOnce[B]^) = o match { case s: WrappedString => self == s.self case _ => super.sameElements(o) } @@ -123,7 +125,7 @@ final class WrappedString(private val self: String) extends AbstractSeq[Char] wi */ @SerialVersionUID(3L) object WrappedString extends SpecificIterableFactory[Char, WrappedString] { - def fromSpecific(it: IterableOnce[Char]): WrappedString = { + def fromSpecific(it: IterableOnce[Char]^): WrappedString = { val b = newBuilder val s = it.knownSize if(s >= 0) b.sizeHint(s) diff --git a/tests/pos-special/stdlib/collection/immutable/package.scala b/tests/pos-special/stdlib/collection/immutable/package.scala index 8458429727e8..985ef22859be 100644 --- a/tests/pos-special/stdlib/collection/immutable/package.scala +++ b/tests/pos-special/stdlib/collection/immutable/package.scala @@ -11,7 +11,7 @@ */ package scala.collection - +import language.experimental.captureChecking package object immutable { type StringOps = scala.collection.StringOps diff --git a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala index c02a10770696..a6413649e219 100644 --- a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala @@ -17,6 +17,8 @@ package mutable import scala.annotation.nowarn import scala.collection.generic.DefaultSerializationProxy import scala.language.implicitConversions +import language.experimental.captureChecking + /** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. * @@ -41,7 +43,7 @@ import scala.language.implicitConversions * rapidly as 2^30^ is approached. * */ -class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean) +class AnyRefMap[K <: AnyRef, sealed V] private[collection] (defaultEntry: K -> V, initialBufferSize: Int, initBlank: Boolean) extends AbstractMap[K, V] with MapOps[K, V, Map, AnyRefMap[K, V]] with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]] @@ -51,7 +53,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi def this() = this(AnyRefMap.exceptionDefault, 16, true) /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: K => V) = this(defaultEntry, 16, true) + def this(defaultEntry: K -> V) = this(defaultEntry, 16, true) /** Creates a new `AnyRefMap` with an initial buffer of specified size. * @@ -61,7 +63,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) /** Creates a new `AnyRefMap` with specified default values and initial buffer size. 
*/ - def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + def this(defaultEntry: K -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) private[this] var mask = 0 private[this] var _size = 0 @@ -87,7 +89,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz } - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): AnyRefMap[K,V] = { + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): AnyRefMap[K,V] = { var sz = coll.knownSize if(sz < 0) sz = 4 val arm = new AnyRefMap[K, V](sz * 2) @@ -393,24 +395,24 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi } @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) + override def + [sealed V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { + override def + [sealed V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { val m = this + elem1 + elem2 if(elems.isEmpty) m else m.concat(elems) } - override def concat[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = { + override def concat[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = { val arm = clone().asInstanceOf[AnyRefMap[K, V2]] xs.iterator.foreach(kv => arm += kv) arm } - override def ++[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = concat(xs) + override def ++[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = concat(xs) @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") - override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = + override def updated[sealed V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = clone().asInstanceOf[AnyRefMap[K, V1]].addOne(key, value) private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { @@ -435,7 +437,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi * Unlike `mapValues`, this method generates a new * collection immediately. 
*/ - def mapValuesNow[V1](f: V => V1): AnyRefMap[K, V1] = { + def mapValuesNow[sealed V1](f: V => V1): AnyRefMap[K, V1] = { val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) val kz = java.util.Arrays.copyOf(_keys, _keys.length) @@ -476,11 +478,11 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi } // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) - def map[K2 <: AnyRef, V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + def map[K2 <: AnyRef, sealed V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = AnyRefMap.from(new View.Map(this, f)) - def flatMap[K2 <: AnyRef, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + def flatMap[K2 <: AnyRef, sealed V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = AnyRefMap.from(new View.FlatMap(this, f)) - def collect[K2 <: AnyRef, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + def collect[K2 <: AnyRef, sealed V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) override def clear(): Unit = { @@ -504,7 +506,7 @@ object AnyRefMap { private final val VacantBit = 0x40000000 private final val MissVacant = 0xC0000000 - private class ExceptionDefault extends (Any => Nothing) with Serializable { + private class ExceptionDefault extends (Any -> Nothing) with Serializable { def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) } private val exceptionDefault = new ExceptionDefault @@ -513,7 +515,7 @@ object AnyRefMap { * * This builder can be reused to create multiple instances. */ - final class AnyRefMapBuilder[K <: AnyRef, V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { + final class AnyRefMapBuilder[K <: AnyRef, sealed V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] def addOne(entry: (K, V)): this.type = { elems += entry @@ -525,11 +527,11 @@ object AnyRefMap { } /** Creates a new `AnyRefMap` with zero or more key/value pairs. */ - def apply[K <: AnyRef, V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) + def apply[K <: AnyRef, sealed V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) - def newBuilder[K <: AnyRef, V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] + def newBuilder[K <: AnyRef, sealed V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] - private def buildFromIterableOnce[K <: AnyRef, V](elems: IterableOnce[(K, V)]): AnyRefMap[K, V] = { + private def buildFromIterableOnce[K <: AnyRef, sealed V](elems: IterableOnce[(K, V)]^): AnyRefMap[K, V] = { var sz = elems.knownSize if(sz < 0) sz = 4 val arm = new AnyRefMap[K, V](sz * 2) @@ -539,10 +541,10 @@ object AnyRefMap { } /** Creates a new empty `AnyRefMap`. 
*/ - def empty[K <: AnyRef, V]: AnyRefMap[K, V] = new AnyRefMap[K, V] + def empty[K <: AnyRef, sealed V]: AnyRefMap[K, V] = new AnyRefMap[K, V] /** Creates a new empty `AnyRefMap` with the supplied default */ - def withDefault[K <: AnyRef, V](default: K => V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) + def withDefault[K <: AnyRef, sealed V](default: K -> V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) /** Creates a new `AnyRefMap` from an existing source collection. A source collection * which is already an `AnyRefMap` gets cloned. @@ -552,7 +554,7 @@ object AnyRefMap { * @tparam V the type of the values * @return a new `AnyRefMap` with the elements of `source` */ - def from[K <: AnyRef, V](source: IterableOnce[(K, V)]): AnyRefMap[K, V] = source match { + def from[K <: AnyRef, sealed V](source: IterableOnce[(K, V)]^): AnyRefMap[K, V] = source match { case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]] case _ => buildFromIterableOnce(source) } @@ -560,7 +562,7 @@ object AnyRefMap { /** Creates a new `AnyRefMap` from arrays of keys and values. * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. */ - def fromZip[K <: AnyRef, V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { + def fromZip[K <: AnyRef, sealed V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { val sz = math.min(keys.length, values.length) val arm = new AnyRefMap[K, V](sz * 2) var i = 0 @@ -572,7 +574,7 @@ object AnyRefMap { /** Creates a new `AnyRefMap` from keys and values. * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. */ - def fromZip[K <: AnyRef, V](keys: Iterable[K], values: Iterable[V]): AnyRefMap[K, V] = { + def fromZip[K <: AnyRef, sealed V](keys: Iterable[K]^, values: Iterable[V]^): AnyRefMap[K, V] = { val sz = math.min(keys.size, values.size) val arm = new AnyRefMap[K, V](sz * 2) val ki = keys.iterator @@ -582,20 +584,20 @@ object AnyRefMap { arm } - implicit def toFactory[K <: AnyRef, V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] + implicit def toFactory[K <: AnyRef, sealed V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] @SerialVersionUID(3L) private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) + def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]^): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] } - implicit def toBuildFrom[K <: AnyRef, V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] + implicit def toBuildFrom[K <: AnyRef, sealed V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]) = AnyRefMap.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]^) = AnyRefMap.from(it) def newBuilder(from: Any) = AnyRefMap.newBuilder[AnyRef, AnyRef] } - implicit def iterableFactory[K <: AnyRef, V]: Factory[(K, V), AnyRefMap[K, V]] = 
toFactory[K, V](this) - implicit def buildFromAnyRefMap[K <: AnyRef, V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) + implicit def iterableFactory[K <: AnyRef, sealed V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) + implicit def buildFromAnyRefMap[K <: AnyRef, sealed V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) } diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala index e3ddeb71ef8e..8fa1e6edd566 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala @@ -20,6 +20,8 @@ import scala.annotation.nowarn import scala.annotation.tailrec import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** An implementation of the `Buffer` class using an array to * represent the assembled sequence internally. Append, update and random @@ -40,7 +42,7 @@ import scala.collection.generic.DefaultSerializable * @define willNotTerminateInf */ @SerialVersionUID(-1582447879429021880L) -class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) +class ArrayBuffer[sealed A] private (initialElements: Array[AnyRef], initialSize: Int) extends AbstractBuffer[A] with IndexedBuffer[A] with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] @@ -151,7 +153,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) } // Overridden to use array copying for efficiency where possible. - override def addAll(elems: IterableOnce[A]): this.type = { + override def addAll(elems: IterableOnce[A]^): this.type = { elems match { case elems: ArrayBuffer[_] => val elemsLength = elems.size0 @@ -180,7 +182,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) this } - def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]): Unit = { + def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]^): Unit = { checkWithinBounds(index, index) elems match { case elems: collection.Iterable[A] => @@ -234,12 +236,12 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") - @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo] = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) + @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo]^{f} = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix = "ArrayBuffer" - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if(copied > 0) { Array.copy(array, 0, xs, start, copied) @@ -256,7 +258,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { if (length > 1) { mutationCount += 1 - scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, 
length) + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]], 0, length) } this } @@ -291,7 +293,7 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { final val DefaultInitialSize = 16 private[this] val emptyArray = new Array[AnyRef](0) - def from[B](coll: collection.IterableOnce[B]): ArrayBuffer[B] = { + def from[sealed B](coll: collection.IterableOnce[B]^): ArrayBuffer[B] = { val k = coll.knownSize if (k >= 0) { // Avoid reallocation of buffer if length is known @@ -303,12 +305,12 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { else new ArrayBuffer[B] ++= coll } - def newBuilder[A]: Builder[A, ArrayBuffer[A]] = + def newBuilder[sealed A]: Builder[A, ArrayBuffer[A]] = new GrowableBuilder[A, ArrayBuffer[A]](empty) { override def sizeHint(size: Int): Unit = elems.ensureSize(size) } - def empty[A]: ArrayBuffer[A] = new ArrayBuffer[A]() + def empty[sealed A]: ArrayBuffer[A] = new ArrayBuffer[A]() /** * @param arrayLen the length of the backing array @@ -357,22 +359,23 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { } // TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` -final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () => Int) - extends AbstractIndexedSeqView[A] { +final class ArrayBufferView[sealed A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () -> Int) + extends AbstractIndexedSeqView[A], Pure { + /* Removed since it poses problems for capture checking @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") def this(array: Array[AnyRef], length: Int) = { // this won't actually track mutation, but it would be a pain to have the implementation // check if we have a method to get the current mutation count or not on every method and // change what it does based on that. hopefully no one ever calls this. 
this({ - val _array = array + val _array: Array[Object] = array val _length = length new ArrayBuffer[A](0) { this.array = _array this.size0 = _length - } + }: ArrayBuffer[A] }, () => 0) - } + }*/ @deprecated("never intended to be public", since = "2.13.7") def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] @@ -392,10 +395,10 @@ final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], muta override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) - override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B]^{f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) - override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala index 454527bcdebd..0620d3d23061 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala @@ -13,6 +13,7 @@ package scala.collection package mutable +import language.experimental.captureChecking import scala.reflect.ClassTag /** A builder class for arrays. @@ -20,7 +21,7 @@ import scala.reflect.ClassTag * @tparam T the type of the elements for the builder. 
*/ @SerialVersionUID(3L) -sealed abstract class ArrayBuilder[T] +sealed abstract class ArrayBuilder[sealed T] extends ReusableBuilder[T, Array[T]] with Serializable { protected[this] var capacity: Int = 0 @@ -57,7 +58,7 @@ sealed abstract class ArrayBuilder[T] this } - override def addAll(xs: IterableOnce[T]): this.type = { + override def addAll(xs: IterableOnce[T]^): this.type = { val k = xs.knownSize if (k > 0) { ensureSize(this.size + k) @@ -493,7 +494,7 @@ object ArrayBuilder { this } - override def addAll(xs: IterableOnce[Unit]): this.type = { + override def addAll(xs: IterableOnce[Unit]^): this.type = { size += xs.iterator.size this } diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala index 205e1607f824..f22aacec65c5 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala @@ -18,6 +18,7 @@ import scala.annotation.nowarn import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable import scala.reflect.ClassTag +import language.experimental.captureChecking /** An implementation of a double-ended queue that internally uses a resizable circular buffer. * @@ -36,7 +37,7 @@ import scala.reflect.ClassTag * @define mayNotTerminateInf * @define willNotTerminateInf */ -class ArrayDeque[A] protected ( +class ArrayDeque[sealed A] protected ( protected var array: Array[AnyRef], private[ArrayDeque] var start: Int, private[ArrayDeque] var end: Int @@ -99,7 +100,7 @@ class ArrayDeque[A] protected ( this } - override def prependAll(elems: IterableOnce[A]): this.type = { + override def prependAll(elems: IterableOnce[A]^): this.type = { val it = elems.iterator if (it.nonEmpty) { val n = length @@ -130,7 +131,7 @@ class ArrayDeque[A] protected ( this } - override def addAll(elems: IterableOnce[A]): this.type = { + override def addAll(elems: IterableOnce[A]^): this.type = { elems.knownSize match { case srcLength if srcLength > 0 => ensureSize(srcLength + length) @@ -176,7 +177,7 @@ class ArrayDeque[A] protected ( } } - def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { requireBounds(idx, length+1) val n = length if (idx == 0) { @@ -462,7 +463,7 @@ class ArrayDeque[A] protected ( protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] = new ArrayDeque[A](array, start = 0, end) - override def copyToArray[B >: A](dest: Array[B], destStart: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](dest: Array[B], destStart: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len) if (copied > 0) { copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len) @@ -470,7 +471,7 @@ class ArrayDeque[A] protected ( copied } - override def toArray[B >: A: ClassTag]: Array[B] = + override def toArray[sealed B >: A: ClassTag]: Array[B] = copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length) /** @@ -525,7 +526,7 @@ class ArrayDeque[A] protected ( @SerialVersionUID(3L) object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { - def from[B](coll: collection.IterableOnce[B]): ArrayDeque[B] = { + def from[sealed B](coll: collection.IterableOnce[B]^): ArrayDeque[B] = { val s = coll.knownSize if (s >= 0) { val array = alloc(s) @@ -535,14 +536,14 @@ object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { } 
else new ArrayDeque[B]() ++= coll } - def newBuilder[A]: Builder[A, ArrayDeque[A]] = + def newBuilder[sealed A]: Builder[A, ArrayDeque[A]] = new GrowableBuilder[A, ArrayDeque[A]](empty) { override def sizeHint(size: Int): Unit = { elems.ensureSize(size) } } - def empty[A]: ArrayDeque[A] = new ArrayDeque[A]() + def empty[sealed A]: ArrayDeque[A] = new ArrayDeque[A]() final val DefaultInitialSize = 16 diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala index 74ab6b2107e5..bd3a208a94c0 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala @@ -19,6 +19,8 @@ import scala.collection.Stepper.EfficientSplit import scala.collection.convert.impl._ import scala.reflect.ClassTag import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same @@ -34,23 +36,25 @@ import scala.util.hashing.MurmurHash3 * @define willNotTerminateInf */ @SerialVersionUID(3L) -sealed abstract class ArraySeq[T] +sealed abstract class ArraySeq[sealed T] extends AbstractSeq[T] with IndexedSeq[T] with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] - with Serializable { + with Serializable + with Pure { override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged - override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = { + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = { val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] val s = coll.knownSize if(s > 0) b.sizeHint(s) b ++= coll ArraySeq.make(b.result()) } - override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(elemTag).asInstanceOf[Builder[T, ArraySeq[T]]] + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = + ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive @@ -71,9 +75,9 @@ sealed abstract class ArraySeq[T] override protected[this] def className = "ArraySeq" /** Clones this object, including the underlying Array. 
*/ - override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]] + override def clone(): ArraySeq[T] = ArraySeq.make[T](array.clone().asInstanceOf[Array[T]]) - override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: T](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if(copied > 0) { Array.copy(array, 0, xs, start, copied) @@ -89,10 +93,10 @@ sealed abstract class ArraySeq[T] } override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = - ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] + ArraySeq.make(array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { - if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]]) + if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]]) this } } @@ -107,9 +111,9 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] - def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + def from[sealed A : ClassTag](it: scala.collection.IterableOnce[A]^): ArraySeq[A] = make(Array.from[A](it)) - def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) + def newBuilder[sealed A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) /** * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type @@ -123,7 +127,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` * at runtime. */ - def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + def make[sealed T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { case null => null case x: Array[AnyRef] => new ofRef[AnyRef](x) case x: Array[Int] => new ofInt(x) diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala index 69ecc122c1f9..dcb8a157389b 100644 --- a/tests/pos-special/stdlib/collection/mutable/BitSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/BitSet.scala @@ -17,6 +17,7 @@ package mutable import scala.collection.immutable.Range import BitSetOps.{LogWL, MaxSize} import scala.annotation.implicitNotFound +import language.experimental.captureChecking /** * A class for mutable bitsets. 
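The ArraySeq and ArrayBuffer hunks above show the two devices this series uses to reconcile `Array` with capture checking. A `sealed` type parameter promises that its argument does not capture the universal capability, which is what makes it safe to store such values in mutable slots like array elements; where a method-local type variable such as `B >: A` cannot make that promise, the cast is annotated `@uncheckedCaptures`, which is sound in `sortInPlace` because sorting only permutes values the array already holds. A minimal sketch of both devices, assuming only the experimental import (`Buf` and `sortInPlaceLike` are illustrative names, not part of the patch):

    import language.experimental.captureChecking
    import scala.annotation.unchecked.uncheckedCaptures

    // `sealed A` lets A-values live in the mutable array; `B` is an ordinary
    // type variable, so the element-type check on `Array[B]` is waived with
    // @uncheckedCaptures, exactly as in the sortInPlace hunks above.
    class Buf[sealed A](private var elems: Array[AnyRef], private var size0: Int) {
      def sortInPlaceLike[B >: A]()(implicit ord: Ordering[B]): this.type = {
        if (size0 > 1)
          scala.util.Sorting.stableSort(elems.asInstanceOf[Array[B @uncheckedCaptures]], 0, size0)
        this
      }
    }
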
@@ -47,7 +48,7 @@ class BitSet(protected[collection] final var elems: Array[Long]) def this() = this(0) - override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder override def empty: BitSet = bitSetFactory.empty @@ -187,7 +188,7 @@ class BitSet(protected[collection] final var elems: Array[Long]) override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = super.zip(that) - override def addAll(xs: IterableOnce[Int]): this.type = xs match { + override def addAll(xs: IterableOnce[Int]^): this.type = xs match { case bs: collection.BitSet => this |= bs case range: Range => @@ -260,7 +261,7 @@ class BitSet(protected[collection] final var elems: Array[Long]) super.subsetOf(other) } - override def subtractAll(xs: IterableOnce[Int]): this.type = xs match { + override def subtractAll(xs: IterableOnce[Int]^): this.type = xs match { case bs: collection.BitSet => this &~= bs case other => super.subtractAll(other) } @@ -360,7 +361,7 @@ class BitSet(protected[collection] final var elems: Array[Long]) @SerialVersionUID(3L) object BitSet extends SpecificIterableFactory[Int, BitSet] { - def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = Growable.from(empty, it) + def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = Growable.from(empty, it) def empty: BitSet = new BitSet() diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala index 847b924735ce..0f472dc9ac82 100644 --- a/tests/pos-special/stdlib/collection/mutable/Buffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala @@ -14,10 +14,12 @@ package scala.collection package mutable import scala.annotation.nowarn +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** A `Buffer` is a growable and shrinkable `Seq`. */ -trait Buffer[A] +trait Buffer[sealed A] extends Seq[A] with SeqOps[A, Buffer, Buffer[A]] with Growable[A] @@ -48,19 +50,19 @@ trait Buffer[A] /** Appends the elements contained in a iterable object to this buffer. * @param xs the iterable object containing the elements to append. */ - @`inline` final def appendAll(xs: IterableOnce[A]): this.type = addAll(xs) + @`inline` final def appendAll(xs: IterableOnce[A]^): this.type = addAll(xs) /** Alias for `prepend` */ @`inline` final def +=: (elem: A): this.type = prepend(elem) - def prependAll(elems: IterableOnce[A]): this.type = { insertAll(0, elems); this } + def prependAll(elems: IterableOnce[A]^): this.type = { insertAll(0, elems); this } @deprecated("Use prependAll instead", "2.13.0") @`inline` final def prepend(elems: A*): this.type = prependAll(elems) /** Alias for `prependAll` */ - @inline final def ++=:(elems: IterableOnce[A]): this.type = prependAll(elems) + @inline final def ++=:(elems: IterableOnce[A]^): this.type = prependAll(elems) /** Inserts a new element at a given index into this buffer. * @@ -81,7 +83,7 @@ trait Buffer[A] * @throws IndexOutOfBoundsException if `idx` is out of bounds. */ @throws[IndexOutOfBoundsException] - def insertAll(idx: Int, elems: IterableOnce[A]): Unit + def insertAll(idx: Int, elems: IterableOnce[A]^): Unit /** Removes the element at a given index position. 
* @@ -103,7 +105,7 @@ trait Buffer[A] @throws[IndexOutOfBoundsException] @throws[IllegalArgumentException] def remove(idx: Int, count: Int): Unit - + /** Removes a single element from this buffer, at its first occurrence. * If the buffer does not contain that element, it is unchanged. * @@ -132,7 +134,7 @@ trait Buffer[A] @deprecated("use dropRightInPlace instead", since = "2.13.4") def trimEnd(n: Int): Unit = dropRightInPlace(n) - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type // +=, ++=, clear inherited from Growable // Per remark of @ichoran, we should preferably not have these: @@ -180,11 +182,11 @@ trait IndexedBuffer[A] extends IndexedSeq[A] override def iterableFactory: SeqFactory[IndexedBuffer] = IndexedBuffer - def flatMapInPlace(f: A => IterableOnce[A]): this.type = { + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { // There's scope for a better implementation which copies elements in place. var i = 0 val s = size - val newElems = new Array[IterableOnce[A]](s) + val newElems = new Array[(IterableOnce[A]^) @uncheckedCaptures](s) while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 @@ -207,7 +209,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] if (i == j) this else takeInPlace(j) } - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type = { val replaced0 = math.min(math.max(replaced, 0), length) val i = math.min(math.max(from, 0), length) var j = 0 diff --git a/tests/pos-special/stdlib/collection/mutable/Builder.scala b/tests/pos-special/stdlib/collection/mutable/Builder.scala index 0ecc06dff061..dd57cb75da91 100644 --- a/tests/pos-special/stdlib/collection/mutable/Builder.scala +++ b/tests/pos-special/stdlib/collection/mutable/Builder.scala @@ -12,6 +12,9 @@ package scala.collection.mutable +import language.experimental.captureChecking + + /** Base trait for collection builders. * * After calling `result()` the behavior of a Builder (which is not also a [[scala.collection.mutable.ReusableBuilder]]) @@ -20,7 +23,8 @@ package scala.collection.mutable * * @see [[scala.collection.mutable.ReusableBuilder]] for Builders which can be reused after calling `result()` */ -trait Builder[-A, +To] extends Growable[A] { self => +trait Builder[-A, +To] extends Growable[A] { + self: Builder[A, To]^ => /** Clears the contents of this builder. * After execution of this method the builder will contain no elements. @@ -51,7 +55,7 @@ trait Builder[-A, +To] extends Growable[A] { self => * @param coll the collection which serves as a hint for the result's size. * @param delta a correction to add to the `coll.size` to produce the size hint. */ - final def sizeHint(coll: scala.collection.IterableOnce[_], delta: Int = 0): Unit = { + final def sizeHint(coll: scala.collection.IterableOnce[_]^, delta: Int = 0): Unit = { val s = coll.knownSize if (s != -1) sizeHint(s + delta) } @@ -69,7 +73,7 @@ trait Builder[-A, +To] extends Growable[A] { self => * than collection's size are reduced. 
*/ // should probably be `boundingColl: IterableOnce[_]`, but binary compatibility - final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]): Unit = { + final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]^): Unit = { val s = boundingColl.knownSize if (s != -1) { sizeHint(scala.math.min(s, size)) @@ -77,10 +81,10 @@ trait Builder[-A, +To] extends Growable[A] { self => } /** A builder resulting from this builder my mapping the result using `f`. */ - def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo] = new Builder[A, NewTo] { + def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo]^{this, f} = new Builder[A, NewTo] { def addOne(x: A): this.type = { self += x; this } def clear(): Unit = self.clear() - override def addAll(xs: IterableOnce[A]): this.type = { self ++= xs; this } + override def addAll(xs: IterableOnce[A]^): this.type = { self ++= xs; this } override def sizeHint(size: Int): Unit = self.sizeHint(size) def result(): NewTo = f(self.result()) override def knownSize: Int = self.knownSize diff --git a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala index b9598904375d..152b6cc9ffc7 100644 --- a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala +++ b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala @@ -13,34 +13,37 @@ package scala package collection package mutable +import language.experimental.captureChecking private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { + this: CheckedIndexedSeqView[A]^ => + protected val mutationCount: () => Int - override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) - override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) - - override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) - override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) - override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) - override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) - override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) - override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) - override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) - override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) - override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) - override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) - - override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, 
this)(mutationCount) + override def iterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) } private[mutable] object CheckedIndexedSeqView { import IndexedSeqView.SomeIndexedSeqOps @SerialVersionUID(3L) - private[mutable] class CheckedIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + private[mutable] class CheckedIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) extends IndexedSeqView.IndexedSeqViewIterator[A](self) { private[this] val expectedCount = mutationCount override def hasNext: Boolean = { @@ -50,7 +53,7 @@ private[mutable] object CheckedIndexedSeqView { } @SerialVersionUID(3L) - private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { private[this] val expectedCount = mutationCount override def hasNext: Boolean = { @@ -60,43 +63,43 @@ private[mutable] object CheckedIndexedSeqView { } @SerialVersionUID(3L) - class Id[+A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + class Id[+A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A)(protected val mutationCount: () => Int) + class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A)(protected val mutationCount: () => Int) extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class 
Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Take[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B)(protected val mutationCount: () => Int) + class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B)(protected val mutationCount: () => Int) extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] @SerialVersionUID(3L) - class Reverse[A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + class Reverse[A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { override def reverse: IndexedSeqView[A] = underlying match { case x: IndexedSeqView[A] => x @@ -105,7 +108,7 @@ private[mutable] object CheckedIndexedSeqView { } @SerialVersionUID(3L) - class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int)(protected val mutationCount: () => Int) + class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int)(protected val mutationCount: () => Int) extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { protected val lo = from max 0 protected val hi = (until max 0) min underlying.length diff --git a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala index 940ecf3549ad..39149e98cbf0 100644 --- a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala @@ -11,7 +11,7 @@ */ package scala.collection.mutable - +import language.experimental.captureChecking /** A trait for cloneable collections. 
* diff --git a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala index 8542b5b56a01..2b27efb6eac1 100644 --- a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala @@ -18,6 +18,7 @@ import scala.annotation.{implicitNotFound, tailrec, unused} import scala.annotation.unchecked.uncheckedVariance import scala.collection.generic.DefaultSerializationProxy import scala.runtime.Statics +import language.experimental.captureChecking /** This class implements mutable maps using a hashtable with red-black trees in the buckets for good * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality @@ -32,7 +33,7 @@ import scala.runtime.Statics * @define mayNotTerminateInf * @define willNotTerminateInf */ -final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) +final class CollisionProofHashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) extends AbstractMap[K, V] with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] @@ -63,7 +64,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) - override protected def fromSpecific(coll: IterableOnce[(K, V)] @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def fromSpecific(coll: (IterableOnce[(K, V)]^) @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] @@ -173,7 +174,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double } } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { val k = xs.knownSize if(k > 0) sizeHint(contentSize + k) super.addAll(xs) @@ -442,13 +443,13 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = sortedMapFactory.from(new View.Collect(this, pf)) - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) /** Alias for `concat` */ - @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = concat(xs) + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = concat(xs) @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = @@ -743,17 +744,17 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double object 
CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." - def from[K : Ordering, V](it: scala.collection.IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = { + def from[sealed K : Ordering, sealed V](it: scala.collection.IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = { val k = it.knownSize val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it } - def empty[K : Ordering, V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + def empty[sealed K : Ordering, sealed V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] - def newBuilder[K : Ordering, V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + def newBuilder[sealed K : Ordering, sealed V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - def newBuilder[K : Ordering, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + def newBuilder[sealed K : Ordering, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { override def sizeHint(size: Int) = elems.sizeHint(size) } @@ -765,8 +766,8 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { final def defaultInitialCapacity: Int = 16 @SerialVersionUID(3L) - private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) } @@ -788,7 +789,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { /////////////////////////// Red-Black Tree Node - final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + final class RBNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { @@ -819,17 +820,17 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { } } - @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + @`inline` 
private def leaf[sealed A, sealed B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = new RBNode(key, hash, value, red, null, null, parent) - @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = + @tailrec private def minNodeNonNull[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = if (node.left eq null) node else minNodeNonNull(node.left) /** * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, * therefore, the last node), this method returns `null`. */ - private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { + private def successor[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = { if (node.right ne null) minNodeNonNull(node.right) else { var x = node @@ -842,7 +843,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { } } - private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private final class RBNodesIterator[sealed A, sealed B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) def hasNext: Boolean = nextNode ne null @@ -858,7 +859,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { /////////////////////////// Linked List Node - private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + private final class LLNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { override def toString = s"LLNode($key, $value, $hash) -> $next" private[this] def eq(a: Any, b: Any): Boolean = diff --git a/tests/pos-special/stdlib/collection/mutable/Growable.scala b/tests/pos-special/stdlib/collection/mutable/Growable.scala index 914742b9013a..3b5eabac37bf 100644 --- a/tests/pos-special/stdlib/collection/mutable/Growable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Growable.scala @@ -14,6 +14,8 @@ package scala package collection package mutable +import language.experimental.captureChecking + /** This trait forms part of collections that can be augmented * using a `+=` operator and that can be cleared of all elements using * a `clear` method. @@ -54,7 +56,7 @@ trait Growable[-A] extends Clearable { * @param xs the IterableOnce producing the elements to $add. * @return the $coll itself. */ - def addAll(xs: IterableOnce[A]): this.type = { + def addAll(xs: IterableOnce[A]^): this.type = { if (xs.asInstanceOf[AnyRef] eq this) addAll(Buffer.from(xs)) // avoid mutating under our own iterator else { val it = xs.iterator @@ -66,7 +68,7 @@ trait Growable[-A] extends Clearable { } /** Alias for `addAll` */ - @`inline` final def ++= (xs: IterableOnce[A]): this.type = addAll(xs) + @`inline` final def ++= (xs: IterableOnce[A]^): this.type = addAll(xs) /** @return The number of elements in the collection under construction, if it can be cheaply computed, * -1 otherwise. The default implementation always returns -1. 
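Growable is where the `IterableOnce[A]^` convention in all of the `addAll` signatures pays off: a caller may pass a capability-capturing source (for instance an iterator reading from an open file), and because no capture of `xs` appears in the result type, the checker guarantees the collection does not retain the source after the call returns. A sketch of that contract under the same experimental import (`copyInto` is an invented helper mirroring the shape of `Growable.from` below):

    import language.experimental.captureChecking
    import scala.collection.mutable.Growable

    // `xs` may capture capabilities and is only traversed here; since the
    // result type `buf.type` mentions no capture of `xs`, nothing reachable
    // from `xs` may be retained once the call returns.
    def copyInto[A](buf: Growable[A], xs: IterableOnce[A]^): buf.type = {
      val it = xs.iterator // `it` captures whatever `xs` captures
      while (it.hasNext) buf.addOne(it.next())
      buf
    }
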
@@ -83,7 +85,7 @@ object Growable { * @tparam A Element type * @return The filled instance */ - def from[A](empty: Growable[A], it: collection.IterableOnce[A]): empty.type = empty ++= it + def from[A](empty: Growable[A], it: collection.IterableOnce[A]^): empty.type = empty ++= it } diff --git a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala index 7e945dffb99e..4d6f989e6f3d 100644 --- a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala @@ -12,7 +12,7 @@ package scala package collection.mutable - +import language.experimental.captureChecking /** The canonical builder for collections that are growable, i.e. that support an * efficient `+=` method which adds an element to the collection. @@ -31,7 +31,7 @@ class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To) def addOne(elem: Elem): this.type = { elems += elem; this } - override def addAll(xs: IterableOnce[Elem]): this.type = { elems.addAll(xs); this } + override def addAll(xs: IterableOnce[Elem]^): this.type = { elems.addAll(xs); this } override def knownSize: Int = elems.knownSize } diff --git a/tests/pos-special/stdlib/collection/mutable/HashMap.scala b/tests/pos-special/stdlib/collection/mutable/HashMap.scala index 7ad3cf3869e8..ab45e7ffc73d 100644 --- a/tests/pos-special/stdlib/collection/mutable/HashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/HashMap.scala @@ -17,6 +17,7 @@ import scala.annotation.{nowarn, tailrec} import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializationProxy import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** This class implements mutable maps using a hashtable. 
* @@ -32,7 +33,7 @@ import scala.util.hashing.MurmurHash3 * @define willNotTerminateInf */ @deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0") -class HashMap[K, V](initialCapacity: Int, loadFactor: Double) +class HashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double) extends AbstractMap[K, V] with MapOps[K, V, HashMap, HashMap[K, V]] with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]] @@ -94,7 +95,7 @@ class HashMap[K, V](initialCapacity: Int, loadFactor: Double) if(target > table.length) growTable(target) } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { sizeHint(xs.knownSize) xs match { @@ -182,7 +183,7 @@ class HashMap[K, V](initialCapacity: Int, loadFactor: Double) } } - override def subtractAll(xs: IterableOnce[K]): this.type = { + override def subtractAll(xs: IterableOnce[K]^): this.type = { if (size == 0) { return this } @@ -596,17 +597,17 @@ class HashMap[K, V](initialCapacity: Int, loadFactor: Double) @SerialVersionUID(3L) object HashMap extends MapFactory[HashMap] { - def empty[K, V]: HashMap[K, V] = new HashMap[K, V] + def empty[sealed K, sealed V]: HashMap[K, V] = new HashMap[K, V] - def from[K, V](it: collection.IterableOnce[(K, V)]): HashMap[K, V] = { + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): HashMap[K, V] = { val k = it.knownSize val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity new HashMap[K, V](cap, defaultLoadFactor).addAll(it) } - def newBuilder[K, V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + def newBuilder[sealed K, sealed V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - def newBuilder[K, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = + def newBuilder[sealed K, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) { override def sizeHint(size: Int) = elems.sizeHint(size) } @@ -618,8 +619,8 @@ object HashMap extends MapFactory[HashMap] { final def defaultInitialCapacity: Int = 16 @SerialVersionUID(3L) - private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) + private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor) } diff --git a/tests/pos-special/stdlib/collection/mutable/HashSet.scala b/tests/pos-special/stdlib/collection/mutable/HashSet.scala index 425721a41626..e8c055ff15ef 100644 --- a/tests/pos-special/stdlib/collection/mutable/HashSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/HashSet.scala @@ -17,6 +17,7 @@ import scala.annotation.tailrec import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializationProxy import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** 
This class implements mutable sets using a hashtable. * @@ -28,7 +29,7 @@ import scala.util.hashing.MurmurHash3 * @define mayNotTerminateInf * @define willNotTerminateInf */ -final class HashSet[A](initialCapacity: Int, loadFactor: Double) +final class HashSet[sealed A](initialCapacity: Int, loadFactor: Double) extends AbstractSet[A] with SetOps[A, HashSet, HashSet[A]] with StrictOptimizedIterableOps[A, HashSet, HashSet[A]] @@ -90,7 +91,7 @@ final class HashSet[A](initialCapacity: Int, loadFactor: Double) addElem(elem, computeHash(elem)) } - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { sizeHint(xs.knownSize) xs match { case hs: immutable.HashSet[A] => @@ -114,7 +115,7 @@ final class HashSet[A](initialCapacity: Int, loadFactor: Double) } } - override def subtractAll(xs: IterableOnce[A]): this.type = { + override def subtractAll(xs: IterableOnce[A]^): this.type = { if (size == 0) { return this } @@ -406,17 +407,17 @@ final class HashSet[A](initialCapacity: Int, loadFactor: Double) @SerialVersionUID(3L) object HashSet extends IterableFactory[HashSet] { - def from[B](it: scala.collection.IterableOnce[B]): HashSet[B] = { + def from[sealed B](it: scala.collection.IterableOnce[B]^): HashSet[B] = { val k = it.knownSize val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity new HashSet[B](cap, defaultLoadFactor) ++= it } - def empty[A]: HashSet[A] = new HashSet[A] + def empty[sealed A]: HashSet[A] = new HashSet[A] - def newBuilder[A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + def newBuilder[sealed A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - def newBuilder[A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = + def newBuilder[sealed A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) { override def sizeHint(size: Int) = elems.sizeHint(size) } @@ -428,8 +429,8 @@ object HashSet extends IterableFactory[HashSet] { final def defaultInitialCapacity: Int = 16 @SerialVersionUID(3L) - private final class DeserializationFactory[A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it + private final class DeserializationFactory[sealed A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]^): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor) } diff --git a/tests/pos-special/stdlib/collection/mutable/HashTable.scala b/tests/pos-special/stdlib/collection/mutable/HashTable.scala index 4153bd532163..a3534e322cf3 100644 --- a/tests/pos-special/stdlib/collection/mutable/HashTable.scala +++ b/tests/pos-special/stdlib/collection/mutable/HashTable.scala @@ -19,6 +19,7 @@ import java.lang.Integer.{numberOfLeadingZeros, rotateRight} import scala.util.hashing.byteswap32 import java.lang.Integer +import language.experimental.captureChecking /** This class can be used to construct data structures that are based * on hashtables. Class `HashTable[A]` implements a hashtable @@ -36,7 +37,7 @@ import java.lang.Integer * @tparam A type of the elements contained in this hash table. 
*/ // Not used in the standard library, but used in scala-parallel-collections -private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { +private[collection] trait HashTable[sealed A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { // Replacing Entry type parameter by abstract type member here allows to not expose to public // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. // However, I'm afraid it's too late now for such breaking change. @@ -411,7 +412,7 @@ private[collection] object HashTable { /** Class used internally. */ -private[collection] trait HashEntry[A, E <: HashEntry[A, E]] { +private[collection] trait HashEntry[A, sealed E <: HashEntry[A, E]] { val key: A var next: E = _ } diff --git a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala index c801f073fb0d..1af98162e9f3 100644 --- a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala @@ -13,6 +13,7 @@ package scala package collection package mutable +import language.experimental.captureChecking /** diff --git a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala index 24d54905de22..022970b4c56f 100644 --- a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala +++ b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala @@ -12,6 +12,7 @@ package scala.collection package mutable +import language.experimental.captureChecking trait IndexedSeq[T] extends Seq[T] with scala.collection.IndexedSeq[T] diff --git a/tests/pos-special/stdlib/collection/mutable/Iterable.scala b/tests/pos-special/stdlib/collection/mutable/Iterable.scala index d05aeed88044..bf286157b376 100644 --- a/tests/pos-special/stdlib/collection/mutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Iterable.scala @@ -13,11 +13,13 @@ package scala.collection.mutable import scala.collection.{IterableFactory, IterableFactoryDefaults} +import language.experimental.captureChecking trait Iterable[A] extends collection.Iterable[A] with collection.IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { + this: Iterable[A]^ => override def iterableFactory: IterableFactory[Iterable] = Iterable } @@ -31,4 +33,5 @@ trait Iterable[A] object Iterable extends IterableFactory.Delegate[Iterable](ArrayBuffer) /** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ -abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A] +abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A]: + this: AbstractIterable[A]^ => diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala index bc663f1d37d8..a253e8738b26 100644 --- a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala @@ -17,6 +17,7 @@ package mutable import scala.annotation.{nowarn, tailrec} import scala.collection.generic.DefaultSerializable import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** This class implements mutable maps using a hashtable. 
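The self-type `this: Iterable[A]^ =>` added above, like `this: Builder[A, To]^ =>` earlier in this patch, is the idiom used for traits whose instances may capture capabilities even though the trait itself introduces none: once the self-type is declared, members can name `this` in their capture sets. A compressed sketch with invented names (`Source`, `Digits`), assuming the experimental import:

    import language.experimental.captureChecking

    trait Source[A] {
      this: Source[A]^ =>               // instances may capture capabilities
      def iterator: Iterator[A]^{this}  // the result captures at most `this`
    }

    // A pure implementation still conforms: its capture set is empty,
    // so `^{this}` is vacuous for it.
    class Digits extends Source[Int] {
      def iterator: Iterator[Int] = Iterator.range(0, 10)
    }
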
@@ -33,7 +34,7 @@ import scala.util.hashing.MurmurHash3 * @define orderDependentFold */ @deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11") -class LinkedHashMap[K, V] +class LinkedHashMap[sealed K, sealed V] extends AbstractMap[K, V] with SeqMap[K, V] with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] @@ -476,20 +477,20 @@ class LinkedHashMap[K, V] @SerialVersionUID(3L) object LinkedHashMap extends MapFactory[LinkedHashMap] { - def empty[K, V] = new LinkedHashMap[K, V] + def empty[sealed K, sealed V] = new LinkedHashMap[K, V] - def from[K, V](it: collection.IterableOnce[(K, V)]) = { + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^) = { val newlhm = empty[K, V] newlhm.sizeHint(it.knownSize) newlhm.addAll(it) newlhm } - def newBuilder[K, V] = new GrowableBuilder(empty[K, V]) + def newBuilder[sealed K, sealed V] = new GrowableBuilder(empty[K, V]) /** Class for the linked hash map entry, used internally. */ - private[mutable] final class LinkedEntry[K, V](val key: K, val hash: Int, var value: V) { + private[mutable] final class LinkedEntry[sealed K, sealed V](val key: K, val hash: Int, var value: V) { var earlier: LinkedEntry[K, V] = null var later: LinkedEntry[K, V] = null var next: LinkedEntry[K, V] = null diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala index 0c01f8ea79ea..a895034a852c 100644 --- a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala @@ -17,6 +17,7 @@ package mutable import scala.annotation.{nowarn, tailrec} import scala.collection.generic.DefaultSerializable import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** This class implements mutable sets using a hashtable. * The iterator and all traversal methods of this class visit elements in the order they were inserted. @@ -31,7 +32,7 @@ import scala.util.hashing.MurmurHash3 * @define orderDependentFold */ @deprecatedInheritance("LinkedHashSet will be made final", "2.13.11") -class LinkedHashSet[A] +class LinkedHashSet[sealed A] extends AbstractSet[A] with SetOps[A, LinkedHashSet, LinkedHashSet[A]] with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] @@ -314,20 +315,20 @@ class LinkedHashSet[A] @SerialVersionUID(3L) object LinkedHashSet extends IterableFactory[LinkedHashSet] { - override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] + override def empty[sealed A]: LinkedHashSet[A] = new LinkedHashSet[A] - def from[E](it: collection.IterableOnce[E]) = { + def from[sealed E](it: collection.IterableOnce[E]^) = { val newlhs = empty[E] newlhs.sizeHint(it.knownSize) newlhs.addAll(it) newlhs } - def newBuilder[A] = new GrowableBuilder(empty[A]) + def newBuilder[sealed A] = new GrowableBuilder(empty[A]) /** Class for the linked hash set entry, used internally. 
*/ - private[mutable] final class Entry[A](val key: A, val hash: Int) { + private[mutable] final class Entry[sealed A](val key: A, val hash: Int) { var earlier: Entry[A] = null var later: Entry[A] = null var next: Entry[A] = null diff --git a/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala index d66525763163..4f607c770130 100644 --- a/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala @@ -19,6 +19,8 @@ import java.lang.{IllegalArgumentException, IndexOutOfBoundsException} import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence +import scala.annotation.unchecked.uncheckedCaptures +import language.experimental.captureChecking /** A `Buffer` implementation backed by a list. It provides constant time * prepend and append. Most other operations are linear. @@ -36,7 +38,7 @@ import scala.runtime.Statics.releaseFence * @define willNotTerminateInf */ @SerialVersionUID(-8428291952499836345L) -class ListBuffer[A] +class ListBuffer[sealed A] extends AbstractBuffer[A] with SeqOps[A, ListBuffer, ListBuffer[A]] with StrictOptimizedSeqOps[A, ListBuffer, ListBuffer[A]] @@ -121,7 +123,7 @@ class ListBuffer[A] } // MUST only be called on fresh instances - private def freshFrom(xs: IterableOnce[A]): this.type = { + private def freshFrom(xs: IterableOnce[A]^): this.type = { val it = xs.iterator if (it.hasNext) { var len = 1 @@ -140,7 +142,7 @@ class ListBuffer[A] this } - override final def addAll(xs: IterableOnce[A]): this.type = { + override final def addAll(xs: IterableOnce[A]^): this.type = { val it = xs.iterator if (it.hasNext) { val fresh = new ListBuffer[A].freshFrom(it) @@ -248,7 +250,7 @@ class ListBuffer[A] } } - def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { if (idx < 0 || idx > len) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${len-1})") val it = elems.iterator if (it.hasNext) { @@ -305,7 +307,7 @@ class ListBuffer[A] this } - def flatMapInPlace(f: A => IterableOnce[A]): this.type = { + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { mutationCount += 1 var src = first var dst: List[A] = null @@ -345,7 +347,7 @@ class ListBuffer[A] this } - def patchInPlace(from: Int, patch: collection.IterableOnce[A], replaced: Int): this.type = { + def patchInPlace(from: Int, patch: collection.IterableOnce[A]^, replaced: Int): this.type = { val _len = len val _from = math.max(from, 0) // normalized val _replaced = math.max(replaced, 0) // normalized @@ -395,9 +397,9 @@ class ListBuffer[A] @SerialVersionUID(3L) object ListBuffer extends StrictOptimizedSeqFactory[ListBuffer] { - def from[A](coll: collection.IterableOnce[A]): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) + def from[sealed A](coll: collection.IterableOnce[A]^): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) - def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) + def newBuilder[sealed A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) - def empty[A]: ListBuffer[A] = new ListBuffer[A] + def empty[A]: ListBuffer[A] = new ListBuffer[A @uncheckedCaptures] } diff --git a/tests/pos-special/stdlib/collection/mutable/ListMap.scala b/tests/pos-special/stdlib/collection/mutable/ListMap.scala index 7cc5aa227757..8ddbc264e47b 100644 --- a/tests/pos-special/stdlib/collection/mutable/ListMap.scala +++ 
b/tests/pos-special/stdlib/collection/mutable/ListMap.scala @@ -16,6 +16,7 @@ package mutable import scala.annotation.tailrec import scala.collection.generic.DefaultSerializable import scala.collection.immutable.List +import language.experimental.captureChecking /** A simple mutable map backed by a list, so it preserves insertion order. * @@ -30,7 +31,7 @@ import scala.collection.immutable.List * @define orderDependentFold */ @deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") -class ListMap[K, V] +class ListMap[sealed K, sealed V] extends AbstractMap[K, V] with MapOps[K, V, ListMap, ListMap[K, V]] with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]] @@ -76,7 +77,7 @@ class ListMap[K, V] @SerialVersionUID(3L) @deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") object ListMap extends MapFactory[ListMap] { - def empty[K, V]: ListMap[K, V] = new ListMap[K, V] - def from[K, V](it: IterableOnce[(K, V)]): ListMap[K,V] = Growable.from(empty[K, V], it) - def newBuilder[K, V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V]) + def empty[sealed K, sealed V]: ListMap[K, V] = new ListMap[K, V] + def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): ListMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[sealed K, sealed V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V]) } diff --git a/tests/pos-special/stdlib/collection/mutable/LongMap.scala b/tests/pos-special/stdlib/collection/mutable/LongMap.scala index af34ca4ab8c9..2c757160ec77 100644 --- a/tests/pos-special/stdlib/collection/mutable/LongMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/LongMap.scala @@ -15,6 +15,7 @@ package mutable import scala.collection.generic.DefaultSerializationProxy import scala.language.implicitConversions +import language.experimental.captureChecking /** This class implements mutable maps with `Long` keys based on a hash table with open addressing. * @@ -36,7 +37,7 @@ import scala.language.implicitConversions * rapidly as 2^30 is approached. * */ -final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBufferSize: Int, initBlank: Boolean) +final class LongMap[sealed V] private[collection] (defaultEntry: Long -> V, initialBufferSize: Int, initBlank: Boolean) extends AbstractMap[Long, V] with MapOps[Long, V, Map, LongMap[V]] with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]] @@ -46,7 +47,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff def this() = this(LongMap.exceptionDefault, 16, true) // TODO: override clear() with an optimization more tailored for efficiency. - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]): LongMap[V] = { + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]^): LongMap[V] = { //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? val b = newSpecificBuilder b.sizeHint(coll) @@ -56,7 +57,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V]) /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. 
*/ - def this(defaultEntry: Long => V) = this(defaultEntry, 16, true) + def this(defaultEntry: Long -> V) = this(defaultEntry, 16, true) /** Creates a new `LongMap` with an initial buffer of specified size. * @@ -66,7 +67,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) /** Creates a new `LongMap` with specified default values and initial buffer size. */ - def this(defaultEntry: Long => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + def this(defaultEntry: Long -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) private[this] var mask = 0 private[this] var extraKeys: Int = 0 @@ -468,18 +469,18 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff } @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { + override def + [sealed V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { val m = this + elem1 + elem2 if(elems.isEmpty) m else m.concat(elems) } - override def concat[V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = { + override def concat[sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = { val lm = clone().asInstanceOf[LongMap[V1]] xs.iterator.foreach(kv => lm += kv) lm } - override def ++ [V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(xs) + override def ++ [sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(xs) @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = @@ -519,7 +520,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff * Unlike `mapValues`, this method generates a new * collection immediately. 
*/ - def mapValuesNow[V1](f: V => V1): LongMap[V1] = { + def mapValuesNow[sealed V1](f: V => V1): LongMap[V1] = { val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) @@ -562,11 +563,11 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff this } - def map[V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + def map[sealed V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) - def flatMap[V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + def flatMap[sealed V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) - def collect[V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = + def collect[sealed V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = strictOptimizedCollect(LongMap.newBuilder[V2], pf) protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this) @@ -580,13 +581,13 @@ object LongMap { private final val VacantBit = 0x40000000 private final val MissVacant = 0xC0000000 - private val exceptionDefault: Long => Nothing = (k: Long) => throw new NoSuchElementException(k.toString) + private val exceptionDefault: Long -> Nothing = (k: Long) => throw new NoSuchElementException(k.toString) /** A builder for instances of `LongMap`. * * This builder can be reused to create multiple instances. */ - final class LongMapBuilder[V] extends ReusableBuilder[(Long, V), LongMap[V]] { + final class LongMapBuilder[sealed V] extends ReusableBuilder[(Long, V), LongMap[V]] { private[collection] var elems: LongMap[V] = new LongMap[V] override def addOne(entry: (Long, V)): this.type = { elems += entry @@ -598,9 +599,9 @@ object LongMap { } /** Creates a new `LongMap` with zero or more key/value pairs. */ - def apply[V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) + def apply[sealed V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) - private def buildFromIterableOnce[V](elems: IterableOnce[(Long, V)]): LongMap[V] = { + private def buildFromIterableOnce[sealed V](elems: IterableOnce[(Long, V)]^): LongMap[V] = { var sz = elems.knownSize if(sz < 0) sz = 4 val lm = new LongMap[V](sz * 2) @@ -610,10 +611,10 @@ object LongMap { } /** Creates a new empty `LongMap`. */ - def empty[V]: LongMap[V] = new LongMap[V] + def empty[sealed V]: LongMap[V] = new LongMap[V] /** Creates a new empty `LongMap` with the supplied default */ - def withDefault[V](default: Long => V): LongMap[V] = new LongMap[V](default) + def withDefault[sealed V](default: Long -> V): LongMap[V] = new LongMap[V](default) /** Creates a new `LongMap` from an existing source collection. A source collection * which is already a `LongMap` gets cloned. 
@@ -622,17 +623,17 @@ object LongMap { * @tparam A the type of the collection’s elements * @return a new `LongMap` with the elements of `source` */ - def from[V](source: IterableOnce[(Long, V)]): LongMap[V] = source match { + def from[sealed V](source: IterableOnce[(Long, V)]^): LongMap[V] = source match { case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]] case _ => buildFromIterableOnce(source) } - def newBuilder[V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] + def newBuilder[sealed V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] /** Creates a new `LongMap` from arrays of keys and values. * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. */ - def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = { + def fromZip[sealed V](keys: Array[Long], values: Array[V]): LongMap[V] = { val sz = math.min(keys.length, values.length) val lm = new LongMap[V](sz * 2) var i = 0 @@ -644,7 +645,7 @@ object LongMap { /** Creates a new `LongMap` from keys and values. * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. */ - def fromZip[V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { + def fromZip[sealed V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { val sz = math.min(keys.size, values.size) val lm = new LongMap[V](sz * 2) val ki = keys.iterator @@ -654,20 +655,20 @@ object LongMap { lm } - implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + implicit def toFactory[sealed V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] @SerialVersionUID(3L) private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] } implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] } - implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def iterableFactory[sealed V]: Factory[(Long, V), LongMap[V]] = toFactory(this) implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) } diff --git a/tests/pos-special/stdlib/collection/mutable/Map.scala b/tests/pos-special/stdlib/collection/mutable/Map.scala index 610dc01029cc..dab64ddb1f58 100644 --- a/tests/pos-special/stdlib/collection/mutable/Map.scala +++ b/tests/pos-special/stdlib/collection/mutable/Map.scala @@ -14,6 +14,8 @@ package scala package collection package mutable +import language.experimental.captureChecking + /** Base type of mutable Maps */ trait Map[K, V] extends Iterable[(K, V)] @@ -44,7 +46,7 @@ trait Map[K, V] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default 
value */ - def withDefault(d: K => V): Map[K, V] = new Map.WithDefault[K, V](this, d) + def withDefault(d: K -> V): Map[K, V] = new Map.WithDefault[K, V](this, d) /** The same map with a given default value. * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. @@ -68,7 +70,8 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] with Cloneable[C] with Builder[(K, V), C] with Growable[(K, V)] - with Shrinkable[K] { + with Shrinkable[K] + with Pure { def result(): C = coll @@ -231,7 +234,7 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] object Map extends MapFactory.Delegate[Map](HashMap) { @SerialVersionUID(3L) - class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K => V) + class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K -> V) extends AbstractMap[K, V] with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { @@ -250,12 +253,12 @@ object Map extends MapFactory.Delegate[Map](HashMap) { def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } - override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): Map[K, V2] = + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): Map[K, V2] = underlying.concat(suffix).withDefault(defaultValue) override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = new WithDefault[K, V](mapFactory.from(coll), defaultValue) override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala index 13d7c35e0165..281631c92298 100644 --- a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala @@ -12,6 +12,7 @@ package scala.collection.mutable +import language.experimental.captureChecking /** A trait for mutable maps with multiple values assigned to a key. * @@ -51,7 +52,7 @@ package scala.collection.mutable * @define Coll `MultiMap` */ @deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0") -trait MultiMap[K, V] extends Map[K, Set[V]] { +trait MultiMap[K, sealed V] extends Map[K, Set[V]] { /** Creates a new set. 
* * Classes that use this trait as a mixin can override this method diff --git a/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala index e98536d0dad5..3e9b16540031 100644 --- a/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala +++ b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala @@ -15,6 +15,7 @@ package collection package mutable import java.util.ConcurrentModificationException +import language.experimental.captureChecking /** * Utilities to check that mutations to a client that tracks @@ -66,7 +67,7 @@ private object MutationTracker { * @param mutationCount a by-name provider of the current mutation count * @tparam A the type of the iterator's elements */ - final class CheckedIterator[A](underlying: Iterator[A], mutationCount: => Int) extends AbstractIterator[A] { + final class CheckedIterator[A](underlying: Iterator[A]^, mutationCount: => Int) extends AbstractIterator[A] { private[this] val expectedCount = mutationCount def hasNext: Boolean = { diff --git a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala index 22e99d4650d1..f1deb25b6a8a 100644 --- a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala @@ -16,6 +16,7 @@ package mutable import java.lang.Integer.numberOfLeadingZeros import java.util.ConcurrentModificationException import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking /** * @define Coll `OpenHashMap` @@ -25,10 +26,10 @@ import scala.collection.generic.DefaultSerializable @SerialVersionUID(3L) object OpenHashMap extends MapFactory[OpenHashMap] { - def empty[K, V] = new OpenHashMap[K, V] - def from[K, V](it: IterableOnce[(K, V)]): OpenHashMap[K,V] = empty ++= it + def empty[sealed K, sealed V] = new OpenHashMap[K, V] + def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): OpenHashMap[K,V] = empty ++= it - def newBuilder[K, V]: Builder[(K, V), OpenHashMap[K,V]] = + def newBuilder[sealed K, sealed V]: Builder[(K, V), OpenHashMap[K,V]] = new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty) /** A hash table entry. @@ -38,7 +39,7 @@ object OpenHashMap extends MapFactory[OpenHashMap] { * If its `key` is not the default value of type `Key`, the entry is occupied. * If the entry is occupied, `hash` contains the hash value of `key`. 
*/ - final private class OpenEntry[Key, Value](var key: Key, + final private class OpenEntry[sealed Key, sealed Value](var key: Key, var hash: Int, var value: Option[Value]) @@ -61,7 +62,7 @@ object OpenHashMap extends MapFactory[OpenHashMap] { * @define willNotTerminateInf */ @deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") -class OpenHashMap[Key, Value](initialSize : Int) +class OpenHashMap[sealed Key, sealed Value](initialSize : Int) extends AbstractMap[Key, Value] with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]] with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]] diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala index 5572bdca3cf6..a395fac4a44a 100644 --- a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala +++ b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala @@ -15,6 +15,7 @@ package mutable import scala.collection.generic.DefaultSerializationProxy import scala.math.Ordering +import language.experimental.captureChecking /** A heap-based priority queue. * @@ -66,7 +67,7 @@ import scala.math.Ordering * @define mayNotTerminateInf * @define willNotTerminateInf */ -sealed class PriorityQueue[A](implicit val ord: Ordering[A]) +sealed class PriorityQueue[sealed A](implicit val ord: Ordering[A]) extends AbstractIterable[A] with Iterable[A] with IterableOps[A, Iterable, PriorityQueue[A]] @@ -77,7 +78,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) with Serializable { - private class ResizableArrayAccess[A0] extends ArrayBuffer[A0] { + private class ResizableArrayAccess[sealed A0] extends ArrayBuffer[A0] { override def mapInPlace(f: A0 => A0): this.type = { var i = 1 // see "we do not use array(0)" comment below (???) 
val siz = this.size @@ -106,7 +107,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) override def isEmpty: Boolean = resarr.p_size0 < 2 // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) - override protected def fromSpecific(coll: scala.collection.IterableOnce[A]): PriorityQueue[A] = PriorityQueue.from(coll) + override protected def fromSpecific(coll: scala.collection.IterableOnce[A]^): PriorityQueue[A] = PriorityQueue.from(coll) override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder override def empty: PriorityQueue[A] = PriorityQueue.empty @@ -161,7 +162,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) this } - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { val from = resarr.p_size0 for (x <- xs.iterator) unsafeAdd(x) heapify(from) @@ -364,7 +365,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) pq } - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if (copied > 0) { Array.copy(resarr.p_array, 1, xs, start, copied) @@ -383,7 +384,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) @SerialVersionUID(3L) object PriorityQueue extends SortedIterableFactory[PriorityQueue] { - def newBuilder[A : Ordering]: Builder[A, PriorityQueue[A]] = { + def newBuilder[sealed A : Ordering]: Builder[A, PriorityQueue[A]] = { new Builder[A, PriorityQueue[A]] { val pq = new PriorityQueue[A] def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } @@ -392,9 +393,9 @@ object PriorityQueue extends SortedIterableFactory[PriorityQueue] { } } - def empty[A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] + def empty[sealed A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] - def from[E : Ordering](it: IterableOnce[E]): PriorityQueue[E] = { + def from[sealed E : Ordering](it: IterableOnce[E]^): PriorityQueue[E] = { val b = newBuilder[E] b ++= it b.result() diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala index 18cce0bd3852..a578b0742009 100644 --- a/tests/pos-special/stdlib/collection/mutable/Queue.scala +++ b/tests/pos-special/stdlib/collection/mutable/Queue.scala @@ -15,6 +15,7 @@ package mutable import scala.annotation.nowarn import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking /** `Queue` objects implement data structures that allow to @@ -27,7 +28,7 @@ import scala.collection.generic.DefaultSerializable * @define mayNotTerminateInf * @define willNotTerminateInf */ -class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) +class Queue[sealed A] protected (array: Array[AnyRef], start: Int, end: Int) extends ArrayDeque[A](array, start, end) with IndexedSeqOps[A, Queue, Queue[A]] with StrictOptimizedSeqOps[A, Queue, Queue[A]] @@ -129,10 +130,10 @@ class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) @SerialVersionUID(3L) object Queue extends StrictOptimizedSeqFactory[Queue] { - def from[A](source: IterableOnce[A]): Queue[A] = empty ++= source + def from[sealed A](source: IterableOnce[A]^): Queue[A] = empty ++= source - def empty[A]: Queue[A] = new Queue + def empty[sealed A]: Queue[A] = new Queue - def newBuilder[A]: Builder[A, Queue[A]] = 
new GrowableBuilder[A, Queue[A]](empty) + def newBuilder[sealed A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty) } diff --git a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala index 3ac0e1a1f797..1f320f832cdf 100644 --- a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala +++ b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala @@ -16,6 +16,7 @@ package collection.mutable import scala.annotation.tailrec import collection.{AbstractIterator, Iterator} import java.lang.String +import language.experimental.captureChecking /** * An object containing the red-black tree implementation used by mutable `TreeMaps`. @@ -31,25 +32,25 @@ private[collection] object RedBlackTree { // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) // on the size of the range. - final class Tree[A, B](var root: Node[A, B], var size: Int) { + final class Tree[sealed A, sealed B](var root: Node[A, B], var size: Int) { def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size) } - final class Node[A, B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { + final class Node[sealed A, sealed B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" } object Tree { - def empty[A, B]: Tree[A, B] = new Tree(null, 0) + def empty[sealed A, sealed B]: Tree[A, B] = new Tree(null, 0) } object Node { - @`inline` def apply[A, B](key: A, value: B, red: Boolean, + @`inline` def apply[sealed A, sealed B](key: A, value: B, red: Boolean, left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = new Node(key, value, red, left, right, parent) - @`inline` def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = + @`inline` def leaf[sealed A, sealed B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = new Node(key, value, red, null, null, parent) def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) @@ -180,7 +181,7 @@ private[collection] object RedBlackTree { // ---- insertion ---- - def insert[A, B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { + def insert[sealed A, sealed B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { var y: Node[A, B] = null var x = tree.root var cmp = 1 @@ -476,16 +477,16 @@ private[collection] object RedBlackTree { if (node.right ne null) transformNodeNonNull(node.right, f) } - def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = + def iterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start, end) - def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = + def keysIterator[sealed A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = new KeysIterator(tree, start, end) - def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = + def valuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = new 
ValuesIterator(tree, start, end) - private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) + private[this] abstract class TreeIterator[sealed A, sealed B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) (implicit ord: Ordering[A]) extends AbstractIterator[R] { protected def nextResult(node: Node[A, B]): R @@ -513,19 +514,19 @@ private[collection] object RedBlackTree { setNullIfAfterEnd() } - private[this] final class EntriesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + private[this] final class EntriesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) extends TreeIterator[A, B, (A, B)](tree, start, end) { def nextResult(node: Node[A, B]) = (node.key, node.value) } - private[this] final class KeysIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + private[this] final class KeysIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) extends TreeIterator[A, B, A](tree, start, end) { def nextResult(node: Node[A, B]) = node.key } - private[this] final class ValuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + private[this] final class ValuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) extends TreeIterator[A, B, B](tree, start, end) { def nextResult(node: Node[A, B]) = node.value @@ -603,7 +604,7 @@ private[collection] object RedBlackTree { // building /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ - def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + def fromOrderedKeys[sealed A](xs: Iterator[A], size: Int): Tree[A, Null] = { val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes def f(level: Int, size: Int): Node[A, Null] = size match { case 0 => null @@ -622,7 +623,7 @@ private[collection] object RedBlackTree { } /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ - def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { + def fromOrderedEntries[sealed A, sealed B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes def f(level: Int, size: Int): Node[A, B] = size match { case 0 => null @@ -642,7 +643,7 @@ private[collection] object RedBlackTree { new Tree(f(1, size), size) } - def copyTree[A, B](n: Node[A, B]): Node[A, B] = + def copyTree[sealed A, sealed B](n: Node[A, B]): Node[A, B] = if(n eq null) null else { val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null) if(c.left != null) c.left.parent = c diff --git a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala index d7d3b6db4f09..246e525e37d9 100644 --- a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala @@ -14,6 +14,7 @@ package scala package collection package mutable +import language.experimental.captureChecking /** `ReusableBuilder` is a marker trait that indicates that a `Builder` * can be reused to build more than one instance of a collection. 
In diff --git a/tests/pos-special/stdlib/collection/mutable/Seq.scala b/tests/pos-special/stdlib/collection/mutable/Seq.scala index e83d79987208..443eec379c1b 100644 --- a/tests/pos-special/stdlib/collection/mutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/mutable/Seq.scala @@ -13,6 +13,7 @@ package scala.collection.mutable import scala.collection.{IterableFactoryDefaults, SeqFactory} +import language.experimental.captureChecking trait Seq[A] extends Iterable[A] diff --git a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala index 67066f99e07e..5740490223b2 100644 --- a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala @@ -13,6 +13,7 @@ package scala package collection package mutable +import language.experimental.captureChecking /** * A generic trait for ordered mutable maps. Concrete classes have to provide diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala index 6530e8fedf05..01384e993e89 100644 --- a/tests/pos-special/stdlib/collection/mutable/Set.scala +++ b/tests/pos-special/stdlib/collection/mutable/Set.scala @@ -13,6 +13,7 @@ package scala.collection.mutable import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps} +import language.experimental.captureChecking /** Base trait for mutable sets */ trait Set[A] diff --git a/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala index 006a3b88e49f..de2a24ecf01f 100644 --- a/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala @@ -14,6 +14,7 @@ package scala package collection.mutable import scala.annotation.tailrec +import language.experimental.captureChecking /** This trait forms part of collections that can be reduced * using a `-=` operator. @@ -52,7 +53,7 @@ trait Shrinkable[-A] { * @param xs the iterator producing the elements to remove. 
* @return the $coll itself */ - def subtractAll(xs: collection.IterableOnce[A]): this.type = { + def subtractAll(xs: collection.IterableOnce[A]^): this.type = { @tailrec def loop(xs: collection.LinearSeq[A]): Unit = { if (xs.nonEmpty) { subtractOne(xs.head) @@ -74,6 +75,6 @@ trait Shrinkable[-A] { } /** Alias for `subtractAll` */ - @`inline` final def --= (xs: collection.IterableOnce[A]): this.type = subtractAll(xs) + @`inline` final def --= (xs: collection.IterableOnce[A]^): this.type = subtractAll(xs) } diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala index eb2f0d231b7a..8017177f5720 100644 --- a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala @@ -14,6 +14,7 @@ package scala package collection.mutable import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults} +import language.experimental.captureChecking /** * Base type for mutable sorted map collections @@ -37,7 +38,7 @@ trait SortedMap[K, V] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default value */ - override def withDefault(d: K => V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) + override def withDefault(d: K -> V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) /** The same map with a given default value. * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. @@ -66,7 +67,7 @@ trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { @SerialVersionUID(3L) - final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K => V) + final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K -> V) extends Map.WithDefault[K, V](underlying, defaultValue) with SortedMap[K, V] with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] @@ -91,10 +92,10 @@ object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): SortedMap[K, V2] = + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): SortedMap[K, V2] = underlying.concat(suffix).withDefault(defaultValue) - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala index 2bcb8dc7845a..e657fb749d7d 100644 --- a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala @@ -13,6 +13,7 @@ package scala package collection package mutable +import language.experimental.captureChecking /** * Base type for mutable sorted set collections diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala index 675666bc805c..4efa9621f374 100644 --- a/tests/pos-special/stdlib/collection/mutable/Stack.scala +++ 
b/tests/pos-special/stdlib/collection/mutable/Stack.scala @@ -16,6 +16,8 @@ import scala.annotation.{migration, nowarn} import scala.collection.generic.DefaultSerializable import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps} +import language.experimental.captureChecking + /** A stack implements a data structure which allows to store and retrieve * objects in a last-in-first-out (LIFO) fashion. * @@ -33,7 +35,7 @@ import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, Stri * @define willNotTerminateInf */ @migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0") -class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) +class Stack[sealed A] protected (array: Array[AnyRef], start: Int, end: Int) extends ArrayDeque[A](array, start, end) with IndexedSeqOps[A, Stack, Stack[A]] with StrictOptimizedSeqOps[A, Stack, Stack[A]] @@ -133,10 +135,10 @@ class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) @SerialVersionUID(3L) object Stack extends StrictOptimizedSeqFactory[Stack] { - def from[A](source: IterableOnce[A]): Stack[A] = empty ++= source + def from[sealed A](source: IterableOnce[A]^): Stack[A] = empty ++= source - def empty[A]: Stack[A] = new Stack + def empty[sealed A]: Stack[A] = new Stack - def newBuilder[A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty) + def newBuilder[sealed A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty) } diff --git a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala index 1d8b9563e917..5320fa1dabb0 100644 --- a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala @@ -14,6 +14,7 @@ package scala.collection.mutable import scala.collection.{IterableFactoryDefaults, IterableOnce} import scala.collection.immutable.WrappedString +import language.experimental.captureChecking import scala.Predef.{ // unimport char-related implicit conversions to avoid triggering them accidentally genericArrayOps => _, @@ -81,7 +82,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr // Methods required to make this an IndexedSeq: def apply(i: Int): Char = underlying.charAt(i) - override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): StringBuilder = + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): StringBuilder = new StringBuilder() appendAll coll override protected def newSpecificBuilder: Builder[Char, StringBuilder] = @@ -109,7 +110,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr override def toString: String = result() - override def toArray[B >: Char](implicit ct: scala.reflect.ClassTag[B]) = + override def toArray[sealed B >: Char](implicit ct: scala.reflect.ClassTag[B]) = ct.runtimeClass match { case java.lang.Character.TYPE => toCharArray.asInstanceOf[Array[B]] case _ => super.toArray @@ -184,7 +185,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr * @param xs the characters to be appended. * @return this StringBuilder. 
*/ - def appendAll(xs: IterableOnce[Char]): this.type = { + def appendAll(xs: IterableOnce[Char]^): this.type = { xs match { case x: WrappedString => underlying append x.unwrap case x: ArraySeq.ofChar => underlying append x.array @@ -313,7 +314,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr * @return this StringBuilder. * @throws StringIndexOutOfBoundsException if the index is out of bounds. */ - def insertAll(index: Int, xs: IterableOnce[Char]): this.type = + def insertAll(index: Int, xs: IterableOnce[Char]^): this.type = insertAll(index, (ArrayBuilder.make[Char] ++= xs).result()) /** Inserts the given Array[Char] into this sequence at the given index. diff --git a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala index 1af968a08ac3..f714a9ed46c2 100644 --- a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala @@ -17,6 +17,7 @@ package mutable import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable import scala.collection.mutable.{RedBlackTree => RB} +import language.experimental.captureChecking /** * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. @@ -28,7 +29,7 @@ import scala.collection.mutable.{RedBlackTree => RB} * @define Coll mutable.TreeMap * @define coll mutable tree map */ -sealed class TreeMap[K, V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) +sealed class TreeMap[sealed K, sealed V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) extends AbstractMap[K, V] with SortedMap[K, V] with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] @@ -247,11 +248,11 @@ sealed class TreeMap[K, V] private (tree: RB.Tree[K, V])(implicit val ordering: @SerialVersionUID(3L) object TreeMap extends SortedMapFactory[TreeMap] { - def from[K : Ordering, V](it: IterableOnce[(K, V)]): TreeMap[K, V] = + def from[sealed K : Ordering, sealed V](it: IterableOnce[(K, V)]^): TreeMap[K, V] = Growable.from(empty[K, V], it) - def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap[K, V]() + def empty[sealed K : Ordering, sealed V]: TreeMap[K, V] = new TreeMap[K, V]() - def newBuilder[K: Ordering, V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) + def newBuilder[sealed K: Ordering, sealed V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) } diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala index bed474dc02a3..9ba439bea041 100644 --- a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala @@ -17,6 +17,7 @@ import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable import scala.collection.mutable.{RedBlackTree => RB} import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable} +import language.experimental.captureChecking /** * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. 
@@ -28,7 +29,7 @@ import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Steppe * @define coll mutable tree set */ // Original API designed in part by Lucien Pereira -sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) +sealed class TreeSet[sealed A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) extends AbstractSet[A] with SortedSet[A] with SortedSetOps[A, TreeSet, TreeSet[A]] @@ -191,9 +192,9 @@ sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit va @SerialVersionUID(3L) object TreeSet extends SortedIterableFactory[TreeSet] { - def empty[A : Ordering]: TreeSet[A] = new TreeSet[A]() + def empty[sealed A : Ordering]: TreeSet[A] = new TreeSet[A]() - def from[E](it: IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = + def from[sealed E](it: IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = it match { case ts: TreeSet[E] if ordering == ts.ordering => new TreeSet[E](ts.tree.treeCopy()) @@ -209,7 +210,7 @@ object TreeSet extends SortedIterableFactory[TreeSet] { new TreeSet[E](t) } - def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { + def newBuilder[sealed A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } def result(): TreeSet[A] = new TreeSet[A](tree) diff --git a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala index 489f2a1b0387..2015b76a31b8 100644 --- a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala @@ -17,6 +17,7 @@ import scala.annotation.tailrec import scala.collection.generic.DefaultSerializable import scala.reflect.ClassTag import scala.collection.immutable.Nil +import language.experimental.captureChecking /** A buffer that stores elements in an unrolled linked list. 
* @@ -45,7 +46,7 @@ import scala.collection.immutable.Nil * */ @SerialVersionUID(3L) -sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) +sealed class UnrolledBuffer[sealed T](implicit val tag: ClassTag[T]) extends AbstractBuffer[T] with Buffer[T] with Seq[T] @@ -190,7 +191,7 @@ sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) def insert(idx: Int, elem: T): Unit = insertAll(idx, elem :: Nil) - def insertAll(idx: Int, elems: IterableOnce[T]): Unit = + def insertAll(idx: Int, elems: IterableOnce[T]^): Unit = if (idx >= 0 && idx <= sz) { sz += headptr.insertAll(idx, elems, this) } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") @@ -202,7 +203,7 @@ sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) this } - def patchInPlace(from: Int, patch: collection.IterableOnce[T], replaced: Int): this.type = { + def patchInPlace(from: Int, patch: collection.IterableOnce[T]^, replaced: Int): this.type = { remove(from, replaced) insertAll(from, patch) this @@ -240,11 +241,11 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self) - def empty[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + def empty[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] - def from[A : ClassTag](source: scala.collection.IterableOnce[A]): UnrolledBuffer[A] = newBuilder[A].addAll(source) + def from[sealed A : ClassTag](source: scala.collection.IterableOnce[A]^): UnrolledBuffer[A] = newBuilder[A].addAll(source) - def newBuilder[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + def newBuilder[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] final val waterline: Int = 50 @@ -257,7 +258,7 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] /** Unrolled buffer node. 
*/ - class Unrolled[T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { + class Unrolled[sealed T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) @@ -372,7 +373,7 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] if (next eq null) true else false // checks if last node was thrown out } else false - @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T], buffer: UnrolledBuffer[T]): Int = { + @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T]^, buffer: UnrolledBuffer[T]): Int = { if (idx < size) { // divide this node at the appropriate position and insert all into head // update new next @@ -436,7 +437,7 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] // This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: // Todo -- revisit whether inheritance is the best way to achieve this functionality -private[collection] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { +private[collection] class DoublingUnrolledBuffer[sealed T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) } diff --git a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala index 7286a318e1f9..a9498b7fc69b 100644 --- a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala @@ -16,6 +16,7 @@ package mutable import scala.annotation.nowarn import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike} +import language.experimental.captureChecking /** A hash map with references to entries which are weakly reachable. Entries are * removed from this map when the key is no longer (strongly) referenced. 
This class wraps @@ -33,7 +34,7 @@ import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapper * @define willNotTerminateInf */ @SerialVersionUID(3L) -class WeakHashMap[K, V] extends JMapWrapper[K, V](new java.util.WeakHashMap) +class WeakHashMap[sealed K, sealed V] extends JMapWrapper[K, V](new java.util.WeakHashMap) with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]] with MapFactoryDefaults[K, V, WeakHashMap, Iterable] { override def empty = new WeakHashMap[K, V] @@ -48,8 +49,8 @@ class WeakHashMap[K, V] extends JMapWrapper[K, V](new java.util.WeakHashMap) */ @SerialVersionUID(3L) object WeakHashMap extends MapFactory[WeakHashMap] { - def empty[K, V]: WeakHashMap[K,V] = new WeakHashMap[K, V] - def from[K, V](it: collection.IterableOnce[(K, V)]): WeakHashMap[K,V] = Growable.from(empty[K, V], it) - def newBuilder[K, V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) + def empty[sealed K, sealed V]: WeakHashMap[K,V] = new WeakHashMap[K, V] + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): WeakHashMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[sealed K, sealed V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) } diff --git a/tests/pos-special/stdlib/collection/mutable/package.scala b/tests/pos-special/stdlib/collection/mutable/package.scala index 4915e8a48b22..d658ca5bc65a 100644 --- a/tests/pos-special/stdlib/collection/mutable/package.scala +++ b/tests/pos-special/stdlib/collection/mutable/package.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking package object mutable { diff --git a/tests/pos-special/stdlib/collection/package.scala b/tests/pos-special/stdlib/collection/package.scala index 954573ff1ddd..ad4686be1fb2 100644 --- a/tests/pos-special/stdlib/collection/package.scala +++ b/tests/pos-special/stdlib/collection/package.scala @@ -11,6 +11,7 @@ */ package scala +import language.experimental.captureChecking package object collection { @deprecated("Use Iterable instead of Traversable", "2.13.0") From e5fc8c819f82c4404ca21b16a48bc19e9a8a80a1 Mon Sep 17 00:00:00 2001 From: David Hua Date: Tue, 7 Nov 2023 01:53:39 +0100 Subject: [PATCH 162/216] Fix i18624 and add test case for it --- .../dotty/tools/dotc/transform/init/Objects.scala | 14 +++++++++++--- tests/init-global/pos/i18624.scala | 7 +++++++ 2 files changed, 18 insertions(+), 3 deletions(-) create mode 100644 tests/init-global/pos/i18624.scala diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 836218f302bc..b28fe6ce1a76 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -698,13 +698,21 @@ object Objects: case Fun(code, thisV, klass, env) => // meth == NoSymbol for poly functions - if meth.name.toString == "tupled" then + if meth.name == nme.tupled then value // a call like `fun.tupled` else code match case ddef: DefDef => - given Env.Data = Env.of(ddef, args.map(_.value), env) - extendTrace(code) { eval(ddef.rhs, thisV, klass, cacheResult = true) } + if meth.name == nme.apply then + given Env.Data = Env.of(ddef, args.map(_.value), env) + extendTrace(code) { eval(ddef.rhs, thisV, klass, cacheResult = true) } + else + meth.owner.asType.name match + case tpnme.Any | tpnme.AnyRef => + value + case _ => + Cold + end if case _ => // by-name closure diff --git 
a/tests/init-global/pos/i18624.scala b/tests/init-global/pos/i18624.scala new file mode 100644 index 000000000000..f2562d2da61b --- /dev/null +++ b/tests/init-global/pos/i18624.scala @@ -0,0 +1,7 @@ +def h(a: Int): Unit = { + +} + +object X { + println(h.getClass()) +} From 30ca28dba2f01deef9574e48736c25dcb6a41f56 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 7 Nov 2023 17:21:42 +0100 Subject: [PATCH 163/216] Avoid pure expression warning with Scala2 library TASTy The extra information on trait initialization causes an extra warning. The warnings/errors in the check files should remain the same. The tests no longer desugar the erroneous code into a statement, which avoids this warning.
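For illustration, the change in miniature (the before/after of tests/neg/i16601.scala; the comments sketch the expected diagnostics and are not part of the test source):

    @main def Test: Unit = new concurrent.ExecutionContext // before: E042 error, plus the extra pure expression warning
    @main def Test: Any = new concurrent.ExecutionContext // after: E042 error only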
--- tests/neg/i16601.check | 8 ++++---- tests/neg/i16601.scala | 2 +- tests/neg/i16601a.check | 8 ++++---- tests/neg/i16601a.scala | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/neg/i16601.check b/tests/neg/i16601.check index 25baef04e479..c2059506cb09 100644 --- a/tests/neg/i16601.check +++ b/tests/neg/i16601.check @@ -1,6 +1,6 @@ --- [E042] Type Error: tests/neg/i16601.scala:1:27 ---------------------------------------------------------------------- -1 |@main def Test: Unit = new concurrent.ExecutionContext // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | ExecutionContext is a trait; it cannot be instantiated +-- [E042] Type Error: tests/neg/i16601.scala:1:26 ---------------------------------------------------------------------- +1 |@main def Test: Any = new concurrent.ExecutionContext // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ExecutionContext is a trait; it cannot be instantiated | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16601.scala b/tests/neg/i16601.scala index 2e058db0093c..00d0c7c9b15e 100644 --- a/tests/neg/i16601.scala +++ b/tests/neg/i16601.scala @@ -1 +1 @@ -@main def Test: Unit = new concurrent.ExecutionContext // error \ No newline at end of file +@main def Test: Any = new concurrent.ExecutionContext // error \ No newline at end of file diff --git a/tests/neg/i16601a.check b/tests/neg/i16601a.check index f6ddd66ca107..6640f5b41749 100644 --- a/tests/neg/i16601a.check +++ b/tests/neg/i16601a.check @@ -1,7 +1,7 @@ --- [E042] Type Error: tests/neg/i16601a.scala:3:27 --------------------------------------------------------------------- -3 |@main def Test: Unit = new concurrent.ExecutionContext // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | ExecutionContext is a trait; it cannot be instantiated +-- [E042] Type Error: tests/neg/i16601a.scala:3:26 --------------------------------------------------------------------- +3 |@main def Test: Any = new concurrent.ExecutionContext // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ExecutionContext is a trait; it cannot be instantiated |--------------------------------------------------------------------------------------------------------------------- | Explanation (enabled by `-explain`) |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tests/neg/i16601a.scala b/tests/neg/i16601a.scala index 232f9c1c9d03..b85828122e8a 100644 --- a/tests/neg/i16601a.scala +++ b/tests/neg/i16601a.scala @@ -1,3 +1,3 @@ //> using options -explain -@main def Test: Unit = new concurrent.ExecutionContext // error \ No newline at end of file +@main def Test: Any = new concurrent.ExecutionContext // error \ No newline at end of file From a0699ae879836ce0e3aa259e2464fee172704c78 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 1 Nov 2023 17:19:46 +0100 Subject: [PATCH 164/216] Make `private[this]` a migration warning * In `3.4` we emit the deprecation warning and enable the patch with -rewrite. * In `future` we make this syntax an error --- .../scala/dotty/communitybuild/projects.scala | 2 +- .../dotty/tools/dotc/core/TypeComparer.scala | 2 +- .../core/classfile/ReusableDataReader.scala | 8 +-- .../dotty/tools/dotc/parsing/Parsers.scala | 16 +++-- .../dotty/tools/dotc/reporting/WConf.scala | 2 +- .../dotc/semanticdb/generated/Access.scala | 12 ++-- .../semanticdb/generated/Annotation.scala | 4 +- .../dotc/semanticdb/generated/Constant.scala | 40 +++++------ .../semanticdb/generated/Diagnostic.scala | 4 +- .../semanticdb/generated/Documentation.scala | 4 +- .../dotc/semanticdb/generated/Location.scala | 4 +- .../dotc/semanticdb/generated/Range.scala | 4 +- .../dotc/semanticdb/generated/Scope.scala | 4 +- .../dotc/semanticdb/generated/Signature.scala | 20 +++--- .../generated/SymbolInformation.scala | 4 +- .../generated/SymbolOccurrence.scala | 4 +- .../dotc/semanticdb/generated/Synthetic.scala | 4 +- .../semanticdb/generated/TextDocument.scala | 4 +- .../semanticdb/generated/TextDocuments.scala | 4 +- .../dotc/semanticdb/generated/Tree.scala | 36 +++++----- .../dotc/semanticdb/generated/Type.scala | 72 +++++++++---------- .../dotty/tools/dotc/transform/Splicer.scala | 2 +- .../src/dotty/tools/dotc/util/Chars.scala | 2 +- .../tools/dotc/util/ReusableInstance.scala | 4 +- compiler/src/dotty/tools/io/ZipArchive.scala | 4 +- .../dotty/tools/runner/ScalaClassLoader.scala | 2 +- .../runtime/impl/printers/SourceCode.scala | 10 +-- .../dotty/tools/dotc/CompilationTests.scala | 1 + .../languageserver/DottyLanguageServer.scala | 8 +-- .../languageserver/worksheet/Evaluator.scala | 2 +- .../worksheet/InputStreamConsumer.scala | 2 +- .../tools/languageserver/util/Code.scala | 2 +- .../tools/languageserver/util/CodeRange.scala | 2 +- .../languageserver/util/PositionContext.scala | 4 +- .../util/server/TestClient.scala | 2 +- .../util/server/TestServer.scala | 2 +- library/src/scala/runtime/LazyVals.scala | 4 +- project/Build.scala | 3 + sbt-test/compilerReporter/i14576/build.sbt | 4 +- .../src/example/level2/Documentation.scala | 2 +- scaladoc-testcases/src/tests/visibility.scala | 2 + .../src/scala/quoted/staging/Compiler.scala | 4 +- .../scala/quoted/staging/QuoteCompiler.scala | 2 +- .../scala/quoted/staging/QuoteDriver.scala | 2 +- tests/init/neg/function11.scala | 4 +- .../captures/leaking-iterators.scala | 2 +- tests/neg-scalajs/js-native-members.check | 30 ++++++++ ...non-native-members-qualified-private.check | 6 ++ tests/neg/i15503c.scala | 2 +- tests/neg/i16639a.scala | 2 +- tests/neg/i17612a.scala | 2 +- tests/neg/i17612b/i17612b.scala | 2 +- tests/neg/nonunit-statement.scala | 2 +- tests/neg/private-this-3.4.check | 12 ++++ tests/neg/private-this-3.4.scala | 7 ++ tests/neg/private-this-future-migration.scala | 7 ++ tests/neg/private-this-future.scala | 5 ++ tests/pos/i6290.scala | 2 +- tests/pos/private-this-future-migration.scala | 5 ++ tests/pos/private-this.scala | 3 + tests/rewrites/private-this.check | 12 ++++ tests/rewrites/private-this.scala | 12 ++++ tests/semanticdb/metac.expect | 30 +++++++- 63 files changed, 305 insertions(+), 168 deletions(-) create mode 100644 tests/neg/private-this-3.4.check create mode 100644 tests/neg/private-this-3.4.scala create mode 100644 tests/neg/private-this-future-migration.scala create mode 100644 tests/neg/private-this-future.scala create mode 100644
tests/pos/private-this-future-migration.scala create mode 100644 tests/pos/private-this.scala create mode 100644 tests/rewrites/private-this.check create mode 100644 tests/rewrites/private-this.scala diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index 94ee5ad44a8c..974800cdcce1 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -362,7 +362,7 @@ object projects: project = "shapeless-3", sbtTestCommand = "testJVM; testJS", sbtDocCommand = forceDoc("typeable", "deriving"), - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), // due to -Xfatal-warnings + scalacOptions = "-source" :: "3.3" :: SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), // due to -Xfatal-warnings ) lazy val xmlInterpolator = SbtCommunityProject( diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 63915126861c..db9cb60ea2fb 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -160,7 +160,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * every time we compare components of the previous pair of types. * This type is used for capture conversion in `isSubArgs`. */ - private [this] var leftRoot: Type | Null = null + private var leftRoot: Type | Null = null /** Are we forbidden from recording GADT constraints? */ private var frozenGadt = false diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala b/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala index eb1649091f77..e9bb7337c948 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala @@ -9,10 +9,10 @@ import java.io.{DataInputStream, InputStream} import java.nio.{BufferUnderflowException, ByteBuffer} final class ReusableDataReader() extends DataReader { - private[this] var data = new Array[Byte](32768) - private[this] var bb: ByteBuffer = ByteBuffer.wrap(data) - private[this] var size = 0 - private[this] val reader: DataInputStream = { + private var data = new Array[Byte](32768) + private var bb: ByteBuffer = ByteBuffer.wrap(data) + private var size = 0 + private val reader: DataInputStream = { val stream = new InputStream { override def read(): Int = try { bb.get & 0xff diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 805c25ad40e4..d479c7de8cc7 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3112,15 +3112,23 @@ object Parsers { if (in.token == LBRACKET) { if (mods.is(Local) || mods.hasPrivateWithin) syntaxError(DuplicatePrivateProtectedQualifier()) - inBrackets { + val startOffset = in.offset + val mods1 = inBrackets { if in.token == THIS then - if sourceVersion.isAtLeast(future) then - deprecationWarning( - em"The [this] qualifier will be deprecated in the future; it should be dropped.") in.nextToken() mods | Local else mods.withPrivateWithin(ident().toTypeName) } + if mods1.is(Local) then + report.gradualErrorOrMigrationWarning( + em"""The [this] qualifier will be deprecated in the future; it should be dropped. 
+ |See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html${rewriteNotice(`3.4-migration`)}""", + in.sourcePos(), + warnFrom = `3.4`, + errorFrom = future) + if sourceVersion.isMigrating && sourceVersion.isAtLeast(`3.4-migration`) then + patch(source, Span(startOffset, in.lastOffset), "") + mods1 } else mods diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala index 29b5bccb7714..cc0a63cb1532 100644 --- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala +++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala @@ -117,7 +117,7 @@ object WConf: else Right(WConf(configs)) class Suppression(val annotPos: SourcePosition, filters: List[MessageFilter], val start: Int, end: Int, val verbose: Boolean): - private[this] var _used = false + private var _used = false def used: Boolean = _used def markUsed(): Unit = { _used = true } diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala index 2d2621c34390..69b8712878af 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala @@ -50,8 +50,8 @@ final case class AccessMessage( sealedValue: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.privateAccess.isDefined) { val __value = sealedValue.privateAccess.get @@ -379,8 +379,8 @@ final case class PrivateWithinAccess( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -539,8 +539,8 @@ final case class ProtectedWithinAccess( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala index a4f076585a50..cf07e8c58747 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala @@ -13,8 +13,8 @@ final case class Annotation( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: 
_root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala index 91bbaa75e654..da8bf56455ef 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala @@ -58,8 +58,8 @@ final case class ConstantMessage( sealedValue: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.unitConstant.isDefined) { val __value = sealedValue.unitConstant.get @@ -442,8 +442,8 @@ final case class BooleanConstant( value: _root_.scala.Boolean = false ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -520,8 +520,8 @@ final case class ByteConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -598,8 +598,8 @@ final case class ShortConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -676,8 +676,8 @@ final case class CharConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -754,8 +754,8 @@ final case class IntConstant( value: _root_.scala.Int = 0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def 
__computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -832,8 +832,8 @@ final case class LongConstant( value: _root_.scala.Long = 0L ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -910,8 +910,8 @@ final case class FloatConstant( value: _root_.scala.Float = 0.0f ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -988,8 +988,8 @@ final case class DoubleConstant( value: _root_.scala.Double = 0.0 ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -1066,8 +1066,8 @@ final case class StringConstant( value: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala index 5917ab82f59f..43f9dca4d49b 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala @@ -15,8 +15,8 @@ final case class Diagnostic( message: _root_.scala.Predef.String = "" ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (range.isDefined) { val __value = range.get diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala index 695dea973016..256e8ae15f37 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala @@ -14,8 +14,8 @@ final case class Documentation( format: dotty.tools.dotc.semanticdb.Documentation.Format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): 
_root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala index 756b7711d304..1072d25654f0 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala @@ -14,8 +14,8 @@ final case class Location( range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala index 0f7436524ee1..5f1c0477e17d 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala @@ -16,8 +16,8 @@ final case class Range( endCharacter: _root_.scala.Int = 0 ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala index 7a2ee40478c4..44d273d25af4 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala @@ -14,8 +14,8 @@ final case class Scope( hardlinks: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation] = _root_.scala.Seq.empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 symlinks.foreach { __item => val __value = __item diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala index 7a0331be0ed3..810ea9a792d4 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala @@ -44,8 +44,8 @@ final case class SignatureMessage( sealedValue: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.classSignature.isDefined) { val __value = sealedValue.classSignature.get @@ -222,8 
+222,8 @@ final case class ClassSignature( declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -372,8 +372,8 @@ final case class MethodSignature( returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -499,8 +499,8 @@ final case class TypeSignature( upperBound: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -628,8 +628,8 @@ final case class ValueSignature( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala index 92917cb23a41..d22504a51731 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala @@ -22,8 +22,8 @@ final case class SymbolInformation( documentation: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation] = _root_.scala.None ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git 
a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala index 39a5228ed02d..e68a0b6b9efe 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala @@ -15,8 +15,8 @@ final case class SymbolOccurrence( role: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.UNKNOWN_ROLE ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (range.isDefined) { val __value = range.get diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala index 7916fdb2e07a..bb7bcacea092 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala @@ -14,8 +14,8 @@ final case class Synthetic( tree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (range.isDefined) { val __value = range.get diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala index 80322ec45e0e..723df545c4c5 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala @@ -21,8 +21,8 @@ final case class TextDocument( synthetics: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic] = _root_.scala.Seq.empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala index a35bc23bf665..cab86417cfc9 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala @@ -13,8 +13,8 @@ final case class TextDocuments( documents: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument] = _root_.scala.Seq.empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 
documents.foreach { __item => val __value = __item diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala index 6a19494cd65a..310e9c010826 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala @@ -52,8 +52,8 @@ final case class TreeMessage( sealedValue: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.applyTree.isDefined) { val __value = sealedValue.applyTree.get @@ -324,8 +324,8 @@ final case class ApplyTree( arguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -431,8 +431,8 @@ final case class FunctionTree( body: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 parameters.foreach { __item => val __value = __item @@ -535,8 +535,8 @@ final case class IdTree( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -613,8 +613,8 @@ final case class LiteralTree( constant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -696,8 +696,8 @@ final case class MacroExpansionTree( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends 
dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -804,8 +804,8 @@ final case class OriginalTree( range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (range.isDefined) { val __value = range.get @@ -882,8 +882,8 @@ final case class SelectTree( id: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -986,8 +986,8 @@ final case class TypeApplyTree( typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala index 8f675e82b802..0b2a35a8e1cd 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala @@ -68,8 +68,8 @@ final case class TypeMessage( sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (sealedValue.typeRef.isDefined) { val __value = sealedValue.typeRef.get @@ -533,8 +533,8 @@ final case class TypeRef( typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -662,8 +662,8 @@ final case class SingleType( symbol: 
_root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -766,8 +766,8 @@ final case class ThisType( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -845,8 +845,8 @@ final case class SuperType( symbol: _root_.scala.Predef.String = "" ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -949,8 +949,8 @@ final case class ConstantType( constant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -1031,8 +1031,8 @@ final case class IntersectionType( types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 types.foreach { __item => val __value = dotty.tools.dotc.semanticdb.IntersectionType._typemapper_types.toBase(__item) @@ -1111,8 +1111,8 @@ final case class UnionType( types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 types.foreach { __item => val __value = dotty.tools.dotc.semanticdb.UnionType._typemapper_types.toBase(__item) @@ -1191,8 +1191,8 @@ final case class WithType( types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives 
CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 types.foreach { __item => val __value = dotty.tools.dotc.semanticdb.WithType._typemapper_types.toBase(__item) @@ -1272,8 +1272,8 @@ final case class StructuralType( declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -1376,8 +1376,8 @@ final case class AnnotatedType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 annotations.foreach { __item => val __value = __item @@ -1481,8 +1481,8 @@ final case class ExistentialType( declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -1585,8 +1585,8 @@ final case class UniversalType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -1688,8 +1688,8 @@ final case class ByNameType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -1770,8 +1770,8 @@ final case class RepeatedType( tpe: dotty.tools.dotc.semanticdb.Type = 
dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -1853,8 +1853,8 @@ final case class MatchType( cases: _root_.scala.Seq[dotty.tools.dotc.semanticdb.MatchType.CaseType] = _root_.scala.Seq.empty ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -1944,8 +1944,8 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s body: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 { @@ -2067,8 +2067,8 @@ final case class LambdaType( returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { @transient @sharable - private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private var __serializedSizeMemoized: _root_.scala.Int = 0 + private def __computeSerializedSize(): _root_.scala.Int = { var __size = 0 if (parameters.isDefined) { val __value = parameters.get diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index 1f7b65ba66a6..93ba1845e484 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -90,7 +90,7 @@ object Splicer { /** Checks that no symbol that was generated within the macro expansion has an out of scope reference */ def checkEscapedVariables(tree: Tree, expansionOwner: Symbol)(using Context): tree.type = new TreeTraverser { - private[this] var locals = Set.empty[Symbol] + private var locals = Set.empty[Symbol] private def markSymbol(sym: Symbol)(using Context): Unit = locals = locals + sym private def markDef(tree: Tree)(using Context): Unit = tree match { diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala index cde1a63f5293..916bdfa9dca3 100644 --- a/compiler/src/dotty/tools/dotc/util/Chars.scala +++ b/compiler/src/dotty/tools/dotc/util/Chars.scala @@ -28,7 +28,7 @@ object Chars: if (0 <= num && num < base) num else -1 } /** Buffer for creating '\ u XXXX' strings. 
*/ - private[this] val char2uescapeArray = Array[Char]('\\', 'u', 0, 0, 0, 0) + private val char2uescapeArray = Array[Char]('\\', 'u', 0, 0, 0, 0) /** Convert a character to a backslash-u escape */ def char2uescape(c: Char): String = { diff --git a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala index ec88b5880745..d7837d9763fe 100644 --- a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala +++ b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala @@ -15,8 +15,8 @@ import scala.util.chaining.* * Ported from scala.reflect.internal.util.ReusableInstance */ final class ReusableInstance[T <: AnyRef] private (make: => T) { - private[this] val cache = new ArrayBuffer[T](ReusableInstance.InitialSize).tap(_.addOne(make)) - private[this] var taken = 0 + private val cache = new ArrayBuffer[T](ReusableInstance.InitialSize).tap(_.addOne(make)) + private var taken = 0 inline def withInstance[R](action: T => R): R ={ if (taken == cache.size) diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index 9e6d5fe4796b..3a4d32614c82 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -214,7 +214,7 @@ final class FileZipArchive(jpath: JPath, release: Option[String]) extends ZipArc case _ => false } - private[this] var closeables: List[java.io.Closeable] = Nil + private var closeables: List[java.io.Closeable] = Nil override def close(): Unit = { closeables.foreach(_.close) closeables = Nil @@ -281,7 +281,7 @@ final class ManifestResources(val url: URL) extends ZipArchive(null, None) { } } - private[this] var closeables: List[java.io.Closeable] = Nil + private var closeables: List[java.io.Closeable] = Nil override def close(): Unit = { closeables.foreach(_.close()) closeables = Nil diff --git a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala index 2c0976fac1ac..a8cd36cba6bd 100644 --- a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala +++ b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala @@ -64,7 +64,7 @@ object ScalaClassLoader { def fromURLsParallelCapable(urls: Seq[URL], parent: ClassLoader | Null = null): URLClassLoader = new URLClassLoader(urls.toArray, if parent == null then bootClassLoader else parent) - @sharable private[this] val bootClassLoader: ClassLoader = + @sharable private val bootClassLoader: ClassLoader = if scala.util.Properties.isJavaAtLeast("9") then try ClassLoader.getSystemClassLoader.getParent diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index 4dfb61a59722..b27016045051 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -52,7 +52,7 @@ object SourceCode { if (flags.is(Flags.Param)) flagList += "param" if (flags.is(Flags.ParamAccessor)) flagList += "paramAccessor" if (flags.is(Flags.Private)) flagList += "private" - if (flags.is(Flags.PrivateLocal)) flagList += "private[this]" + if (flags.is(Flags.PrivateLocal)) flagList += "private" if (flags.is(Flags.Protected)) flagList += "protected" if (flags.is(Flags.Scala2x)) flagList += "scala2x" if (flags.is(Flags.Sealed)) flagList += "sealed" @@ -67,9 +67,9 @@ object SourceCode { import syntaxHighlight.* import quotes.reflect.* - private[this] val sb: StringBuilder = new StringBuilder 
+ private val sb: StringBuilder = new StringBuilder - private[this] var indent: Int = 0 + private var indent: Int = 0 private def indented(printIndented: => Unit): Unit = { indent += 1 printIndented @@ -1441,8 +1441,8 @@ object SourceCode { private def escapedString(str: String): String = str flatMap escapedChar - private[this] val names = collection.mutable.Map.empty[Symbol, String] - private[this] val namesIndex = collection.mutable.Map.empty[String, Int] + private val names = collection.mutable.Map.empty[Symbol, String] + private val namesIndex = collection.mutable.Map.empty[String, Int] private def splicedName(sym: Symbol): Option[String] = { if sym.owner.isClassDef then None diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index fa89c82fc7e7..0491660219b2 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -61,6 +61,7 @@ class CompilationTests { compileFile("tests/rewrites/rewrites3x.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/rewrites3x-fatal-warnings.scala", defaultOptions.and("-rewrite", "-source", "future-migration", "-Xfatal-warnings")), compileFile("tests/rewrites/with-type-operator.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), + compileFile("tests/rewrites/private-this.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/filtering-fors.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")), compileFile("tests/rewrites/refutable-pattern-bindings.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")), compileFile("tests/rewrites/i8982.scala", defaultOptions.and("-indent", "-rewrite")), diff --git a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala index 5c95da304966..e7f9c332aeeb 100644 --- a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala +++ b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala @@ -54,14 +54,14 @@ class DottyLanguageServer extends LanguageServer import lsp4j.jsonrpc.messages.{Either => JEither} import lsp4j._ - private[this] var rootUri: String = _ + private var rootUri: String = _ - private[this] var myClient: DottyClient = _ + private var myClient: DottyClient = _ def client: DottyClient = myClient - private[this] var myDrivers: mutable.Map[ProjectConfig, InteractiveDriver] = _ + private var myDrivers: mutable.Map[ProjectConfig, InteractiveDriver] = _ - private[this] var myDependentProjects: mutable.Map[ProjectConfig, mutable.Set[ProjectConfig]] = _ + private var myDependentProjects: mutable.Map[ProjectConfig, mutable.Set[ProjectConfig]] = _ def drivers: Map[ProjectConfig, InteractiveDriver] = thisServer.synchronized { if myDrivers == null then diff --git a/language-server/src/dotty/tools/languageserver/worksheet/Evaluator.scala b/language-server/src/dotty/tools/languageserver/worksheet/Evaluator.scala index b1949f201d15..1e3f4c886816 100644 --- a/language-server/src/dotty/tools/languageserver/worksheet/Evaluator.scala +++ b/language-server/src/dotty/tools/languageserver/worksheet/Evaluator.scala @@ -23,7 +23,7 @@ private object Evaluator { * The most recent Evaluator that was used. It can be reused if the user classpath hasn't changed * between two calls. 
*/ - private[this] var previousEvaluator: Option[(String, Evaluator)] = None + private var previousEvaluator: Option[(String, Evaluator)] = None /** * Get a (possibly reused) Evaluator and set cancel checker. diff --git a/language-server/src/dotty/tools/languageserver/worksheet/InputStreamConsumer.scala b/language-server/src/dotty/tools/languageserver/worksheet/InputStreamConsumer.scala index baac4df9e88a..9f1fdabe1707 100644 --- a/language-server/src/dotty/tools/languageserver/worksheet/InputStreamConsumer.scala +++ b/language-server/src/dotty/tools/languageserver/worksheet/InputStreamConsumer.scala @@ -4,7 +4,7 @@ import java.io.{InputStream, IOException} import java.util.Scanner class InputStreamConsumer(in: InputStream) { - private[this] val scanner = + private val scanner = new Scanner(in).useDelimiter(InputStreamConsumer.delimiter) /** Finds and returns the next complete token from this input stream. diff --git a/language-server/test/dotty/tools/languageserver/util/Code.scala b/language-server/test/dotty/tools/languageserver/util/Code.scala index f88dff70ecaf..ae414e781164 100644 --- a/language-server/test/dotty/tools/languageserver/util/Code.scala +++ b/language-server/test/dotty/tools/languageserver/util/Code.scala @@ -190,7 +190,7 @@ object Code { } object Project { - private[this] val count = new java.util.concurrent.atomic.AtomicInteger() + private val count = new java.util.concurrent.atomic.AtomicInteger() private def freshName: String = s"project${count.incrementAndGet()}" /** diff --git a/language-server/test/dotty/tools/languageserver/util/CodeRange.scala b/language-server/test/dotty/tools/languageserver/util/CodeRange.scala index 4f2dda960bcb..2e5398601458 100644 --- a/language-server/test/dotty/tools/languageserver/util/CodeRange.scala +++ b/language-server/test/dotty/tools/languageserver/util/CodeRange.scala @@ -14,7 +14,7 @@ import PositionContext._ * @param end The end marker. 
*/ case class CodeRange(start: CodeMarker, end: CodeMarker) { - private[this] var checked = false + private var checked = false def check(): PosCtx[Unit] = { if (!checked) { assert(start.file == end.file, s"$start and $end where not in the same file") diff --git a/language-server/test/dotty/tools/languageserver/util/PositionContext.scala b/language-server/test/dotty/tools/languageserver/util/PositionContext.scala index d99e0aa3a408..10629d900c92 100644 --- a/language-server/test/dotty/tools/languageserver/util/PositionContext.scala +++ b/language-server/test/dotty/tools/languageserver/util/PositionContext.scala @@ -4,8 +4,8 @@ import dotty.tools.languageserver.util.embedded.CodeMarker import dotty.tools.languageserver.util.server.TestFile class PositionContext(positionMap: Map[CodeMarker, (TestFile, Int, Int)]) { - private[this] var lastKey: CodeMarker = _ - private[this] var lastValue: (TestFile, Int, Int) = _ + private var lastKey: CodeMarker = _ + private var lastValue: (TestFile, Int, Int) = _ def positionOf(pos: CodeMarker): (TestFile, Int, Int) = { if (lastKey eq pos) lastValue else { diff --git a/language-server/test/dotty/tools/languageserver/util/server/TestClient.scala b/language-server/test/dotty/tools/languageserver/util/server/TestClient.scala index bd7275f4a0c4..efaa05781160 100644 --- a/language-server/test/dotty/tools/languageserver/util/server/TestClient.scala +++ b/language-server/test/dotty/tools/languageserver/util/server/TestClient.scala @@ -14,7 +14,7 @@ import scala.collection.mutable.Buffer class TestClient extends DottyClient { class Log[T] { - private[this] val log = Buffer.empty[T] + private val log = Buffer.empty[T] def +=(elem: T): this.type = { log += elem; this } def get: List[T] = log.toList diff --git a/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala b/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala index 06e0a7abad7a..749b92daba8c 100644 --- a/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala +++ b/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala @@ -21,7 +21,7 @@ class TestServer(testFolder: Path, projects: List[Project]) { init() - private[this] def init(): InitializeResult = { + private def init(): InitializeResult = { var compiledProjects: Set[Project] = Set.empty /** Compile the dependencies of the given project, and then the project. */ diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala index 080751af9464..ea369539d021 100644 --- a/library/src/scala/runtime/LazyVals.scala +++ b/library/src/scala/runtime/LazyVals.scala @@ -25,12 +25,12 @@ object LazyVals { throwInitializationException() } - private[this] val base: Int = { + private val base: Int = { val processors = java.lang.Runtime.getRuntime.nn.availableProcessors() 8 * processors * processors } - private[this] val monitors: Array[Object] = + private val monitors: Array[Object] = Array.tabulate(base)(_ => new Object) private def getMonitor(obj: Object, fieldId: Int = 0) = { diff --git a/project/Build.scala b/project/Build.scala index 13ebe9c028ae..fbd49edae589 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -579,6 +579,9 @@ object Build { // Note: bench/profiles/projects.yml should be updated accordingly. 
Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Ysafe-init"), + // Use source 3.3 to avoid fatal migration warnings on scalajs-ir + scalacOptions ++= Seq("-source", "3.3"), + // Generate compiler.properties, used by sbt (Compile / resourceGenerators) += Def.task { import java.util._ diff --git a/sbt-test/compilerReporter/i14576/build.sbt b/sbt-test/compilerReporter/i14576/build.sbt index 9831c23c103e..f9f211b24977 100644 --- a/sbt-test/compilerReporter/i14576/build.sbt +++ b/sbt-test/compilerReporter/i14576/build.sbt @@ -10,7 +10,7 @@ lazy val resetMessages = taskKey[Unit]("empties the messages list") lazy val root = (project in file(".")) .settings( - scalacOptions += "-source:future", + scalacOptions += "-source:future-migration", extraAppenders := { s => Seq(ConsoleAppender(FakePrintWriter)) }, assertFeatureSummary := { assert { @@ -24,7 +24,7 @@ lazy val root = (project in file(".")) }, assertDeprecationSummary := { assert { - FakePrintWriter.messages.exists(_.contains("there were 3 deprecation warnings; re-run with -deprecation for details")) + FakePrintWriter.messages.exists(_.contains("there were 2 deprecation warnings; re-run with -deprecation for details")) } }, assertNoDeprecationSummary := { diff --git a/scaladoc-testcases/src/example/level2/Documentation.scala b/scaladoc-testcases/src/example/level2/Documentation.scala index bbfd31669f3c..f118b2ad150e 100644 --- a/scaladoc-testcases/src/example/level2/Documentation.scala +++ b/scaladoc-testcases/src/example/level2/Documentation.scala @@ -163,7 +163,7 @@ abstract class Documentation[T, A <: Int, B >: String, -X, +Y](c1: String, val c def table(foo: String) = ??? protected[example] val valWithScopeModifier = ??? - protected[this] val valWithScopeModifierThis = ??? + protected val valWithScopeModifierThis = ??? var iAmAVar = ??? 
} diff --git a/scaladoc-testcases/src/tests/visibility.scala b/scaladoc-testcases/src/tests/visibility.scala index b474eb15eebd..b0ca8d041c74 100644 --- a/scaladoc-testcases/src/tests/visibility.scala +++ b/scaladoc-testcases/src/tests/visibility.scala @@ -1,6 +1,8 @@ package tests package visibility +import scala.language.`3.3` // to avoid migration warnings/errors of private[this] + private object PrivateTopLevelObject //unexpected private[tests] object PrivateInOuterPackageTopLevelObject //unexpected diff --git a/staging/src/scala/quoted/staging/Compiler.scala b/staging/src/scala/quoted/staging/Compiler.scala index fbe6a3915a08..7f380dabd4e2 100644 --- a/staging/src/scala/quoted/staging/Compiler.scala +++ b/staging/src/scala/quoted/staging/Compiler.scala @@ -26,9 +26,9 @@ object Compiler: def make(appClassloader: ClassLoader)(implicit settings: Settings): Compiler = new Compiler: - private[this] val driver: QuoteDriver = new QuoteDriver(appClassloader) + private val driver: QuoteDriver = new QuoteDriver(appClassloader) - private[this] var running = false + private var running = false def run[T](exprBuilder: Quotes => Expr[T]): T = synchronized { try diff --git a/staging/src/scala/quoted/staging/QuoteCompiler.scala b/staging/src/scala/quoted/staging/QuoteCompiler.scala index 9fee0e41efd1..308ef4ff86e4 100644 --- a/staging/src/scala/quoted/staging/QuoteCompiler.scala +++ b/staging/src/scala/quoted/staging/QuoteCompiler.scala @@ -33,7 +33,7 @@ import scala.quoted.{Expr, Quotes, Type} private class QuoteCompiler extends Compiler: /** Either `Left` with name of the classfile generated or `Right` with the value contained in the expression */ - private[this] var result: Either[String, Any] = null + private var result: Either[String, Any] = null override protected def frontendPhases: List[List[Phase]] = List(List(new QuotedFrontend)) diff --git a/staging/src/scala/quoted/staging/QuoteDriver.scala b/staging/src/scala/quoted/staging/QuoteDriver.scala index 8de0cd218b23..93e19f195e00 100644 --- a/staging/src/scala/quoted/staging/QuoteDriver.scala +++ b/staging/src/scala/quoted/staging/QuoteDriver.scala @@ -21,7 +21,7 @@ import scala.annotation.tailrec private class QuoteDriver(appClassloader: ClassLoader) extends Driver: import tpd._ - private[this] val contextBase: ContextBase = new ContextBase + private val contextBase: ContextBase = new ContextBase def run[T](exprBuilder: Quotes => Expr[T], settings: Compiler.Settings): T = val outDir: AbstractFile = diff --git a/tests/init/neg/function11.scala b/tests/init/neg/function11.scala index cb6626291214..278192d003aa 100644 --- a/tests/init/neg/function11.scala +++ b/tests/init/neg/function11.scala @@ -1,5 +1,5 @@ final class Capture { - private[this] var m: Boolean = false + private var m: Boolean = false (0 to 10).foreach { i => // error f() @@ -14,7 +14,7 @@ final class Capture { } final class Capture2 { - private[this] var m: Boolean = false + private var m: Boolean = false (0 to 10).foreach { i => f() diff --git a/tests/neg-custom-args/captures/leaking-iterators.scala b/tests/neg-custom-args/captures/leaking-iterators.scala index 50447874a3c3..99d50a27336b 100644 --- a/tests/neg-custom-args/captures/leaking-iterators.scala +++ b/tests/neg-custom-args/captures/leaking-iterators.scala @@ -36,7 +36,7 @@ trait Iterator[+A] extends IterableOnce[A]: def ++[B >: A](xs: IterableOnce[B]^): Iterator[B]^{this, xs} end Iterator -private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A]^): +private final class ConcatIteratorCell[A](head: => 
IterableOnce[A]^): def headIterator: Iterator[A]^{this} = head.iterator def usingLogFile[sealed R](op: FileOutputStream^ => R): R = diff --git a/tests/neg-scalajs/js-native-members.check b/tests/neg-scalajs/js-native-members.check index 466dbc9d2063..3fca2a9003c5 100644 --- a/tests/neg-scalajs/js-native-members.check +++ b/tests/neg-scalajs/js-native-members.check @@ -1,3 +1,33 @@ +-- Warning: tests/neg-scalajs/js-native-members.scala:24:16 ------------------------------------------------------------ +24 | private[this] def this(x: Int) = this() // ok + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Warning: tests/neg-scalajs/js-native-members.scala:28:16 ------------------------------------------------------------ +28 | private[this] val a: Int = js.native // error + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Warning: tests/neg-scalajs/js-native-members.scala:32:16 ------------------------------------------------------------ +32 | private[this] var d: Int = js.native // error + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Warning: tests/neg-scalajs/js-native-members.scala:36:16 ------------------------------------------------------------ +36 | private[this] def g(): Int = js.native // error + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Warning: tests/neg-scalajs/js-native-members.scala:49:25 ------------------------------------------------------------ +49 | class X3 private[this] () extends js.Object { // ok + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. -- Error: tests/neg-scalajs/js-native-members.scala:9:24 --------------------------------------------------------------- 9 | def this(z: String) = this(z.length, z) // error | ^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-scalajs/js-non-native-members-qualified-private.check b/tests/neg-scalajs/js-non-native-members-qualified-private.check index 915a1bbe89eb..fb06b91f98f8 100644 --- a/tests/neg-scalajs/js-non-native-members-qualified-private.check +++ b/tests/neg-scalajs/js-non-native-members-qualified-private.check @@ -1,3 +1,9 @@ +-- Warning: tests/neg-scalajs/js-non-native-members-qualified-private.scala:52:28 -------------------------------------- +52 | class B private[this] () extends js.Object // ok + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. 
+ | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. -- Error: tests/neg-scalajs/js-non-native-members-qualified-private.scala:6:32 ----------------------------------------- 6 | private[Enclosing1] def foo(i: Int): Int = i // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg/i15503c.scala b/tests/neg/i15503c.scala index b93ce8825114..040dae43a2c9 100644 --- a/tests/neg/i15503c.scala +++ b/tests/neg/i15503c.scala @@ -1,4 +1,4 @@ -//> using options -Xfatal-warnings -Wunused:privates +//> using options -Xfatal-warnings -Wunused:privates -source:3.3 trait C class A: self: C => // OK diff --git a/tests/neg/i16639a.scala b/tests/neg/i16639a.scala index 66db96901e74..5b348d594f5b 100644 --- a/tests/neg/i16639a.scala +++ b/tests/neg/i16639a.scala @@ -1,4 +1,4 @@ -//> using options -Xfatal-warnings -Wunused:all +//> using options -Xfatal-warnings -Wunused:all -source:3.3 // class Bippy(a: Int, b: Int) { private def this(c: Int) = this(c, c) // warn /Dotty:NoWarn diff --git a/tests/neg/i17612a.scala b/tests/neg/i17612a.scala index 1145cd76edc0..099b528965e1 100644 --- a/tests/neg/i17612a.scala +++ b/tests/neg/i17612a.scala @@ -1,4 +1,4 @@ -//> using options -Xfatal-warnings -Xlint:private-shadow +//> using options -Xfatal-warnings -Xlint:private-shadow -source:3.3 object i17612a: class Base(var x: Int, val y: Int, var z: Int): diff --git a/tests/neg/i17612b/i17612b.scala b/tests/neg/i17612b/i17612b.scala index b59352f562d0..d16feb240c2a 100644 --- a/tests/neg/i17612b/i17612b.scala +++ b/tests/neg/i17612b/i17612b.scala @@ -1,4 +1,4 @@ -//> using options -Xfatal-warnings -Xlint:private-shadow +//> using options -Xfatal-warnings -Xlint:private-shadow -source:3.3 object i17612b: diff --git a/tests/neg/nonunit-statement.scala b/tests/neg/nonunit-statement.scala index 73c190619187..94346031077c 100644 --- a/tests/neg/nonunit-statement.scala +++ b/tests/neg/nonunit-statement.scala @@ -1,4 +1,4 @@ -//> using options -Xfatal-warnings -Wnonunit-statement -Wvalue-discard +//> using options -Xfatal-warnings -Wnonunit-statement -Wvalue-discard -source:3.3 import collection.ArrayOps import collection.mutable.{ArrayBuilder, LinkedHashSet, ListBuffer} import concurrent._ diff --git a/tests/neg/private-this-3.4.check b/tests/neg/private-this-3.4.check new file mode 100644 index 000000000000..29c2fe909ede --- /dev/null +++ b/tests/neg/private-this-3.4.check @@ -0,0 +1,12 @@ +-- Error: tests/neg/private-this-3.4.scala:6:16 ------------------------------------------------------------------------ +6 | private[this] def foo: Int = ??? // error: migration warning + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- Error: tests/neg/private-this-3.4.scala:7:18 ------------------------------------------------------------------------ +7 | protected[this] def bar: Int = ??? // error: migration warning + | ^ + | The [this] qualifier will be deprecated in the future; it should be dropped. + | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
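For readers scanning the check files, the gating works as follows: the diagnostic above is a migration warning from Scala 3.4 onwards (recorded as an error here only because the test compiles with -Werror), and it becomes a hard error under -source future. A minimal sketch of both modes, assuming a standalone file compiled with plain scalac; the class and member names are illustrative, not part of the patch:

import scala.language.`3.4`   // or scala.language.future for a hard error

class Box:
  private[this] def size: Int = ???   // 3.4: migration warning; future: error
  private def ok: Int = ???           // the suggested fix: drop the `[this]` qualifier

Opting back into the previous language version (import scala.language.`3.3`) silences the diagnostic entirely, which is what the visibility.scala testcase earlier in this patch does.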
diff --git a/tests/neg/private-this-3.4.scala b/tests/neg/private-this-3.4.scala new file mode 100644 index 000000000000..b198e954e41b --- /dev/null +++ b/tests/neg/private-this-3.4.scala @@ -0,0 +1,7 @@ +//> using options -Werror + +import scala.language.`3.4` + +class Foo: + private[this] def foo: Int = ??? // error: migration warning + protected[this] def bar: Int = ??? // error: migration warning diff --git a/tests/neg/private-this-future-migration.scala b/tests/neg/private-this-future-migration.scala new file mode 100644 index 000000000000..7e3da2be72e2 --- /dev/null +++ b/tests/neg/private-this-future-migration.scala @@ -0,0 +1,7 @@ +//> using options -Werror + +import scala.language.`future-migration` + +class Foo: + private[this] def foo: Int = ??? // error: migration warning + protected[this] def bar: Int = ??? // error: migration warning diff --git a/tests/neg/private-this-future.scala b/tests/neg/private-this-future.scala new file mode 100644 index 000000000000..d94cbe16abad --- /dev/null +++ b/tests/neg/private-this-future.scala @@ -0,0 +1,5 @@ +import scala.language.future + +class Foo: + private[this] def foo: Int = ??? // error + protected[this] def bar: Int = ??? // error diff --git a/tests/pos/i6290.scala b/tests/pos/i6290.scala index d0da0ea328e4..bc3646f1e1d2 100644 --- a/tests/pos/i6290.scala +++ b/tests/pos/i6290.scala @@ -1,4 +1,4 @@ -//> using options -Xfatal-warnings -deprecation -feature +//> using options -Xfatal-warnings -deprecation -feature -source:3.3 class TC { type T } diff --git a/tests/pos/private-this-future-migration.scala b/tests/pos/private-this-future-migration.scala new file mode 100644 index 000000000000..cdcc6a2c0321 --- /dev/null +++ b/tests/pos/private-this-future-migration.scala @@ -0,0 +1,5 @@ +import scala.language.`future-migration` + +class Foo: + private[this] def foo: Int = ??? // warn + protected[this] def bar: Int = ??? // warn diff --git a/tests/pos/private-this.scala b/tests/pos/private-this.scala new file mode 100644 index 000000000000..18de91df72cb --- /dev/null +++ b/tests/pos/private-this.scala @@ -0,0 +1,3 @@ +class Foo: + private[this] def foo: Int = ??? + protected[this] def bar: Int = ??? diff --git a/tests/rewrites/private-this.check b/tests/rewrites/private-this.check new file mode 100644 index 000000000000..1a6443cdf152 --- /dev/null +++ b/tests/rewrites/private-this.check @@ -0,0 +1,12 @@ +class Foo: + private def foo1: Int = ??? + private def foo2: Int = ??? + private def foo3: Int = ??? + private def foo4: Int = ??? + private def foo5: Int = ??? + + protected def bar1: Int = ??? + protected def bar2: Int = ??? + protected def bar3: Int = ??? + protected def bar4: Int = ??? + protected def bar5: Int = ??? diff --git a/tests/rewrites/private-this.scala b/tests/rewrites/private-this.scala new file mode 100644 index 000000000000..5f5b71f26abe --- /dev/null +++ b/tests/rewrites/private-this.scala @@ -0,0 +1,12 @@ +class Foo: + private[this] def foo1: Int = ??? + private[ this] def foo2: Int = ??? + private[this ] def foo3: Int = ??? + private[ this ] def foo4: Int = ??? + private [this] def foo5: Int = ??? + + protected[this] def bar1: Int = ??? + protected[ this] def bar2: Int = ??? + protected[this ] def bar3: Int = ??? + protected[ this ] def bar4: Int = ??? + protected [this] def bar5: Int = ??? 
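The rewrite pair above doubles as a specification of the automatic migration: every whitespace variant of the qualifier (private[ this ], private [this], and so on) collapses to the bare modifier. A hedged sketch of applying it by hand, with a hypothetical file name and the exact flags named in the diagnostic:

// Bar.scala, before running: scalac -rewrite -source 3.4-migration Bar.scala
class Bar:
  protected[this] var count: Int = 0

// Bar.scala, after the rewrite: only the qualifier is dropped, nothing else changes
class Bar:
  protected var count: Int = 0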
diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index e05a645c0141..8e790621ba07 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -8,7 +8,7 @@ Text => empty Language => Scala Symbols => 9 entries Occurrences => 19 entries -Diagnostics => 2 entries +Diagnostics => 4 entries Symbols: example/Access# => class Access extends Object { self: Access => +8 decls } @@ -44,7 +44,13 @@ Occurrences: Diagnostics: [3:14..3:16): [warning] unused private member +[4:16..4:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. [4:20..4:22): [warning] unused private member +[7:18..7:18): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. expect/Advanced.scala --------------------- @@ -559,7 +565,7 @@ Text => empty Language => Scala Symbols => 108 entries Occurrences => 127 entries -Diagnostics => 4 entries +Diagnostics => 6 entries Synthetics => 2 entries Symbols: @@ -803,7 +809,13 @@ Occurrences: Diagnostics: [18:9..18:10): [warning] unused explicit parameter +[20:23..20:23): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. [20:27..20:28): [warning] unused explicit parameter +[22:23..22:23): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. [22:27..22:28): [warning] unused explicit parameter [24:10..24:11): [warning] unused explicit parameter @@ -3914,7 +3926,7 @@ Text => empty Language => Scala Symbols => 42 entries Occurrences => 129 entries -Diagnostics => 1 entries +Diagnostics => 5 entries Symbols: example/ValUsages. => final object ValUsages extends Object { self: ValUsages.type => +2 decls } @@ -4093,6 +4105,18 @@ Occurrences: Diagnostics: [2:20..2:21): [warning] unused explicit parameter +[5:16..5:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[7:16..7:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[12:16..12:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. +See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[13:16..13:16): [warning] The [this] qualifier will be deprecated in the future; it should be dropped. 
+See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +This construct can be rewritten automatically under -rewrite -source 3.4-migration. expect/Vararg.scala ------------------- From f429fa53f8bb407dbf8d3a34a8a60ef020b6ad61 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 8 Nov 2023 09:05:38 +0100 Subject: [PATCH 165/216] Update TASTy MiMa to 1.0.0 --- project/TastyMiMaFilters.scala | 15 ++++++++++++--- project/plugins.sbt | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/project/TastyMiMaFilters.scala b/project/TastyMiMaFilters.scala index 1573e54dd4eb..f023fbe63ef3 100644 --- a/project/TastyMiMaFilters.scala +++ b/project/TastyMiMaFilters.scala @@ -55,6 +55,18 @@ object TastyMiMaFilters { ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.TableStepperBase.i0_="), ProblemMatcher.make(ProblemKind.NewAbstractMember, "scala.collection.convert.impl.TableStepperBase.maxLength_="), + // Problem: ??? + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.nn"), // The member scala.Predef.nn with signature (1,java.lang.Object):java.lang.Object does not have a correspondant in current version + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.ne"), // The member scala.Predef.ne with signature (java.lang.Object,java.lang.Object):scala.Boolean does not have a correspondant in current version + ProblemMatcher.make(ProblemKind.MissingTermMember, "scala.Predef.eq"), // The member scala.Predef.eq with signature (java.lang.Object,java.lang.Object):scala.Boolean does not have a correspondant in current version + + // Problem: protected lazy val (processThread, (futureThread, futureValue), destroyer) = { ... } + // https://github.com/scala/scala/blob/cff8a9af4da67658d8e1e32f929e1aff03ffa384/src/library/scala/sys/process/ProcessImpl.scala#L99C5-L99C83 + ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.destroyer"), // before: lazy val; after: def + ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.futureThread"), // before: lazy val; after: def + ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.processThread"), // before: lazy val; after: def + ProblemMatcher.make(ProblemKind.IncompatibleKindChange, "scala.sys.process.ProcessImpl.CompoundProcess.futureValue"), // before: lazy val; after: def + // Problem: ??? // Member is defined and has explicit result type // https://github.com/scala/scala/blob/2.13.x/src/library/scala/collection/convert/JavaCollectionWrappers.scala#L66-L71 @@ -66,9 +78,6 @@ object TastyMiMaFilters { ProblemMatcher.make(ProblemKind.InternalError, "scala.collection.SeqView.prependedAll"), ProblemMatcher.make(ProblemKind.InternalError, "scala.concurrent.duration.package.*"), - // Problem? 
Very complicated signature - ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.generic.IsMap.mapOpsIsMap"), // The symbol scala.collection.generic.IsMap.mapOpsIsMap has an incompatible type in current version: before: [CC0 <: ([X, Y] =>> scala.collection.MapOps[X, Y, ([X, Y] =>> scala.collection.Iterable[scala.Tuple2[X, Y]]), CC0[X, Y]]), K0, V0](((scala.collection.generic.IsMap[CC0[K0, V0]] { type V = V0 }) { type C = CC0[.this.K, .this.V] }) { type K = K0 }); after: [CC0 >: ([X, Y] =>> scala.Nothing) <: ([X, Y] =>> scala.collection.MapOps[X, Y, IsMap$.this.Tupled[([A] =>> scala.collection.Iterable[A])]#Ap, CC0[X, Y]]), K0, V0]{ 726875885 => (((scala.collection.generic.IsMap[CC0[K0, V0]] { type K = K0 }) { type V = V0 }) { type C = CC0[726875885.K, 726875885.V] }) } - // Problems introduced in 2.13.11: Implicit classes with complex signatures ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFromLowPriority1.buildFromSortedSetOps"), // The symbol scala.collection.BuildFromLowPriority1.buildFromSortedSetOps has an incompatible type in current version: before: [CC <: ([X] =>> (scala.collection.SortedSet[X] & scala.collection.SortedSetOps[X, CC, ?])), A0, A](evidence$3: scala.package.Ordering[A])scala.collection.BuildFrom[(CC[A0] & scala.collection.SortedSet[A0]), A, (CC[A] & scala.collection.SortedSet[A])]; after: [CC >: ([X] =>> scala.Nothing) <: ([X] =>> scala.&[scala.collection.SortedSet[X], scala.collection.SortedSetOps[X, CC, ?]]), A0, A](evidence$3: scala.package.Ordering[A])scala.collection.BuildFrom[scala.&[CC[A0], scala.collection.SortedSet[A0]], A, scala.&[CC[A], scala.collection.SortedSet[A]]] ProblemMatcher.make(ProblemKind.IncompatibleTypeChange, "scala.collection.BuildFrom.buildFromMapOps"), // The symbol scala.collection.BuildFrom.buildFromMapOps has an incompatible type in current version: before: [CC <: ([X, Y] =>> (scala.collection.Map[X, Y] & scala.collection.MapOps[X, Y, CC, ?])), K0, V0, K, V]scala.collection.BuildFrom[(CC[K0, V0] & scala.collection.Map[K0, V0]), scala.Tuple2[K, V], (CC[K, V] & scala.collection.Map[K, V])]; after: [CC >: ([X, Y] =>> scala.Nothing) <: ([X, Y] =>> scala.&[scala.collection.Map[X, Y], scala.collection.MapOps[X, Y, CC, ?]]), K0, V0, K, V]scala.collection.BuildFrom[scala.&[CC[K0, V0], scala.collection.Map[K0, V0]], scala.Tuple2[K, V], scala.&[CC[K, V], scala.collection.Map[K, V]]] diff --git a/project/plugins.sbt b/project/plugins.sbt index 87802edf302a..c94d4d5afe8d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -20,4 +20,4 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") -addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "0.4.0") +addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "1.0.0") From 61917a67c9f9d4c1886dcf340eb440e65d3c82ab Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Fri, 3 Nov 2023 16:09:25 +0100 Subject: [PATCH 166/216] Deprecation warning for `with` type operator in 3.4 --- .../scala/dotty/communitybuild/projects.scala | 2 +- .../dotty/tools/dotc/parsing/Parsers.scala | 9 ++++---- compiler/test-resources/repl/i6643 | 9 ++++++-- compiler/test-resources/type-printer/infix | 21 +++++++++++++++++++ tests/neg-scalajs/js-native-members.check | 7 +++++++ ...constructorof-error-in-prepjsinterop.check | 14 +++++++++++++ ...onstructortag-error-in-prepjsinterop.check | 14 +++++++++++++ tests/neg/i2887b.scala | 2 +- tests/neg/i8736.scala | 6 +++--- .../with-type-operator-future-migration.check | 
2 +- tests/pos-deep-subtype/3324h.scala | 4 +++- tests/pos/with-type-operator-3.3.scala | 5 +++++ ...=> with-type-operator-3.4-migration.scala} | 2 +- .../with-type-operator-future-migration.scala | 3 --- tests/semanticdb/metac.expect | 11 +++++++++- 15 files changed, 93 insertions(+), 18 deletions(-) create mode 100644 tests/pos/with-type-operator-3.3.scala rename tests/pos/{with-type-operator.scala => with-type-operator-3.4-migration.scala} (51%) delete mode 100644 tests/pos/with-type-operator-future-migration.scala diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index 974800cdcce1..f0a048145099 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -404,7 +404,7 @@ object projects: project = "zio", sbtTestCommand = "testJVMDotty", sbtDocCommand = forceDoc("coreJVM"), - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Xcheck-macros"), + scalacOptions = "-source" :: "3.3" :: SbtCommunityProject.scalacOptions.filter(_ != "-Xcheck-macros"), dependencies =List(izumiReflect) ) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index d479c7de8cc7..03d1bafc11f8 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -1757,11 +1757,12 @@ object Parsers { t else val withSpan = Span(withOffset, withOffset + 4) - report.errorOrMigrationWarning( - DeprecatedWithOperator(rewriteNotice(`future-migration`)), + report.gradualErrorOrMigrationWarning( + DeprecatedWithOperator(rewriteNotice(`3.4-migration`)), source.atSpan(withSpan), - from = future) - if sourceVersion == `future-migration` then + warnFrom = `3.4`, + errorFrom = future) + if sourceVersion.isMigrating && sourceVersion.isAtLeast(`3.4-migration`) then patch(source, withSpan, "&") atSpan(startOffset(t)) { makeAndType(t, withType()) } else t diff --git a/compiler/test-resources/repl/i6643 b/compiler/test-resources/repl/i6643 index e139ae9f7f94..ff482d8ce5f1 100644 --- a/compiler/test-resources/repl/i6643 +++ b/compiler/test-resources/repl/i6643 @@ -1,7 +1,12 @@ scala> import scala.collection._ - scala>:type 1 Int - scala> object IterableTest { def g[CC[_] <: Iterable[_] with IterableOps[_, _, _]](from: CC[Int]): IterableFactory[CC] = ??? } +1 warning found +-- [E003] Syntax Warning: ------------------------------------------------------ +1 | object IterableTest { def g[CC[_] <: Iterable[_] with IterableOps[_, _, _]](from: CC[Int]): IterableFactory[CC] = ??? } + | ^^^^ + | with as a type operator has been deprecated; use & instead + | + | longer explanation available when compiling with `-explain` // defined object IterableTest diff --git a/compiler/test-resources/type-printer/infix b/compiler/test-resources/type-printer/infix index a7904ae9ec43..2fe2864ad9fe 100644 --- a/compiler/test-resources/type-printer/infix +++ b/compiler/test-resources/type-printer/infix @@ -48,10 +48,31 @@ def foo: Int && Boolean & String scala> def foo: Int && (Boolean & String) = ??? def foo: Int && (Boolean & String) scala> def foo: Int && (Boolean with String) = ??? +1 warning found +-- [E003] Syntax Warning: ------------------------------------------------------ +1 | def foo: Int && (Boolean with String) = ??? 
+ | ^^^^ + | with as a type operator has been deprecated; use & instead + | + | longer explanation available when compiling with `-explain` def foo: Int && (Boolean & String) scala> def foo: (Int && Boolean) with String = ??? +1 warning found +-- [E003] Syntax Warning: ------------------------------------------------------ +1 | def foo: (Int && Boolean) with String = ??? + | ^^^^ + | with as a type operator has been deprecated; use & instead + | + | longer explanation available when compiling with `-explain` def foo: Int && Boolean & String scala> def foo: Int && Boolean with String = ??? +1 warning found +-- [E003] Syntax Warning: ------------------------------------------------------ +1 | def foo: Int && Boolean with String = ??? + | ^^^^ + | with as a type operator has been deprecated; use & instead + | + | longer explanation available when compiling with `-explain` def foo: Int && (Boolean & String) scala> def foo: Int && Boolean | String = ??? def foo: Int && Boolean | String diff --git a/tests/neg-scalajs/js-native-members.check b/tests/neg-scalajs/js-native-members.check index 3fca2a9003c5..11acee62af90 100644 --- a/tests/neg-scalajs/js-native-members.check +++ b/tests/neg-scalajs/js-native-members.check @@ -28,6 +28,13 @@ | The [this] qualifier will be deprecated in the future; it should be dropped. | See: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html | This construct can be rewritten automatically under -rewrite -source 3.4-migration. +-- [E003] Syntax Warning: tests/neg-scalajs/js-native-members.scala:58:44 ---------------------------------------------- +58 | def assign[T, U](target: T, source: U): T with U = js.native // ok + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-scalajs/js-native-members.scala:9:24 --------------------------------------------------------------- 9 | def this(z: String) = this(z.length, z) // error | ^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check index 7687543ea75f..fe55c0caee52 100644 --- a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check @@ -1,3 +1,17 @@ +-- [E003] Syntax Warning: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:16:41 ------------------------- +16 | val c = js.constructorOf[NativeJSClass with NativeJSTrait] // error + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. + | + | longer explanation available when compiling with `-explain` +-- [E003] Syntax Warning: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:22:35 ------------------------- +22 | val g = js.constructorOf[JSClass with JSTrait] // error + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
+ | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:13:27 ----------------------------------------- 13 | val a = js.constructorOf[NativeJSTrait] // error | ^^^^^^^^^^^^^ diff --git a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check index 142de318efd3..df09d5b1953d 100644 --- a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check @@ -1,3 +1,17 @@ +-- [E003] Syntax Warning: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:16:42 ------------------------ +16 | val c = js.constructorTag[NativeJSClass with NativeJSTrait] // error + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. + | + | longer explanation available when compiling with `-explain` +-- [E003] Syntax Warning: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:22:36 ------------------------ +22 | val g = js.constructorTag[JSClass with JSTrait] // error + | ^^^^ + | with as a type operator has been deprecated; use & instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:13:42 ---------------------------------------- 13 | val a = js.constructorTag[NativeJSTrait] // error | ^ diff --git a/tests/neg/i2887b.scala b/tests/neg/i2887b.scala index 649d869678cb..b41a392271e2 100644 --- a/tests/neg/i2887b.scala +++ b/tests/neg/i2887b.scala @@ -4,7 +4,7 @@ trait C { type M <: B } trait D { type M >: A } object Test { - def test(x: C with D): Unit = { + def test(x: C & D): Unit = { def foo(a: A, b: B)(z: a.S[b.I,a.I][b.S[a.I,a.I]]) = z def bar(a: A, y: x.M) = foo(a,y) def baz(a: A) = bar(a, a) diff --git a/tests/neg/i8736.scala b/tests/neg/i8736.scala index dc2fa1821791..9724e5f81b2a 100644 --- a/tests/neg/i8736.scala +++ b/tests/neg/i8736.scala @@ -14,13 +14,13 @@ object App extends App { def field[V](s: String)(v: V): Rec[s.type, V] = Rec0(Map(s -> v)).asInstanceOf[Rec[s.type, V]] implicit class RecOps[R <: Rec0[_]](has: R) { - def +[K1 <: String, V1](that: Rec[K1, V1]): R with Rec[K1, V1] = Rec0(has.map ++ that.map).asInstanceOf[R with Rec[K1, V1]] + def +[K1 <: String, V1](that: Rec[K1, V1]): R & Rec[K1, V1] = Rec0(has.map ++ that.map).asInstanceOf[R & Rec[K1, V1]] } def rec: Rec["k", String] - with Rec["v", Int] - with Rec["z", Boolean] + & Rec["v", Int] + & Rec["z", Boolean] = { field("k")("Str") + field("v")(0) + diff --git a/tests/neg/with-type-operator-future-migration.check b/tests/neg/with-type-operator-future-migration.check index 845601349c83..e56049880431 100644 --- a/tests/neg/with-type-operator-future-migration.check +++ b/tests/neg/with-type-operator-future-migration.check @@ -2,6 +2,6 @@ 5 |def foo: Int with String = ??? // error | ^^^^ | with as a type operator has been deprecated; use & instead - | This construct can be rewritten automatically under -rewrite -source future-migration. + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
| | longer explanation available when compiling with `-explain` diff --git a/tests/pos-deep-subtype/3324h.scala b/tests/pos-deep-subtype/3324h.scala index bff25ee3c888..6bae5f6a01d9 100644 --- a/tests/pos-deep-subtype/3324h.scala +++ b/tests/pos-deep-subtype/3324h.scala @@ -1,5 +1,7 @@ //> using options -Xfatal-warnings +import scala.language.`3.3` + object Test { trait Marker def foo[T](x: T) = x match { @@ -8,7 +10,7 @@ object Test { } def foo2[T](x: T) = x match { - case _: T with Marker => // scalac emits a warning + case _: T with Marker => // scalac or 3.4 emits a warning case _ => } } diff --git a/tests/pos/with-type-operator-3.3.scala b/tests/pos/with-type-operator-3.3.scala new file mode 100644 index 000000000000..2b40939d71b0 --- /dev/null +++ b/tests/pos/with-type-operator-3.3.scala @@ -0,0 +1,5 @@ +//> using options -Werror + +import scala.language.`3.3` + +def foo: Int with String = ??? diff --git a/tests/pos/with-type-operator.scala b/tests/pos/with-type-operator-3.4-migration.scala similarity index 51% rename from tests/pos/with-type-operator.scala rename to tests/pos/with-type-operator-3.4-migration.scala index d1fa5e2c34b7..27761a5e4a7f 100644 --- a/tests/pos/with-type-operator.scala +++ b/tests/pos/with-type-operator-3.4-migration.scala @@ -1,3 +1,3 @@ -//> using options -Werror +import scala.language.`3.4-migration` def foo: Int with String = ??? // warn diff --git a/tests/pos/with-type-operator-future-migration.scala b/tests/pos/with-type-operator-future-migration.scala deleted file mode 100644 index d6fe5205fd3d..000000000000 --- a/tests/pos/with-type-operator-future-migration.scala +++ /dev/null @@ -1,3 +0,0 @@ -import scala.language.`future-migration` - -def foo: Int with String = ??? // warn diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 8e790621ba07..ebca10683668 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -3389,6 +3389,7 @@ Text => empty Language => Scala Symbols => 13 entries Occurrences => 22 entries +Diagnostics => 1 entries Symbols: local0 => selfparam self: C1 @@ -3429,6 +3430,10 @@ Occurrences: [13:17..13:17): <- selfs/C6#``(). [13:27..13:28): B -> selfs/B# +Diagnostics: +[10:29..10:33): [warning] with as a type operator has been deprecated; use & instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. + expect/StructuralTypes.scala ---------------------------- @@ -5054,7 +5059,7 @@ Text => empty Language => Scala Symbols => 143 entries Occurrences => 246 entries -Diagnostics => 1 entries +Diagnostics => 3 entries Synthetics => 1 entries Symbols: @@ -5452,6 +5457,10 @@ Occurrences: Diagnostics: [5:13..5:14): [warning] unused explicit parameter +[62:25..62:29): [warning] with as a type operator has been deprecated; use & instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. +[63:25..63:29): [warning] with as a type operator has been deprecated; use & instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
Synthetics: [68:20..68:24):@ann => *[Int] From 038b51e99d2068f4ef17864148619dcf047244b5 Mon Sep 17 00:00:00 2001 From: Hamza Remmal <56235032+hamzaremmal@users.noreply.github.com> Date: Thu, 2 Nov 2023 13:54:57 +0100 Subject: [PATCH 167/216] Enter missing symbols in MacroAnnotations --- .../dotty/tools/dotc/transform/Inlining.scala | 5 +-- .../dotc/transform/MacroAnnotations.scala | 32 +++++++++++++++---- tests/neg-macros/i18825.check | 3 ++ tests/neg-macros/i18825/Macro_1.scala | 19 +++++++++++ tests/neg-macros/i18825/Test_2.scala | 15 +++++++++ tests/neg-macros/wrong-owner.check | 23 +++++++++++++ tests/neg-macros/wrong-owner/Macro_1.scala | 19 +++++++++++ tests/neg-macros/wrong-owner/Test_2.scala | 5 +++ tests/run-macros/i18806.check | 1 + tests/run-macros/i18806/Macro_1.scala | 24 ++++++++++++++ tests/run-macros/i18806/Test_2.scala | 14 ++++++++ tests/run/quotes-add-erased/Macro_1.scala | 2 +- 12 files changed, 153 insertions(+), 9 deletions(-) create mode 100644 tests/neg-macros/i18825.check create mode 100644 tests/neg-macros/i18825/Macro_1.scala create mode 100644 tests/neg-macros/i18825/Test_2.scala create mode 100644 tests/neg-macros/wrong-owner.check create mode 100644 tests/neg-macros/wrong-owner/Macro_1.scala create mode 100644 tests/neg-macros/wrong-owner/Test_2.scala create mode 100644 tests/run-macros/i18806.check create mode 100644 tests/run-macros/i18806/Macro_1.scala create mode 100644 tests/run-macros/i18806/Test_2.scala diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index bfc44f868cb6..a51ba93ab9ac 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -17,7 +17,8 @@ import dotty.tools.dotc.staging.StagingLevel import scala.collection.mutable.ListBuffer /** Inlines all calls to inline methods that are not in an inline method or a quote */ -class Inlining extends MacroTransform { +class Inlining extends MacroTransform, IdentityDenotTransformer { + self => import tpd.* @@ -75,7 +76,7 @@ class Inlining extends MacroTransform { && StagingLevel.level == 0 && MacroAnnotations.hasMacroAnnotation(tree.symbol) then - val trees = (new MacroAnnotations).expandAnnotations(tree) + val trees = (new MacroAnnotations(self)).expandAnnotations(tree) val trees1 = trees.map(super.transform) // Find classes added to the top level from a package object diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala index 40c6eee1382c..dbc1639f4b55 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala @@ -9,7 +9,7 @@ import dotty.tools.dotc.config.Printers.{macroAnnot => debug} import dotty.tools.dotc.core.Annotations.* import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Decorators.* -import dotty.tools.dotc.core.DenotTransformers.DenotTransformer +import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.MacroClassLoader import dotty.tools.dotc.core.Symbols.* @@ -23,7 +23,8 @@ import scala.util.control.NonFatal import java.lang.reflect.InvocationTargetException -class MacroAnnotations: +class MacroAnnotations(phase: IdentityDenotTransformer): + import tpd.* import MacroAnnotations.* @@ -58,9 +59,11 @@ class MacroAnnotations: case (prefixed, newTree :: suffixed) => allTrees ++= 
prefixed insertedAfter = suffixed :: insertedAfter - prefixed.foreach(checkMacroDef(_, tree, annot)) - suffixed.foreach(checkMacroDef(_, tree, annot)) - transform.TreeChecker.checkMacroGeneratedTree(tree, newTree) + for prefixedTree <- prefixed do + checkMacroDef(prefixedTree, tree, annot) + for suffixedTree <- suffixed do + checkMacroDef(suffixedTree, tree, annot) + TreeChecker.checkMacroGeneratedTree(tree, newTree) newTree case (Nil, Nil) => report.error(i"Unexpected `Nil` returned by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) @@ -76,6 +79,7 @@ class MacroAnnotations: insertedAfter.foreach(allTrees.++=) val result = allTrees.result() + for tree <- result do enterMissingSymbols(tree) debug.println(result.map(_.show).mkString("expanded to:\n", "\n", "")) result @@ -120,7 +124,7 @@ class MacroAnnotations: /** Check that this tree can be added by the macro annotation */ private def checkMacroDef(newTree: DefTree, annotatedTree: Tree, annot: Annotation)(using Context) = - transform.TreeChecker.checkMacroGeneratedTree(annotatedTree, newTree) + TreeChecker.checkMacroGeneratedTree(annotatedTree, newTree) val sym = newTree.symbol val annotated = annotatedTree.symbol if sym.isType && !sym.isClass then @@ -130,6 +134,22 @@ class MacroAnnotations: else if annotated.isClass && annotated.owner.is(Package) /*&& !sym.isClass*/ then report.error(i"macro annotation can not add top-level ${sym.showKind}. $annot tried to add $sym.", annot.tree) + /** + * Enter the symbols generated by MacroAnnotations + */ + private def enterMissingSymbols(tree: DefTree)(using Context) = new TreeTraverser { + def traverse(tree: tpd.Tree)(using Context): Unit = tree match + case tdef @ TypeDef(_, template: Template) => + val isSymbolInDecls = tdef.symbol.asClass.info.decls.toList.toSet + for tree <- template.body do + if tree.symbol.owner != tdef.symbol then + report.error(em"Macro added a definition with the wrong owner - ${tree.symbol.owner} - ${tdef.symbol} in ${tree.source}", tree.srcPos) + else if !isSymbolInDecls(tree.symbol) then + tree.symbol.enteredAfter(phase) + traverseChildren(tree) + case _ => traverseChildren(tree) + }.traverse(tree) + object MacroAnnotations: /** Is this an annotation that implements `scala.annation.MacroAnnotation` */ diff --git a/tests/neg-macros/i18825.check b/tests/neg-macros/i18825.check new file mode 100644 index 000000000000..0269f9880828 --- /dev/null +++ b/tests/neg-macros/i18825.check @@ -0,0 +1,3 @@ + +error overriding method toString in class Foo of type (): String; + method toString of type (): String cannot override final member method toString in class Foo diff --git a/tests/neg-macros/i18825/Macro_1.scala b/tests/neg-macros/i18825/Macro_1.scala new file mode 100644 index 000000000000..c099954f3858 --- /dev/null +++ b/tests/neg-macros/i18825/Macro_1.scala @@ -0,0 +1,19 @@ +import scala.annotation.experimental +import scala.annotation.MacroAnnotation +import scala.quoted.* + +@experimental +class toString extends MacroAnnotation : + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect.* + tree match + case ClassDef(name, ctr, parents, self, body) => + val cls = tree.symbol + val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") + val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) + val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant("Hello from macro")))) + val newClassDef = 
ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + List(newClassDef) + case _ => + report.error("@toString can only be annotated on class definitions") + tree :: Nil diff --git a/tests/neg-macros/i18825/Test_2.scala b/tests/neg-macros/i18825/Test_2.scala new file mode 100644 index 000000000000..83ae9c778704 --- /dev/null +++ b/tests/neg-macros/i18825/Test_2.scala @@ -0,0 +1,15 @@ +// nopos-error + +import annotation.experimental + +class Foo : + final override def toString(): String = "Hello" + +@experimental +@toString +class AFoo extends Foo //: + //override def toString(): String = "Hello from macro" + +@experimental +@main def run = + println(new AFoo().toString) diff --git a/tests/neg-macros/wrong-owner.check b/tests/neg-macros/wrong-owner.check new file mode 100644 index 000000000000..ccaca98e3948 --- /dev/null +++ b/tests/neg-macros/wrong-owner.check @@ -0,0 +1,23 @@ + +-- Error: tests/neg-macros/wrong-owner/Test_2.scala:5:6 ---------------------------------------------------------------- +3 |@experimental +4 |@wrongOwner +5 |class Foo // error + |^ + |Malformed tree was found while expanding macro with -Xcheck-macros. + | |The tree does not conform to the compiler's tree invariants. + | | + | |Macro was: + | |@scala.annotation.internal.SourceFile("tests/neg-macros/wrong-owner/Test_2.scala") @wrongOwner @scala.annotation.experimental class Foo() + | | + | |The macro returned: + | |@scala.annotation.internal.SourceFile("tests/neg-macros/wrong-owner/Test_2.scala") @wrongOwner @scala.annotation.experimental class Foo() { + | override def toString(): java.lang.String = "Hello from macro" + |} + | | + | |Error: + | |assertion failed: bad owner; method toString has owner class String, expected was class Foo + |owner chain = method toString, class String, package java.lang, package java, package , ctxOwners = class Foo, class Foo, package , package , package , package , package , package , package , package , package , , , , , + | | + |stacktrace available when compiling with `-Ydebug` + | | diff --git a/tests/neg-macros/wrong-owner/Macro_1.scala b/tests/neg-macros/wrong-owner/Macro_1.scala new file mode 100644 index 000000000000..85127b701f81 --- /dev/null +++ b/tests/neg-macros/wrong-owner/Macro_1.scala @@ -0,0 +1,19 @@ +import scala.annotation.experimental +import scala.annotation.MacroAnnotation +import scala.quoted.* + +@experimental +class wrongOwner extends MacroAnnotation : + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect.* + tree match + case ClassDef(name, ctr, parents, self, body) => + val cls = tree.symbol + val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") + val toStringOverrideSym = Symbol.newMethod(Symbol.classSymbol("java.lang.String"), "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) + val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant("Hello from macro")))) + val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + List(newClassDef) + case _ => + report.error("@toString can only be annotated on class definitions") + tree :: Nil diff --git a/tests/neg-macros/wrong-owner/Test_2.scala b/tests/neg-macros/wrong-owner/Test_2.scala new file mode 100644 index 000000000000..ccba69beeb86 --- /dev/null +++ b/tests/neg-macros/wrong-owner/Test_2.scala @@ -0,0 +1,5 @@ +import scala.annotation.experimental + +@experimental +@wrongOwner +class Foo // error diff --git a/tests/run-macros/i18806.check 
b/tests/run-macros/i18806.check new file mode 100644 index 000000000000..32f95c0d1244 --- /dev/null +++ b/tests/run-macros/i18806.check @@ -0,0 +1 @@ +hi \ No newline at end of file diff --git a/tests/run-macros/i18806/Macro_1.scala b/tests/run-macros/i18806/Macro_1.scala new file mode 100644 index 000000000000..461080b67b95 --- /dev/null +++ b/tests/run-macros/i18806/Macro_1.scala @@ -0,0 +1,24 @@ +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted._ + +@experimental +class gen1 extends MacroAnnotation: + def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + import quotes.reflect._ + tree match + case ClassDef(name, ctr, parents, self, body) => + val cls = tree.symbol + // val meth = cls.methodMember("foo").head + // val fooTpe = cls.typeRef.memberType(meth) + + val overrideTpe = MethodType(Nil)(_ => Nil, _ => defn.StringClass.typeRef) + + val fooOverrideSym = Symbol.newMethod(cls, "foo", overrideTpe, Flags.Override, Symbol.noSymbol) + + val fooDef = DefDef(fooOverrideSym, _ => Some(Literal(StringConstant("hi")))) + + val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, fooDef :: body) + List(newClassDef) + case _ => + report.error("Annotation only supports `class`") + List(tree) diff --git a/tests/run-macros/i18806/Test_2.scala b/tests/run-macros/i18806/Test_2.scala new file mode 100644 index 000000000000..3db56c895bdd --- /dev/null +++ b/tests/run-macros/i18806/Test_2.scala @@ -0,0 +1,14 @@ +import scala.annotation.experimental + +class Base: + def foo(): Object = ??? + +@experimental +@gen1 +class Sub extends Base +// > override def foo(): String = "hi" + +@experimental +@main def Test(): Unit = + val sub = new Sub + println(sub.foo()) diff --git a/tests/run/quotes-add-erased/Macro_1.scala b/tests/run/quotes-add-erased/Macro_1.scala index 66f8475da96d..56247d45cd23 100644 --- a/tests/run/quotes-add-erased/Macro_1.scala +++ b/tests/run/quotes-add-erased/Macro_1.scala @@ -15,7 +15,7 @@ class erasedParamsMethod extends MacroAnnotation: assert(methType.hasErasedParams) assert(methType.erasedParams == List(true, false)) - val methSym = Symbol.newMethod(tree.symbol, "takesErased", methType, Flags.EmptyFlags, Symbol.noSymbol) + val methSym = Symbol.newMethod(tree.symbol, "takesErased", methType, Flags.Override, Symbol.noSymbol) val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) val clsDef = ClassDef.copy(tree)(name, ctr, parents, self, methDef :: body) From e149e4c62206088d18e82b8b41932abc2a28f19d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com> Date: Wed, 8 Nov 2023 19:14:22 +0100 Subject: [PATCH 168/216] Add missing -Yexplicit-nulls for presentation compiler (#18776) With a new definition of untyped trees, it is now required to have `-Yexplicit-nulls` flag in modules that use them in order to have proper type checking. This PR adds the missing flag. Without the flag, it was possible to first assign untyped trees to typed trees, and secondly use extension methods for typed trees which can be seen at `KeywordsCompletions.scala` with usage of `untpdTree.filterSubtrees`. It is also blocked by: https://github.com/lampepfl/dotty/issues/18775 I can also make a workaround in the unmanaged module, but it will require a dependency on nightly version / a new release from metals. 
--- .../src/main/dotty/tools/pc/AutoImports.scala | 4 +- .../dotty/tools/pc/AutoImportsProvider.scala | 13 ++- .../tools/pc/CompilerSearchVisitor.scala | 6 +- .../tools/pc/CompletionItemResolver.scala | 10 +- .../pc/ConvertToNamedArgumentsProvider.scala | 11 +-- .../tools/pc/ExtractMethodProvider.scala | 8 +- .../main/dotty/tools/pc/HoverProvider.scala | 17 ++-- .../main/dotty/tools/pc/IndexedContext.scala | 6 +- .../dotty/tools/pc/InferredTypeProvider.scala | 31 +++---- .../main/dotty/tools/pc/MetalsDriver.scala | 2 +- .../src/main/dotty/tools/pc/PcCollector.scala | 13 ++- .../dotty/tools/pc/PcDefinitionProvider.scala | 7 +- .../tools/pc/PcInlineValueProviderImpl.scala | 4 +- .../tools/pc/PcSemanticTokensProvider.scala | 2 +- .../tools/pc/ScalaPresentationCompiler.scala | 38 ++++---- .../tools/pc/SelectionRangeProvider.scala | 5 +- .../pc/SemanticdbTextDocumentProvider.scala | 12 +-- .../tools/pc/SignatureHelpProvider.scala | 24 ++--- .../src/main/dotty/tools/pc/TastyUtils.scala | 6 +- .../completions/AmmoniteFileCompletions.scala | 38 ++++---- .../completions/AmmoniteIvyCompletions.scala | 4 +- .../tools/pc/completions/CompletionPos.scala | 17 +--- .../pc/completions/CompletionProvider.scala | 41 ++++----- .../pc/completions/CompletionValue.scala | 2 +- .../tools/pc/completions/Completions.scala | 22 ++--- .../completions/InterpolatorCompletions.scala | 2 +- .../pc/completions/KeywordsCompletions.scala | 80 ++++++++-------- .../pc/completions/MatchCaseCompletions.scala | 25 ++--- .../MultilineCommentCompletion.scala | 2 +- .../pc/completions/NamedArgCompletions.scala | 11 ++- .../pc/completions/OverrideCompletions.scala | 92 ++++++++++--------- .../pc/completions/ScaladocCompletions.scala | 2 +- .../pc/printer/ShortenedTypePrinter.scala | 4 +- .../tools/pc/utils/MtagsEnrichments.scala | 24 ++--- .../tools/pc/base/BaseAutoImportsSuite.scala | 1 + .../tools/pc/base/BaseCodeActionSuite.scala | 1 + .../tools/pc/base/BaseCompletionSuite.scala | 1 + .../pc/base/BaseDocumentHihglightSuite.scala | 1 + .../pc/base/BaseExtractMethodSuite.scala | 1 + .../dotty/tools/pc/base/BaseHoverSuite.scala | 1 + .../dotty/tools/pc/base/BasePCSuite.scala | 1 + .../tools/pc/base/BasePcDefinitionSuite.scala | 1 + .../tools/pc/base/BasePcRenameSuite.scala | 1 + .../pc/base/BaseSelectionRangeSuite.scala | 1 + .../pc/base/BaseSemanticTokensSuite.scala | 1 + .../pc/base/BaseSignatureHelpSuite.scala | 1 + .../tools/pc/base/ReusableClassRunner.scala | 1 + .../pc/tests/CompilerJobQueueSuite.scala | 1 + .../tools/pc/tests/PcSemanticdbSuite.scala | 1 + .../completion/CompletionCancelSuite.scala | 1 + .../completion/CompletionKeywordSuite.scala | 13 +++ .../pc/tests/completion/CompletionSuite.scala | 1 + .../tests/definition/PcDefinitionSuite.scala | 1 + .../definition/TypeDefinitionSuite.scala | 1 + .../AutoImplementAbstractMembersSuite.scala | 1 + .../edit/ConvertToNamedArgumentsSuite.scala | 4 +- .../pc/tests/edit/InlineValueSuite.scala | 1 + .../tests/edit/InsertInferredTypeSuite.scala | 1 + .../tools/pc/utils/MockSymbolSearch.scala | 1 + .../dotty/tools/pc/utils/PcAssertions.scala | 2 + .../dotty/tools/pc/utils/RangeReplace.scala | 1 + .../tools/pc/utils/TestCompletions.scala | 1 + .../dotty/tools/pc/utils/TestHovers.scala | 1 + .../tools/pc/utils/TestSemanticTokens.scala | 1 + .../pc/utils/TestingWorkspaceSearch.scala | 1 + .../test/dotty/tools/pc/utils/TextEdits.scala | 1 + project/Build.scala | 8 +- .../AbstractMemberSignaturesTest.scala | 5 +- 68 files changed, 332 insertions(+), 314 deletions(-) diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala index 4a204105d7b2..0ccaec14927c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala @@ -23,7 +23,7 @@ object AutoImports: def renameConfigMap(config: PresentationCompilerConfig)(using Context ): Map[Symbol, String] = - config.symbolPrefixes.asScala.flatMap { (from, to) => + config.symbolPrefixes().nn.asScala.flatMap { (from, to) => val pkg = SemanticdbSymbols.inverseSemanticdbSymbol(from) val rename = to.stripSuffix(".").stripSuffix("#") List(pkg, pkg.map(_.moduleClass)).flatten @@ -246,7 +246,7 @@ object AutoImports: // see WorksheetProvider.worksheetScala3AdjustmentsForPC val indent = if pos.source.path.isWorksheet && - editPos.getStart().getCharacter() == 0 + editPos.getStart().nn.getCharacter() == 0 then indent0.drop(2) else indent0 val topPadding = diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala index 6a1b91cba31f..b95c5fb949e0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala @@ -29,13 +29,12 @@ final class AutoImportsProvider( )(using ReportContext): def autoImports(isExtension: Boolean): List[AutoImportsResult] = - val uri = params.uri + val uri = params.uri().nn + val text = params.text().nn val filePath = Paths.get(uri) - driver.run( - uri, - SourceFile.virtual(filePath.toString, params.text) - ) - val unit = driver.currentCtx.run.units.head + driver.run(uri, SourceFile.virtual(filePath.toString, text)) + + val unit = driver.currentCtx.run.nn.units.head val tree = unit.tpdTree val pos = driver.sourcePosition(params) @@ -81,7 +80,7 @@ final class AutoImportsProvider( val generator = AutoImports.generator( correctedPos, - params.text, + text, tree, unit.comments, indexedContext.importContext, diff --git a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala index 2f7ee282450c..d217a0acd9b1 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala @@ -17,7 +17,7 @@ class CompilerSearchVisitor( )(using ctx: Context, reports: ReportContext) extends SymbolSearchVisitor: - val logger: Logger = Logger.getLogger(classOf[CompilerSearchVisitor].getName) + val logger: Logger = Logger.getLogger(classOf[CompilerSearchVisitor].getName().nn).nn private def isAccessible(sym: Symbol): Boolean = try sym != NoSymbol && sym.isPublic && sym.isStatic @@ -68,7 +68,7 @@ class CompilerSearchVisitor( .split("\\$") val added = - try toSymbols(pkg, innerPath.toList).filter(visitSymbol) + try toSymbols(pkg, innerPath.nn.toList.map(_.nn)).filter(visitSymbol) catch case NonFatal(e) => logger.log(Level.WARNING, e.getMessage(), e) @@ -95,6 +95,6 @@ class CompilerSearchVisitor( override def isCancelled: Boolean = false private def normalizePackage(pkg: String): String = - pkg.replace("/", ".").stripSuffix(".") + pkg.replace("/", ".").nn.stripSuffix(".") end CompilerSearchVisitor diff --git a/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala b/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala index 
d393e9204c27..4a20ab0f8e5f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala @@ -30,7 +30,7 @@ object CompletionItemResolver extends ItemResolver: .orElse( search.symbolDocumentation(gsym.companion) ) match - case Some(info) if item.getDetail != null => + case Some(info) if item.getDetail() != null => enrichDocs( item, info, @@ -50,7 +50,7 @@ object CompletionItemResolver extends ItemResolver: Context ): String = def docs(gsym: Symbol): String = - search.symbolDocumentation(gsym).fold("")(_.docstring()) + search.symbolDocumentation(gsym).fold("")(_.docstring().nn) val gsymDoc = docs(gsym) def keyword(gsym: Symbol): String = if gsym.isClass then "class" @@ -60,7 +60,7 @@ object CompletionItemResolver extends ItemResolver: else "" val companion = gsym.companion if companion == NoSymbol || gsym.is(JavaDefined) then - if gsymDoc.isEmpty then + if gsymDoc.isEmpty() then if gsym.isAliasType then fullDocstring(gsym.info.metalsDealias.typeSymbol, search) else if gsym.is(Method) then @@ -73,8 +73,8 @@ object CompletionItemResolver extends ItemResolver: else gsymDoc else val companionDoc = docs(companion) - if companionDoc.isEmpty then gsymDoc - else if gsymDoc.isEmpty then companionDoc + if companionDoc.isEmpty() then gsymDoc + else if gsymDoc.isEmpty() then companionDoc else List( s"""|### ${keyword(companion)} ${companion.name} diff --git a/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala index 99cc82cdf6a1..817ab5402c00 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala @@ -23,13 +23,12 @@ final class ConvertToNamedArgumentsProvider( ): def convertToNamedArguments: Either[String, List[l.TextEdit]] = - val uri = params.uri + val uri = params.uri().nn + val text = params.text().nn val filePath = Paths.get(uri) - driver.run( - uri, - SourceFile.virtual(filePath.toString, params.text) - ) - val unit = driver.currentCtx.run.units.head + driver.run(uri, SourceFile.virtual(filePath.toString, text)) + + val unit = driver.currentCtx.run.nn.units.head val newctx = driver.currentCtx.fresh.setCompilationUnit(unit) val pos = driver.sourcePosition(params) val trees = driver.openedTrees(uri) diff --git a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala index cbdc39a90118..0b5fd1b06a8f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala @@ -38,12 +38,12 @@ final class ExtractMethodProvider( extends ExtractMethodUtils: def extractMethod(): List[TextEdit] = - val text = range.text() - val uri = range.uri + val text = range.text().nn + val uri = range.uri().nn val filePath = Paths.get(uri) val source = SourceFile.virtual(filePath.toString, text) driver.run(uri, source) - val unit = driver.currentCtx.run.units.head + val unit = driver.currentCtx.run.nn.units.head val pos = driver.sourcePosition(range).startPos val path = Interactive.pathTo(driver.openedTrees(uri), pos)(using driver.currentCtx) @@ -145,7 +145,7 @@ final class ExtractMethodProvider( val oldIndentLen = head.startPos.startColumnPadding.length() val toExtract = 
-            range.text(),
+            text,
             head.startPos.start,
             expr.endPos.end,
             newIndent,
diff --git a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala
index 1ddb79fabc98..545607c0b8ff 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala
@@ -32,8 +32,9 @@ object HoverProvider:
       driver: InteractiveDriver,
       search: SymbolSearch
   )(implicit reportContext: ReportContext): ju.Optional[HoverSignature] =
-    val uri = params.uri
-    val sourceFile = SourceFile.virtual(params.uri, params.text)
+    val uri = params.uri().nn
+    val text = params.text().nn
+    val sourceFile = SourceFile.virtual(uri, text)
     driver.run(uri, sourceFile)
 
     given ctx: Context = driver.currentCtx
@@ -54,7 +55,7 @@ object HoverProvider:
     then
       def report =
         val posId =
-          if path.isEmpty || path.head.sourcePos == null || !path.head.sourcePos.exists
+          if path.isEmpty || !path.head.sourcePos.exists
           then pos.start
           else path.head.sourcePos.start
         Report(
@@ -77,7 +78,7 @@ object HoverProvider:
        )
      end report
      reportContext.unsanitized.create(report, ifVerbose = true)
-      ju.Optional.empty()
+      ju.Optional.empty().nn
    else
      val skipCheckOnName =
        !pos.isPoint // don't check isHoveringOnName for RangeHover
@@ -125,7 +126,7 @@ object HoverProvider:
          val docString = symbolTpes
            .flatMap(symTpe => search.symbolDocumentation(symTpe._1))
-            .map(_.docstring)
+            .map(_.docstring())
            .mkString("\n")
          printer.expressionType(exprTpw) match
            case Some(expressionType) =>
@@ -143,9 +144,9 @@ object HoverProvider:
                  docstring = Some(docString),
                  forceExpressionType = forceExpressionType
                )
-              )
+              ).nn
            case _ =>
-              ju.Optional.empty
+              ju.Optional.empty().nn
        end match
      end match
    end if
@@ -188,7 +189,7 @@ object HoverProvider:
        refTpe.flatMap(findRefinement).asJava
      case _ =>
-        ju.Optional.empty()
+        ju.Optional.empty().nn
 
 end HoverProvider
diff --git a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala
index 03bc711e8f18..e31f4756b220 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala
@@ -7,6 +7,7 @@ import dotty.tools.dotc.core.Contexts.*
 import dotty.tools.dotc.core.Flags.*
 import dotty.tools.dotc.core.NameOps.moduleClassName
 import dotty.tools.dotc.core.Names.*
+import dotty.tools.dotc.core.Scopes.EmptyScope
 import dotty.tools.dotc.core.Symbols.*
 import dotty.tools.dotc.core.Types.*
 import dotty.tools.dotc.typer.ImportInfo
@@ -82,7 +83,6 @@ object IndexedContext:
 
   def apply(ctx: Context): IndexedContext =
     ctx match
-      case null => Empty
      case NoContext => Empty
      case _ => LazyWrapper(using ctx)
@@ -205,14 +205,14 @@ object IndexedContext:
    val (symbols, renames) =
      if ctx.isImportContext then
        val (syms, renames) =
-          fromImportInfo(ctx.importInfo)
+          fromImportInfo(ctx.importInfo.nn)
            .map((sym, rename) => (sym, rename.map(r => sym -> r.decoded)))
            .unzip
        (syms, renames.flatten.toMap)
      else if ctx.owner.isClass then
        val site = ctx.owner.thisType
        (accesibleMembers(site), Map.empty)
-      else if ctx.scope != null then (ctx.scope.toList, Map.empty)
+      else if ctx.scope != EmptyScope then (ctx.scope.toList, Map.empty)
      else (List.empty, Map.empty)
 
    val initial = Map.empty[String, List[Symbol]]
diff --git a/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala
index 578353ef4c90..69d89d5b0d13 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala
@@ -62,14 +62,14 @@ final class InferredTypeProvider(
       adjustOpt: Option[AdjustTypeOpts] = None
   ): List[TextEdit] =
     val retryType = adjustOpt.isEmpty
-    val uri = params.uri
-    val filePath = Paths.get(uri)
+    val uri = params.uri().nn
+    val filePath = Paths.get(uri).nn
 
-    val sourceText = adjustOpt.map(_.text).getOrElse(params.text)
+    val sourceText = adjustOpt.map(_.text).getOrElse(params.text().nn)
     val source =
-      SourceFile.virtual(filePath.toString, sourceText)
+      SourceFile.virtual(filePath.toString(), sourceText)
     driver.run(uri, source)
-    val unit = driver.currentCtx.run.units.head
+    val unit = driver.currentCtx.run.nn.units.head
     val pos = driver.sourcePosition(params)
     val path =
       Interactive.pathTo(driver.openedTrees(uri), pos)(using driver.currentCtx)
@@ -78,7 +78,7 @@ final class InferredTypeProvider(
     val indexedCtx = IndexedContext(locatedCtx)
     val autoImportsGen = AutoImports.generator(
       pos,
-      params.text,
+      sourceText,
       unit.tpdTree,
       unit.comments,
       indexedCtx,
@@ -86,7 +86,7 @@ final class InferredTypeProvider(
     )
 
     def removeType(nameEnd: Int, tptEnd: Int) =
-      sourceText.substring(0, nameEnd) +
+      sourceText.substring(0, nameEnd).nn +
        sourceText.substring(tptEnd + 1, sourceText.length())
 
    def optDealias(tpe: Type): Type =
@@ -134,7 +134,7 @@ final class InferredTypeProvider(
        def baseEdit(withParens: Boolean): TextEdit =
          val keywordOffset = if isParam then 0 else 4
          val endPos =
-            findNamePos(params.text, vl, keywordOffset).endPos.toLsp
+            findNamePos(sourceText, vl, keywordOffset).endPos.toLsp
          adjustOpt.foreach(adjust => endPos.setEnd(adjust.adjustedEndPos))
          new TextEdit(
            endPos,
@@ -148,11 +148,10 @@ final class InferredTypeProvider(
            toCheckFor: Char,
            blockStartPos: SourcePosition
        ) =
-          val text = params.text
-          val isParensFunction: Boolean = text(applyEndingPos) == toCheckFor
+          val isParensFunction: Boolean = sourceText(applyEndingPos) == toCheckFor
 
          val alreadyHasParens =
-            text(blockStartPos.start) == '('
+            sourceText(blockStartPos.start) == '('
 
          if isParensFunction && !alreadyHasParens then
            new TextEdit(blockStartPos.toLsp, "(") :: baseEdit(withParens =
@@ -188,7 +187,7 @@ final class InferredTypeProvider(
              Some(
                AdjustTypeOpts(
                  removeType(vl.namePos.end, tpt.sourcePos.end - 1),
-                  tpt.sourcePos.toLsp.getEnd()
+                  tpt.sourcePos.toLsp.getEnd().nn
                )
              )
            )
@@ -227,7 +226,7 @@ final class InferredTypeProvider(
              Some(
                AdjustTypeOpts(
                  removeType(lastColon, tpt.sourcePos.end - 1),
-                  tpt.sourcePos.toLsp.getEnd()
+                  tpt.sourcePos.toLsp.getEnd().nn
                )
              )
            )
@@ -256,8 +255,8 @@ final class InferredTypeProvider(
        val firstEnd = patterns(0).endPos.end
        val secondStart = patterns(1).startPos.start
        val hasDot = params
-          .text()
-          .substring(firstEnd, secondStart)
+          .text().nn
+          .substring(firstEnd, secondStart).nn
          .exists(_ == ',')
        if !hasDot then
          val leftParen = new TextEdit(body.startPos.toLsp, "(")
@@ -309,7 +308,7 @@ final class InferredTypeProvider(
        val end = if withBacktick then idx + 1 else idx
        val pos = tree.source.atSpan(Span(start, end, start))
        Some(pos)
-      case None if idx < text.length =>
+      case None if idx < text.length() =>
        val ch = text.charAt(idx)
        if ch == realName.head then
          lookup(idx + 1, Some((idx, realName.tail)), withBacktick)
diff --git a/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala b/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala
index 4f7ed751f958..55504db7a11a 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala
@@ -48,7 +48,7 @@ class MetalsDriver(
 
   override def run(uri: URI, sourceCode: String): List[Diagnostic] =
     val diags =
-      if alreadyCompiled(uri, sourceCode.toCharArray()) then Nil
+      if alreadyCompiled(uri, sourceCode.toCharArray().nn) then Nil
       else super.run(uri, sourceCode)
     lastCompiledURI = uri
     diags
diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala
index 8ffd8ed28044..cf9e31cd4524 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala
@@ -33,15 +33,15 @@ abstract class PcCollector[T](
     params: VirtualFileParams
 ):
   private val caseClassSynthetics: Set[Name] = Set(nme.apply, nme.copy)
-  val uri = params.uri()
-  val filePath = Paths.get(uri)
-  val sourceText = params.text
+  val uri = params.uri().nn
+  val filePath = Paths.get(uri).nn
+  val sourceText = params.text().nn
   val source =
-    SourceFile.virtual(filePath.toString, sourceText)
+    SourceFile.virtual(filePath.toString(), sourceText)
   driver.run(uri, source)
   given ctx: Context = driver.currentCtx
-  val unit = driver.currentCtx.run.units.head
+  val unit = driver.currentCtx.run.nn.units.head
   val compilatonUnitContext = ctx.fresh.setCompilationUnit(unit)
   val offset = params match
     case op: OffsetParams => op.offset()
@@ -49,8 +49,7 @@ abstract class PcCollector[T](
   val offsetParams =
     params match
       case op: OffsetParams => op
-      case _ =>
-        CompilerOffsetParams(params.uri(), params.text(), 0, params.token())
+      case _ => CompilerOffsetParams(uri, sourceText, 0, params.token().nn)
   val pos = driver.sourcePosition(offsetParams)
   val rawPath =
     Interactive
diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala
index 5d80b7d9be48..f010c8b2d95a 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala
@@ -36,11 +36,12 @@ class PcDefinitionProvider(
     definitions(findTypeDef = true)
 
   private def definitions(findTypeDef: Boolean): DefinitionResult =
-    val uri = params.uri
+    val uri = params.uri().nn
+    val text = params.text().nn
     val filePath = Paths.get(uri)
     driver.run(
       uri,
-      SourceFile.virtual(filePath.toString, params.text)
+      SourceFile.virtual(filePath.toString, text)
     )
 
     val pos = driver.sourcePosition(params)
@@ -53,7 +54,7 @@ class PcDefinitionProvider(
       if findTypeDef then findTypeDefinitions(path, pos, indexedContext)
       else findDefinitions(path, pos, indexedContext)
 
-    if result.locations().isEmpty() then fallbackToUntyped(pos)(using ctx)
+    if result.locations().nn.isEmpty() then fallbackToUntyped(pos)(using ctx)
    else result
  end definitions
diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala
index 2d4a9d8643c9..e591f89f0152 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala
@@ -27,9 +27,9 @@ final class PcInlineValueProviderImpl(
 ) extends PcCollector[Option[Occurence]](driver, params)
     with InlineValueProvider:
 
-  val text = params.text.toCharArray()
+  val text = params.text().nn.toCharArray().nn
 
-  val position: l.Position = pos.toLsp.getStart()
+  val position: l.Position = pos.toLsp.getStart().nn
 
   override def collect(parent: Option[Tree])(
       tree: Tree | EndMarker,
diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala
index d70fa32c2b10..92efcb034564 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala
@@ -69,7 +69,7 @@ final class PcSemanticTokensProvider(
       case tree: Tree => symbol.fold(tree.symbol)(identity)
       case EndMarker(sym) => sym
 
-    if !pos.exists || sym == null || sym == NoSymbol then None
+    if !pos.exists || sym == NoSymbol then None
     else
       Some(
         makeNode(
diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala
index 3c201cb4634a..4dc9cd6d743f 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala
@@ -12,6 +12,7 @@ import java.util as ju
 import scala.concurrent.ExecutionContext
 import scala.concurrent.ExecutionContextExecutor
 import scala.jdk.CollectionConverters._
+import scala.language.unsafeNulls
 import scala.meta.internal.metals.CompilerVirtualFileParams
 import scala.meta.internal.metals.EmptyCancelToken
 import scala.meta.internal.metals.EmptyReportContext
@@ -36,6 +37,7 @@ import org.eclipse.lsp4j as l
 
 case class ScalaPresentationCompiler(
     buildTargetIdentifier: String = "",
+    buildTargetName: Option[String] = None,
     classpath: Seq[Path] = Nil,
     options: List[String] = Nil,
     search: SymbolSearch = EmptySymbolSearch,
@@ -46,17 +48,21 @@ case class ScalaPresentationCompiler(
     reportsLevel: ReportLevel = ReportLevel.Info
 ) extends PresentationCompiler:
 
-  def this() = this("", Nil, Nil)
+  def this() = this("", None, Nil, Nil)
 
   val scalaVersion = BuildInfo.scalaVersion
 
   private val forbiddenOptions = Set("-print-lines", "-print-tasty")
   private val forbiddenDoubleOptions = Set("-release")
+
   given ReportContext = folderPath
-    .map(StdReportContext(_, reportsLevel))
+    .map(StdReportContext(_, _ => buildTargetName, reportsLevel))
     .getOrElse(EmptyReportContext)
 
+  override def withBuildTargetName(buildTargetName: String) =
+    copy(buildTargetName = Some(buildTargetName))
+
   override def withReportsLoggerLevel(level: String): PresentationCompiler =
     copy(reportsLevel = ReportLevel.fromString(level))
 
@@ -111,7 +117,7 @@ case class ScalaPresentationCompiler(
   def complete(params: OffsetParams): CompletableFuture[l.CompletionList] =
     compilerAccess.withInterruptableCompiler(Some(params))(
       EmptyCompletionList(),
-      params.token
+      params.token()
     ) { access =>
       val driver = access.compiler()
       new CompletionProvider(
@@ -128,7 +134,7 @@ case class ScalaPresentationCompiler(
   def definition(params: OffsetParams): CompletableFuture[DefinitionResult] =
     compilerAccess.withInterruptableCompiler(Some(params))(
       DefinitionResultImpl.empty,
-      params.token
+      params.token()
     ) { access =>
       val driver = access.compiler()
       PcDefinitionProvider(driver, params, search).definitions()
@@ -139,7 +145,7 @@ case class ScalaPresentationCompiler(
   ): CompletableFuture[DefinitionResult] =
     compilerAccess.withInterruptableCompiler(Some(params))(
       DefinitionResultImpl.empty,
-      params.token
+      params.token()
     ) { access =>
       val driver = access.compiler()
       PcDefinitionProvider(driver, params, search).typeDefinitions()
@@ -150,7 +156,7 @@ case class ScalaPresentationCompiler(
   ): CompletableFuture[ju.List[DocumentHighlight]] =
     compilerAccess.withInterruptableCompiler(Some(params))(
       List.empty[DocumentHighlight].asJava,
-      params.token
+      params.token()
     ) { access =>
       val driver = access.compiler()
       PcDocumentHighlightProvider(driver, params).highlights.asJava
@@ -202,7 +208,7 @@ case class ScalaPresentationCompiler(
   ] =
     compilerAccess.withNonInterruptableCompiler(Some(params))(
       List.empty[scala.meta.pc.AutoImportsResult].asJava,
-      params.token
+      params.token()
     ) { access =>
       val driver = access.compiler()
       new AutoImportsProvider(
@@ -223,7 +229,7 @@ case class ScalaPresentationCompiler(
     val empty: ju.List[l.TextEdit] = new ju.ArrayList[l.TextEdit]()
     compilerAccess.withNonInterruptableCompiler(Some(params))(
       empty,
-      params.token
+      params.token()
     ) { pc =>
       val driver = pc.compiler()
       OverrideCompletions.implementAllAt(
@@ -241,7 +247,7 @@ case class ScalaPresentationCompiler(
     val empty: ju.List[l.TextEdit] = new ju.ArrayList[l.TextEdit]()
     compilerAccess.withNonInterruptableCompiler(Some(params))(
       empty,
-      params.token
+      params.token()
     ) { pc =>
       new InferredTypeProvider(params, pc.compiler(), config, search)
         .inferredTypeEdits()
@@ -253,7 +259,7 @@ case class ScalaPresentationCompiler(
   ): CompletableFuture[ju.List[l.TextEdit]] =
     val empty: Either[String, List[l.TextEdit]] = Right(List())
     (compilerAccess
-      .withInterruptableCompiler(Some(params))(empty, params.token) { pc =>
+      .withInterruptableCompiler(Some(params))(empty, params.token()) { pc =>
        new PcInlineValueProviderImpl(pc.compiler(), params)
          .getInlineTextEdits()
      })
@@ -268,7 +274,7 @@ case class ScalaPresentationCompiler(
      extractionPos: OffsetParams
  ): CompletableFuture[ju.List[l.TextEdit]] =
    val empty: ju.List[l.TextEdit] = new ju.ArrayList[l.TextEdit]()
-    compilerAccess.withInterruptableCompiler(Some(range))(empty, range.token) {
+    compilerAccess.withInterruptableCompiler(Some(range))(empty, range.token()) {
      pc =>
        new ExtractMethodProvider(
          range,
@@ -288,7 +294,7 @@ case class ScalaPresentationCompiler(
  ): CompletableFuture[ju.List[l.TextEdit]] =
    val empty: Either[String, List[l.TextEdit]] = Right(List())
    (compilerAccess
-      .withNonInterruptableCompiler(Some(params))(empty, params.token) { pc =>
+      .withNonInterruptableCompiler(Some(params))(empty, params.token()) { pc =>
        new ConvertToNamedArgumentsProvider(
          pc.compiler(),
          params,
@@ -320,7 +326,7 @@ case class ScalaPresentationCompiler(
  ): CompletableFuture[ju.Optional[HoverSignature]] =
    compilerAccess.withNonInterruptableCompiler(Some(params))(
      ju.Optional.empty[HoverSignature](),
-      params.token
+      params.token()
    ) { access =>
      val driver = access.compiler()
      HoverProvider.hover(params, driver, search)
@@ -332,7 +338,7 @@ case class ScalaPresentationCompiler(
  ): CompletableFuture[ju.Optional[l.Range]] =
    compilerAccess.withNonInterruptableCompiler(Some(params))(
      Optional.empty[l.Range](),
-      params.token
+      params.token()
    ) { access =>
      val driver = access.compiler()
      Optional.ofNullable(
@@ -346,7 +352,7 @@ case class ScalaPresentationCompiler(
  ): CompletableFuture[ju.List[l.TextEdit]] =
    compilerAccess.withNonInterruptableCompiler(Some(params))(
      List[l.TextEdit]().asJava,
-      params.token
+      params.token()
    ) { access =>
      val driver = access.compiler()
      PcRenameProvider(driver, params, Some(name)).rename().asJava
@@ -366,7 +372,7 @@ case class ScalaPresentationCompiler(
   def signatureHelp(params: OffsetParams): CompletableFuture[l.SignatureHelp] =
     compilerAccess.withNonInterruptableCompiler(Some(params))(
       new l.SignatureHelp(),
-      params.token
+      params.token()
     ) { access =>
       val driver = access.compiler()
       SignatureHelpProvider.signatureHelp(driver, params, search)
diff --git a/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala
index f0268baaaf23..a7d07b12f40c 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala
@@ -36,9 +36,10 @@ class SelectionRangeProvider(
 
     params.asScala.toList.map { param =>
 
-      val uri = param.uri
+      val uri = param.uri().nn
+      val text = param.text().nn
       val filePath = Paths.get(uri)
-      val source = SourceFile.virtual(filePath.toString, param.text)
+      val source = SourceFile.virtual(filePath.toString, text)
       driver.run(uri, source)
       val pos = driver.sourcePosition(param)
       val path =
diff --git a/presentation-compiler/src/main/dotty/tools/pc/SemanticdbTextDocumentProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SemanticdbTextDocumentProvider.scala
index 22975c2eefcb..43e75a4d6130 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/SemanticdbTextDocumentProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/SemanticdbTextDocumentProvider.scala
@@ -25,13 +25,13 @@ class SemanticdbTextDocumentProvider(
       uri: URI,
       sourceCode: String
   ): Array[Byte] =
-    val filePath = Paths.get(uri)
+    val filePath = Paths.get(uri).nn
     val validCode = removeMagicImports(sourceCode, filePath)
     driver.run(
       uri,
-      SourceFile.virtual(filePath.toString, validCode)
+      SourceFile.virtual(filePath.toString(), validCode)
     )
-    val tree = driver.currentCtx.run.units.head.tpdTree
+    val tree = driver.currentCtx.run.nn.units.head.tpdTree
     val extractor = ExtractSemanticDB.Extractor()
     extractor.traverse(tree)(using driver.currentCtx)
     val path = workspace
@@ -42,12 +42,12 @@ class SemanticdbTextDocumentProvider(
         if Properties.isWin then relativeUri.toString().replace("\\", "/")
         else relativeUri.toString()
       }
-      .getOrElse(filePath.toString)
+      .getOrElse(filePath.toString())
 
     val document = TextDocument(
       schema = Schema.SEMANTICDB4,
       language = Language.SCALA,
-      uri = path,
+      uri = path.nn,
       text = sourceCode,
       md5 = MD5.compute(sourceCode),
       symbols = extractor.symbolInfos.toList,
@@ -57,7 +57,7 @@ class SemanticdbTextDocumentProvider(
     val out = SemanticdbOutputStream.newInstance(byteStream)
     document.writeTo(out)
     out.flush()
-    byteStream.toByteArray
+    byteStream.toByteArray().nn
   end textDocument
 end SemanticdbTextDocumentProvider
diff --git a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala
index f020a4d999d0..46ca85af7319 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala
@@ -28,14 +28,14 @@ object SignatureHelpProvider:
       params: OffsetParams,
       search: SymbolSearch
   ) =
-    val uri = params.uri
-    val sourceFile = SourceFile.virtual(params.uri, params.text)
-    driver.run(uri, sourceFile)
+    val uri = params.uri()
+    val sourceFile = SourceFile.virtual(params.uri().nn, params.text().nn)
+    driver.run(uri.nn, sourceFile)
 
     given ctx: Context = driver.currentCtx
 
     val pos = driver.sourcePosition(params)
-    val trees = driver.openedTrees(uri)
+    val trees = driver.openedTrees(uri.nn)
     val path =
       Interactive.pathTo(trees, pos).dropWhile(t => notCurrentApply(t, pos))
 
@@ -101,7 +101,7 @@ object SignatureHelpProvider:
       signature: Signatures.Signature,
       isJavaSymbol: Boolean
   ): Option[Signature] =
-    val allParams = info.parameters.asScala
+    val allParams = info.parameters().nn.asScala
     def updateParams(
         params: List[Signatures.Param],
         index: Int
@@ -114,11 +114,11 @@ object SignatureHelpProvider:
         case Some(paramDoc) =>
           val newName =
             if isJavaSymbol && head.name.startsWith("x$") then
-              paramDoc.displayName
+              paramDoc.nn.displayName()
             else head.name
           head.copy(
-            doc = Some(paramDoc.docstring),
-            name = newName
+            doc = Some(paramDoc.docstring.nn),
+            name = newName.nn
          ) :: rest
        case _ => head :: rest
@@ -132,7 +132,7 @@ object SignatureHelpProvider:
        val updated = updateParams(head, index)
        updated :: updateParamss(tail, index + head.size)
    val updatedParams = updateParamss(signature.paramss, 0)
-    Some(signature.copy(doc = Some(info.docstring), paramss = updatedParams))
+    Some(signature.copy(doc = Some(info.docstring().nn), paramss = updatedParams))
  end withDocumentation
 
  private def signatureToSignatureInformation(
@@ -174,12 +174,12 @@ object SignatureHelpProvider:
    documentation.foreach(info.setDocumentation(_))
    info
 
-  private def markupContent(content: String): l.MarkupContent =
-    if content.isEmpty then null
+  private def markupContent(content: String): l.MarkupContent | Null =
+    if content.isEmpty() then null
    else
      val markup = new l.MarkupContent
      markup.setKind("markdown")
-      markup.setValue(content.trim)
+      markup.setValue(content.trim())
      markup
 
 end SignatureHelpProvider
diff --git a/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala b/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala
index f4970424a490..747f104cfede 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala
@@ -21,7 +21,7 @@ object TastyUtils:
 
   private def normalTasty(tastyURI: URI): String =
     val tastyBytes = Files.readAllBytes(Paths.get(tastyURI))
-    new TastyPrinter(tastyBytes).showContents()
+    new TastyPrinter(tastyBytes.nn).showContents()
 
   private def htmlTasty(
       tastyURI: URI,
@@ -30,7 +30,7 @@ object TastyUtils:
   ): String =
     val title = tastyHtmlPageTitle(tastyURI)
     val tastyBytes = Files.readAllBytes(Paths.get(tastyURI))
-    val tastyHtml = new TastyHTMLPrinter(tastyBytes).showContents()
+    val tastyHtml = new TastyHTMLPrinter(tastyBytes.nn).showContents()
     HtmlBuilder()
       .page(title, htmlStyles :: headElems, bodyAttributes) { builder =>
         builder
@@ -40,7 +40,7 @@ object TastyUtils:
   end htmlTasty
 
   private def tastyHtmlPageTitle(file: URI) =
-    val filename = Paths.get(file).getFileName.toString
+    val filename = Paths.get(file).nn.getFileName().toString
     s"TASTy for $filename"
 
   private val standaloneHtmlStyles =
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala
index a2c74ef903ae..31bf7c348119 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala
@@ -38,15 +38,11 @@ object AmmoniteFileCompletions:
       workspace: Option[Path],
       rawFileName: String
   )(using Context): List[CompletionValue] =
+    val fileName: Option[String] = Option(rawFileName)
+      .flatMap(_.split("/").nn.lastOption.map(_.nn.stripSuffix(".amm.sc.scala")))
 
-    val fileName = rawFileName
-      .split("/")
-      .last
-      .stripSuffix(".amm.sc.scala")
-
-    val split = rawPath
-      .split("\\$file")
-      .toList
+    val split: List[String] = Option(rawPath)
+      .fold(Nil)(_.split("\\$file").nn.toList.map(_.nn))
 
     val editRange = selector.headOption.map { sel =>
       if sel.sourcePos.span.isZeroExtent then posRange
@@ -66,35 +62,33 @@ object AmmoniteFileCompletions:
         isDirectory = true
       )
 
+    def matches(file: Path): Boolean =
+      (Files.isDirectory(file) || file.toAbsolutePath().toString.isAmmoniteScript) &&
+        query.exists(q => CompletionFuzzy.matches(q.nn, file.getFileName().toString))
+
     (split, workspace) match
       case (_ :: script :: Nil, Some(workspace)) =>
         // drop / or \
         val current = workspace.resolve(script.drop(1))
         val importPath = translateImportToPath(select).drop(1)
-        val currentPath = current.getParent.resolve(importPath).toAbsolutePath
+        val currentPath = current.nn.getParent().nn.resolve(importPath).nn.toAbsolutePath()
         val parentTextEdit =
-          if query.exists(_.isEmpty()) &&
-            Files.exists(currentPath.getParent) && Files.isDirectory(
+          if query.exists(_.nn.isEmpty()) &&
+            Files.exists(currentPath.nn.getParent()) && Files.isDirectory(
               currentPath
             )
          then List(parent)
          else Nil
        Files
-          .list(currentPath)
-          .iterator
+          .list(currentPath).nn
+          .iterator().nn
          .asScala
          .toList
-          .filter(_.getFileName.toString.stripSuffix(".sc") != fileName)
+          .filter(path => !fileName.contains(path.nn.getFileName().toString.stripSuffix(".sc")))
          .collect {
-            case file
-                if (Files.isDirectory(
-                  file
-                ) || file.toAbsolutePath.toString.isAmmoniteScript) &&
-                  query.exists(
-                    CompletionFuzzy.matches(_, file.getFileName.toString)
-                  ) =>
+            case file if matches(file) =>
              CompletionValue.FileSystemMember(
-                file.getFileName.toString,
+                file.getFileName().toString,
                editRange,
                isDirectory = Files.isDirectory(file)
              )
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala
index 4404dbdfb7cc..39f7144835c9 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala
@@ -23,7 +23,7 @@ object AmmoniteIvyCompletions:
       case None => Nil
       case Some(dependency) =>
         val isInitialCompletion =
-          pos.lineContent.trim == "import $ivy."
+          pos.lineContent.trim() == "import $ivy."
         val ivyEditRange =
           if isInitialCompletion then completionPos.toEditRange
           else
@@ -31,7 +31,7 @@ object AmmoniteIvyCompletions:
             val (rangeStart, rangeEnd) =
               CoursierComplete.inferEditRange(pos.point, text)
             pos.withStart(rangeStart).withEnd(rangeEnd).toLsp
-        val completions = coursierComplete.complete(dependency)
+        val completions = coursierComplete.complete(dependency.nn)
         completions
           .map(insertText =>
             CompletionValue.IvyImport(
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala
index 29699bd05203..a0cf6bafcf46 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala
@@ -41,7 +41,7 @@ object CompletionPos:
       offsetParams: OffsetParams,
       treePath: List[Tree]
   )(using Context): CompletionPos =
-    infer(cursorPos, offsetParams.uri, offsetParams.text, treePath)
+    infer(cursorPos, offsetParams.uri().nn, offsetParams.text().nn, treePath)
 
   def infer(
       cursorPos: SourcePosition,
@@ -55,18 +55,11 @@ object CompletionPos:
     val prevIsDot =
       if start - 1 >= 0 then text.charAt(start - 1) == '.' else false
     val kind =
-      if query.isEmpty && !prevIsDot then CompletionKind.Empty
+      if query.nn.isEmpty() && !prevIsDot then CompletionKind.Empty
       else if prevIsDot then CompletionKind.Members
       else CompletionKind.Scope
 
-    CompletionPos(
-      kind,
-      start,
-      end,
-      query,
-      cursorPos,
-      uri
-    )
+    CompletionPos(kind, start, end, query.nn, cursorPos, uri)
   end infer
 
   /**
@@ -80,7 +73,7 @@ object CompletionPos:
   ): (Int, Boolean) =
     var i = 0
     var tabIndented = false
-    while lineOffset + i < text.length && {
+    while lineOffset + i < text.length() && {
       val char = text.charAt(lineOffset + i)
       if char == '\t' then tabIndented = true
@@ -132,7 +125,7 @@ object CompletionPos:
    */
   private def inferIdentEnd(pos: SourcePosition, text: String): Int =
     var i = pos.point
-    while i < text.length && Chars.isIdentifierPart(text.charAt(i)) do i += 1
+    while i < text.length() && Chars.isIdentifierPart(text.charAt(i)) do i += 1
     i
 
 end CompletionPos
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala
index 323f63050377..13a6e7cdb7cb 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala
@@ -41,10 +41,11 @@ class CompletionProvider(
     folderPath: Option[Path]
 )(using reports: ReportContext):
   def completions(): CompletionList =
-    val uri = params.uri
+    val uri = params.uri().nn
+    val text = params.text().nn
     val code = applyCompletionCursor(params)
-    val sourceFile = SourceFile.virtual(params.uri, code)
+    val sourceFile = SourceFile.virtual(uri, code)
     driver.run(uri, sourceFile)
 
     val ctx = driver.currentCtx
@@ -66,7 +67,7 @@ class CompletionProvider(
       CompletionPos.infer(pos, params, path)(using newctx)
     val autoImportsGen = AutoImports.generator(
       completionPos.sourcePos,
-      params.text,
+      text,
       unit.tpdTree,
       unit.comments,
       indexedCtx,
@@ -75,7 +76,7 @@ class CompletionProvider(
     val (completions, searchResult) =
       new Completions(
         pos,
-        params.text,
+        text,
         ctx.fresh.setCompilationUnit(unit),
         search,
         buildTargetIdentifier,
@@ -124,22 +125,22 @@ class CompletionProvider(
    * because scala parser trim end position to the last statement pos.
    */
   private def applyCompletionCursor(params: OffsetParams): String =
-    import params.*
+    val text = params.text().nn
+    val offset = params.offset().nn
+
     val isStartMultilineComment =
       val i = params.offset()
-      i >= 3 && (params.text().charAt(i - 1) match
+      i >= 3 && (text.charAt(i - 1) match
         case '*' =>
-          params.text().charAt(i - 2) == '*' &&
-          params.text().charAt(i - 3) == '/'
+          text.charAt(i - 2) == '*' &&
+          text.charAt(i - 3) == '/'
         case _ => false
       )
     if isStartMultilineComment then
       // Insert potentially missing `*/` to avoid comment out all codes after the "/**".
-      text.substring(0, offset) + Cursor.value + "*/" + text.substring(offset)
+      text.substring(0, offset).nn + Cursor.value + "*/" + text.substring(offset)
     else
-      text.substring(0, offset) + Cursor.value + text.substring(
-        offset
-      )
+      text.substring(0, offset).nn + Cursor.value + text.substring(offset)
   end applyCompletionCursor
 
   private def completionItems(
@@ -166,7 +167,7 @@ class CompletionProvider(
       additionalEdits: List[TextEdit] = Nil,
       range: Option[LspRange] = None
   ): CompletionItem =
-    val oldText = params.text.substring(completionPos.start, completionPos.end)
+    val oldText = params.text().nn.substring(completionPos.start, completionPos.end)
     val editRange =
       if newText.startsWith(oldText) then completionPos.stripSuffixEditRange
       else completionPos.toEditRange
@@ -186,7 +187,7 @@ class CompletionProvider(
 
     item.setTags(completion.lspTags.asJava)
 
-    if config.isCompletionSnippetsEnabled then
+    if config.isCompletionSnippetsEnabled() then
       item.setInsertTextFormat(InsertTextFormat.Snippet)
 
     completion.command.foreach { command =>
@@ -230,18 +231,10 @@ class CompletionProvider(
       case Some(edits) =>
         edits match
           case AutoImportEdits(Some(nameEdit), other) =>
-            mkItem(
-              nameEdit.getNewText(),
-              other.toList,
-              range = Some(nameEdit.getRange())
-            )
+            mkItem(nameEdit.getNewText().nn, other.toList, range = Some(nameEdit.getRange().nn))
          case _ =>
            mkItem(
-              v.insertText.getOrElse(
-                ident.backticked(
-                  backtickSoftKeyword
-                ) + completionTextSuffix
-              ),
+              v.insertText.getOrElse( ident.backticked(backtickSoftKeyword) + completionTextSuffix),
              edits.edits,
              range = v.range
            )
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala
index c5232418a813..6011a1a3d660 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala
@@ -162,7 +162,7 @@ object CompletionValue:
       tpe: Type,
       symbol: Symbol
   ) extends Symbolic:
-    override def insertText: Option[String] = Some(label.replace("$", "$$"))
+    override def insertText: Option[String] = Some(label.replace("$", "$$").nn)
     override def completionItemKind(using Context): CompletionItemKind =
       CompletionItemKind.Field
     override def description(printer: ShortenedTypePrinter)(using Context): String =
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala
index aa3fa59e518e..557d1762720b 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala
@@ -87,7 +87,7 @@ class Completions(
     def hasSyntheticCursorSuffix: Boolean =
       if !sym.name.endsWith(Cursor.value) then false
       else
-        val realNameLength = sym.decodedName.length - Cursor.value.length
+        val realNameLength = sym.decodedName.length() - Cursor.value.length()
         sym.source == pos.source &&
         sym.span.start + realNameLength == pos.span.end
 
@@ -201,7 +201,7 @@ class Completions(
       paramss match
         case Nil => suffix
         case List(Nil) => suffix.withNewSuffix(SuffixKind.Brace)
-        case _ if config.isCompletionSnippetsEnabled =>
+        case _ if config.isCompletionSnippetsEnabled() =>
           val onlyParameterless = paramss.forall(_.isEmpty)
           lazy val onlyImplicitOrTypeParams = paramss.forall(
             _.exists { sym =>
@@ -273,7 +273,7 @@ class Completions(
       completionPos: CompletionPos
   ): (List[CompletionValue], Boolean) =
     lazy val rawPath = Paths
-      .get(pos.source.path)
+      .get(pos.source.path).nn
     lazy val rawFileName = rawPath
       .getFileName()
       .toString()
@@ -391,7 +391,7 @@ class Completions(
       // class Fo@@
       case (td: TypeDef) :: _
           if Fuzzy.matches(
-            td.symbol.name.decoded.replace(Cursor.value, ""),
+            td.symbol.name.decoded.replace(Cursor.value, "").nn,
             filename
           ) =>
         val values = FilenameCompletions.contribute(filename, td)
@@ -452,7 +452,7 @@ class Completions(
           pos,
           path,
           indexedContext,
-          config.isCompletionSnippetsEnabled
+          config.isCompletionSnippetsEnabled()
         )
         (args, false)
     end match
@@ -517,7 +517,7 @@ class Completions(
             CompletionValue.Workspace(_, _, _, sym)
           ).map(visit).forall(_ == true),
         )
-        Some(search.search(query, buildTargetIdentifier, visitor))
+        Some(search.search(query, buildTargetIdentifier, visitor).nn)
       case CompletionKind.Members =>
         val visitor = new CompilerSearchVisitor(sym =>
           if sym.is(ExtensionMethod) &&
@@ -530,7 +530,7 @@ class Completions(
           ).map(visit).forall(_ == true)
           else false,
         )
-        Some(search.searchMethods(query, buildTargetIdentifier, visitor))
+        Some(search.searchMethods(query, buildTargetIdentifier, visitor).nn)
     end match
   end enrichWithSymbolSearch
 
@@ -571,7 +571,7 @@ class Completions(
         val nameId =
           if sym.isClass || sym.is(Module) then
            // drop #|. at the end to avoid duplication
-            name.substring(0, name.length - 1)
+            name.substring(0, name.length() - 1).nn
          else name
        val suffix = if symOnly.snippetSuffix.addLabelSnippet then "[]" else ""
@@ -693,7 +693,7 @@ class Completions(
          if !ov.symbol.is(Deferred) then penalty |= MemberOrdering.IsNotAbstract
          penalty
        case CompletionValue.Workspace(_, sym, _, _) =>
-          symbolRelevance(sym) | (IsWorkspaceSymbol + sym.name.show.length)
+          symbolRelevance(sym) | (IsWorkspaceSymbol + sym.name.show.length())
        case sym: CompletionValue.Symbolic =>
          symbolRelevance(sym.symbol)
        case _ =>
@@ -782,9 +782,9 @@ class Completions(
      def fuzzyScore(o: CompletionValue.Symbolic): Int =
        fuzzyCache.getOrElseUpdate(
          o, {
-            val name = o.label.toLowerCase()
+            val name = o.label.toLowerCase().nn
            if name.startsWith(queryLower) then 0
-            else if name.toLowerCase().contains(queryLower) then 1
+            else if name.contains(queryLower) then 1
            else 2
          }
        )
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala
index 0a929c79dcc5..1d063bc6d873 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala
@@ -35,7 +35,7 @@ object InterpolatorCompletions:
       config: PresentationCompilerConfig,
       buildTargetIdentifier: String
   )(using Context, ReportContext) =
-    InterpolationSplice(pos.span.point, text.toCharArray(), text) match
+    InterpolationSplice(pos.span.point, text.toCharArray().nn, text) match
       case Some(interpolator) =>
         InterpolatorCompletions.contributeScope(
           text,
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/KeywordsCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/KeywordsCompletions.scala
index a5eb23b26ed3..9f1a5a0e9bff 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/KeywordsCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/KeywordsCompletions.scala
@@ -1,10 +1,13 @@
 package dotty.tools.pc.completions
 
+import scala.collection.mutable.ListBuffer
 import scala.meta.internal.pc.Keyword
 
 import dotty.tools.dotc.ast.NavigateAST
+import dotty.tools.dotc.ast.Positioned
 import dotty.tools.dotc.ast.tpd.*
 import dotty.tools.dotc.ast.untpd
+import dotty.tools.dotc.ast.untpd.UntypedTreeTraverser
 import dotty.tools.dotc.core.Comments
 import dotty.tools.dotc.core.Comments.Comment
 import dotty.tools.dotc.core.Contexts.Context
@@ -23,7 +26,7 @@ object KeywordsCompletions:
       checkIfNotInComment(completionPos.cursorPos, comments)
 
     path match
-      case Nil if completionPos.query.isEmpty =>
+      case Nil if completionPos.query.isEmpty() =>
         Keyword.all.collect {
           // topelevel definitions are allowed in Scala 3
           case kw if (kw.isPackage || kw.isTemplate) && notInComment =>
@@ -163,6 +166,19 @@ object KeywordsCompletions:
   def checkTemplateForNewParents(enclosing: List[Tree], pos: CompletionPos)(
       using ctx: Context
   ): TemplateKeywordAvailability =
+
+    def collectTypeAndModuleDefs(
+        tree: untpd.Tree,
+        f: PartialFunction[untpd.Tree, Boolean]
+    )(using Context): List[untpd.Tree] = {
+      val buf = ListBuffer.empty[untpd.Tree]
+      val traverser = new UntypedTreeTraverser:
+        def traverse(tree: untpd.Tree)(using Context) =
+          foldOver(if f(tree) then buf += tree, tree)
+      traverser.traverse(tree)
+      buf.toList
+    }
+
     /*
      * Finds tree which ends just before cursor positions, that may be extended or derive.
      * In Scala 3, such tree must be a `TypeDef` which has field of type `Template` describing
      *
      * @returns TypeDef tree defined before the cursor position or `enclosingTree` otherwise
      */
-    def findLastSatisfyingTree(span: Span): Option[Tree] =
-      NavigateAST.untypedPath(span).headOption.flatMap {
-        case other: untpd.Tree =>
-          val typeDefs = other.filterSubTrees {
-            // package test
-            // class Test ext@@ - Interactive.pathTo returns `PackageDef` instead of `TypeDef`
-            //   - because it tried to repair the broken tree by finishing `TypeDef` before ext
-            //
-            // The cursor position is 27 and tree positions after parsing are:
-            //
-            // package Test@../Test.sc<8..12> {
-            //   class Test {}@../Test.sc[13..19..23]
-            // }@../Test.sc<0..27>
+    def findLastSatisfyingTree(untpdPath: List[Positioned]): Option[untpd.Tree] =
+      untpdPath.headOption.flatMap {
+        case untpdTree: untpd.Tree =>
+          collectTypeAndModuleDefs(untpdTree, {
             case typeDef: (untpd.TypeDef | untpd.ModuleDef) =>
               typeDef.span.exists && typeDef.span.end < pos.sourcePos.span.start
-            case other =>
-              false
-          }
-
-          typeDefs match
-            // If we didn't find any trees, it means the enclosingTree is not a TypeDef,
-            // thus can't be followed with `extends`, `with` and `derives`
-            case Nil =>
-              // we have to fallback to typed tree and check if it is an enum
-              enclosing match
-                case (tree: TypeDef) :: _ if tree.symbol.isEnumClass =>
-                  Some(other)
-                case _ => None
-            case other =>
-              other
-                .filter(tree => tree.span.exists && tree.span.end < pos.start)
-                .maxByOption(_.span.end)
+            case _ => false
+          })
+            .filter(tree => tree.span.exists && tree.span.end < pos.start)
+            .maxByOption(_.span.end)
         case _ => None
       }
-    end findLastSatisfyingTree
-
     def checkForPossibleKeywords(
         template: Template
     ): TemplateKeywordAvailability =
@@ -219,16 +210,17 @@ object KeywordsCompletions:
         template.derived.isEmpty
       )
 
-    findLastSatisfyingTree(pos.cursorPos.span)
-      .flatMap {
-        case untpd.TypeDef(_, template: Template) =>
-          Some(checkForPossibleKeywords(template))
-        case untpd.ModuleDef(_, template: Template) =>
-          Some(checkForPossibleKeywords(template))
-        case template: Template => Some(checkForPossibleKeywords(template))
-        case other => None
-      }
-      .getOrElse(TemplateKeywordAvailability.default)
+    val untpdPath = NavigateAST.untypedPath(pos.cursorPos.span)
+
+    findLastSatisfyingTree(untpdPath).orElse {
+      enclosing match
+        case (typeDef: TypeDef) :: _ if typeDef.symbol.isEnumClass => untpdPath.headOption
+        case _ => None
+    }.map {
+      case untpd.TypeDef(_, template: Template) => checkForPossibleKeywords(template)
+      case untpd.ModuleDef(_, template: Template) => checkForPossibleKeywords(template)
+      case template: Template => checkForPossibleKeywords(template)
+    }.getOrElse(TemplateKeywordAvailability.default)
+
   end checkTemplateForNewParents
 
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala
index d55df93f1e70..fe9a73655835 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala
@@ -74,20 +74,16 @@ object CaseKeywordCompletion:
     val parents: Parents = selector match
       case EmptyTree =>
         val seenFromType = parent match
-          case TreeApply(fun, _) if fun.tpe != null && !fun.tpe.isErroneous =>
-            fun.tpe
-          case _ =>
-            parent.tpe
+          case TreeApply(fun, _) if !fun.tpe.isErroneous => fun.tpe
+          case _ => parent.tpe
         seenFromType.paramInfoss match
           case (head :: Nil) :: _
               if definitions.isFunctionType(head) || head.isRef(
                 definitions.PartialFunctionClass
               ) =>
-            val argTypes =
-              head.argTypes.init
+            val argTypes = head.argTypes.init
             new Parents(argTypes, definitions)
-          case _ =>
-            new Parents(NoType, definitions)
+          case _ => new Parents(NoType, definitions)
       case sel => new Parents(sel.tpe, definitions)
 
     val selectorSym = parents.selector.widen.metalsDealias.typeSymbol
@@ -113,7 +109,7 @@ object CaseKeywordCompletion:
             ),
             Nil,
             range = Some(completionPos.toEditRange),
-            command = config.parameterHintsCommand().asScala,
+            command = config.parameterHintsCommand().nn.asScala,
           )
         )
       else Nil
@@ -305,10 +301,7 @@ object CaseKeywordCompletion:
         syms.sortBy(_._1.sym.sourcePos.point)
       else
         val defnSymbols = search
-          .definitionSourceToplevels(
-            SemanticdbSymbols.symbolName(tpe.typeSymbol),
-            uri
-          )
+          .definitionSourceToplevels(SemanticdbSymbols.symbolName(tpe.typeSymbol), uri).nn
          .asScala
          .zipWithIndex
          .toMap
@@ -410,11 +403,7 @@ class CompletionValueGenerator(
      case None => true
      case Some("") => true
      case Some(Cursor.value) => true
-      case Some(query) =>
-        CompletionFuzzy.matches(
-          query.replace(Cursor.value, ""),
-          name
-        )
+      case Some(query) => CompletionFuzzy.matches(query.replace(Cursor.value, "").nn, name)
 
  def labelForCaseMember(sym: Symbol, name: String)(using
      Context
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/MultilineCommentCompletion.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/MultilineCommentCompletion.scala
index 46a23446a7f1..ca207c9f7b80 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/MultilineCommentCompletion.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/MultilineCommentCompletion.scala
@@ -7,7 +7,7 @@ import dotty.tools.dotc.util.SourcePosition
 object MultilineCommentCompletion:
 
   def contribute(config: PresentationCompilerConfig): List[CompletionValue] =
-    val newText = if config.isCompletionSnippetsEnabled then " $0 */" else " */"
+    val newText = if config.isCompletionSnippetsEnabled() then " $0 */" else " */"
     List(CompletionValue.document("/* */", newText, "Multiline Comment"))
 
   def isMultilineCommentCompletion(pos: SourcePosition, text: String): Boolean =
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala
index d5ecf60dc341..54325c89945b 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala
@@ -270,11 +270,12 @@ object NamedArgCompletions:
       )
     }
 
-    val prefix =
-      ident
-        .map(_.name.toString)
-        .getOrElse("")
-        .replace(Cursor.value, "")
+    val prefix = ident
+      .map(_.name.toString)
+      .getOrElse("")
+      .replace(Cursor.value, "")
+      .nn
+
     val params: List[ParamSymbol] =
       allParams
         .filter(param => param.name.startsWith(prefix))
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala
index c4c9b47ff4a4..e7b1acb9aa87 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala
@@ -89,7 +89,7 @@ object OverrideCompletions:
     val name = completing
       .fold(fallbackName)(sym => Some(sym.name.show))
-      .map(_.replace(Cursor.value, ""))
+      .map(_.replace(Cursor.value, "").nn)
       .filter(!_.isEmpty())
 
     // not using `td.tpe.abstractTermMembers` because those members includes
@@ -173,50 +173,52 @@ object OverrideCompletions:
         case _ => None
     end FindTypeDef
 
-    val uri = params.uri
-    driver.run(
-      uri,
-      SourceFile.virtual(uri.toASCIIString, params.text)
-    )
-    val unit = driver.currentCtx.run.units.head
-    val pos = driver.sourcePosition(params)
-
-    val newctx = driver.currentCtx.fresh.setCompilationUnit(unit)
-    val tpdTree = newctx.compilationUnit.tpdTree
-    val path =
-      Interactive.pathTo(tpdTree, pos.span)(using newctx) match
-        case path @ TypeDef(_, template) :: _ =>
-          template :: path
-        case path => path
-
-    val indexedContext = IndexedContext(
-      Interactive.contextOfPath(path)(using newctx)
-    )
-    import indexedContext.ctx
+    val uri = params.uri().nn
+    val text = params.text().nn
+    driver.run(uri, SourceFile.virtual(uri.toASCIIString().nn, text))
+
+    val unit = driver.currentCtx.run.nn.units.headOption
+    unit match
+      case None => new ju.ArrayList[l.TextEdit]()
+      case Some(unit) =>
+        val pos = driver.sourcePosition(params)
+
+        val newctx = driver.currentCtx.fresh.setCompilationUnit(unit)
+        val tpdTree = newctx.compilationUnit.tpdTree
+        val path =
+          Interactive.pathTo(tpdTree, pos.span)(using newctx) match
+            case path @ TypeDef(_, template) :: _ =>
+              template :: path
+            case path => path
+
+        val indexedContext = IndexedContext(
+          Interactive.contextOfPath(path)(using newctx)
+        )
+        import indexedContext.ctx
 
-    lazy val autoImportsGen = AutoImports.generator(
-      pos,
-      params.text,
-      unit.tpdTree,
-      unit.comments,
-      indexedContext,
-      config
-    )
-    lazy val implementAll = implementAllFor(
-      indexedContext,
-      params.text,
-      search,
-      autoImportsGen,
-      config
-    )
-    path match
-      // given <>
-      case (_: Ident) :: (dd: DefDef) :: _ =>
-        implementAll(dd).asJava
-      case FindTypeDef(td) =>
-        implementAll(td).asJava
-      case _ =>
-        new ju.ArrayList[l.TextEdit]()
+        lazy val autoImportsGen = AutoImports.generator(
+          pos,
+          text,
+          unit.tpdTree,
+          unit.comments,
+          indexedContext,
+          config
+        )
+        lazy val implementAll = implementAllFor(
+          indexedContext,
+          text,
+          search,
+          autoImportsGen,
+          config
+        )
+        path match
+          // given <>
+          case (_: Ident) :: (dd: DefDef) :: _ =>
+            implementAll(dd).asJava
+          case FindTypeDef(td) =>
+            implementAll(td).asJava
+          case _ =>
+            new ju.ArrayList[l.TextEdit]()
   end implementAllAt
 
@@ -440,7 +442,7 @@ object OverrideCompletions:
     val label = s"$overrideDefLabel$signature"
     val stub =
-      if config.isCompletionSnippetsEnabled && shouldMoveCursor then "${0:???}"
+      if config.isCompletionSnippetsEnabled() && shouldMoveCursor then "${0:???}"
       else "???"
 
     val value = s"$signature = $stub"
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/ScaladocCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/ScaladocCompletions.scala
index 81260c4df923..136c4a268df7 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/ScaladocCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/ScaladocCompletions.scala
@@ -31,7 +31,7 @@ object ScaladocCompletions:
     val builder = new StringBuilder()
     builder.append("\n")
     builder.append(s"${indent}*")
-    if config.isCompletionSnippetsEnabled then builder.append(" $0\n")
+    if config.isCompletionSnippetsEnabled() then builder.append(" $0\n")
     else builder.append("\n")
     if params.nonEmpty || hasReturnValue then builder.append(s"$indent*\n")
 
diff --git a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala
index 5652fd0d9bcc..9c255d20d212 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala
@@ -295,7 +295,7 @@ class ShortenedTypePrinter(
     lazy val paramsDocs = symbolSearch.symbolDocumentation(gsym) match
       case Some(info) =>
-        (info.typeParameters.asScala ++ info.parameters.asScala).toSeq
+        (info.typeParameters().nn.asScala ++ info.parameters().nn.asScala).toSeq
       case _ =>
         Seq.empty
 
@@ -503,7 +503,7 @@ class ShortenedTypePrinter(
     if includeDefaultParam == ShortenedTypePrinter.IncludeDefaultParam.Include && isDefaultParam
     then
       val defaultValue = docInfo match
-        case Some(value) if !value.defaultValue().isEmpty =>
+        case Some(value) if !value.defaultValue().nn.isEmpty() =>
          value.defaultValue()
        case _ => "..."
s" = $defaultValue" diff --git a/presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala b/presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala index ff081c779342..337e0790b738 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala @@ -9,6 +9,7 @@ import scala.meta.pc.RangeParams import scala.meta.pc.SymbolDocumentation import scala.meta.pc.SymbolSearch import scala.util.control.NonFatal +import scala.jdk.OptionConverters.* import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Contexts.* @@ -37,29 +38,29 @@ object MtagsEnrichments extends CommonMtagsEnrichments: def sourcePosition( params: OffsetParams ): SourcePosition = - val uri = params.uri - val source = driver.openedFiles(uri) + val uri = params.uri() + val source = driver.openedFiles(uri.nn) val span = params match - case p: RangeParams if p.offset != p.endOffset => + case p: RangeParams if p.offset() != p.endOffset() => p.trimWhitespaceInRange.fold { - Spans.Span(p.offset, p.endOffset) + Spans.Span(p.offset(), p.endOffset()) } { case trimmed: RangeParams => - Spans.Span(trimmed.offset, trimmed.endOffset) + Spans.Span(trimmed.offset(), trimmed.endOffset()) case offset => - Spans.Span(p.offset, p.offset) + Spans.Span(p.offset(), p.offset()) } - case _ => Spans.Span(params.offset) + case _ => Spans.Span(params.offset()) new SourcePosition(source, span) end sourcePosition def localContext(params: OffsetParams): Context = - if driver.currentCtx.run.units.isEmpty then + if driver.currentCtx.run.nn.units.isEmpty then throw new RuntimeException( "No source files were passed to the Scala 3 presentation compiler" ) - val unit = driver.currentCtx.run.units.head + val unit = driver.currentCtx.run.nn.units.head val pos = driver.sourcePosition(params) val newctx = driver.currentCtx.fresh.setCompilationUnit(unit) val tpdPath = @@ -100,7 +101,7 @@ object MtagsEnrichments extends CommonMtagsEnrichments: for uri <- InteractiveDriver.toUriOption(pos.source) range <- if pos.exists then Some(pos.toLsp) else None - yield new l.Location(uri.toString, range) + yield new l.Location(uri.toString(), range) def encloses(other: SourcePosition): Boolean = pos.start <= other.start && pos.end >= other.end @@ -208,8 +209,7 @@ object MtagsEnrichments extends CommonMtagsEnrichments: sym, () => parentSymbols.iterator.map(toSemanticdbSymbol).toList.asJava, ) - if documentation.isPresent then Some(documentation.get()) - else None + documentation.nn.toScala end symbolDocumentation end extension diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseAutoImportsSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseAutoImportsSuite.scala index 57efb7dce4dc..f2732c28e93a 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseAutoImportsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseAutoImportsSuite.scala @@ -5,6 +5,7 @@ import java.nio.file.Paths import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.metals.CompilerOffsetParams import scala.meta.pc.AutoImportsResult +import scala.language.unsafeNulls import dotty.tools.pc.utils.TextEdits diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseCodeActionSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseCodeActionSuite.scala index bab6bb529c1e..a6a864c901d5 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseCodeActionSuite.scala +++ 
+++ b/presentation-compiler/test/dotty/tools/pc/base/BaseCodeActionSuite.scala
@@ -6,6 +6,7 @@ import java.nio.file.Files
 import scala.collection.immutable
 import scala.meta.internal.metals.EmptyCancelToken
 import scala.meta.pc.CancelToken
+import scala.language.unsafeNulls
 
 abstract class BaseCodeActionSuite extends BasePCSuite:
 
diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala
index 76bf795ccfc5..8314c9370fca 100644
--- a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala
@@ -6,6 +6,7 @@ import java.util.Collections
 import scala.jdk.CollectionConverters.*
 import scala.meta.internal.metals.{CompilerOffsetParams, EmptyCancelToken}
 import scala.meta.pc.CancelToken
+import scala.language.unsafeNulls
 
 import dotty.tools.pc.utils.MtagsEnrichments.*
 import dotty.tools.pc.utils.{TestCompletions, TextEdits}
diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseDocumentHihglightSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseDocumentHihglightSuite.scala
index bc470bf3cab2..730f32e3d41b 100644
--- a/presentation-compiler/test/dotty/tools/pc/base/BaseDocumentHihglightSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/base/BaseDocumentHihglightSuite.scala
@@ -4,6 +4,7 @@ import java.net.URI
 
 import scala.meta.internal.jdk.CollectionConverters.*
 import scala.meta.internal.metals.{CompilerOffsetParams, EmptyCancelToken}
+import scala.language.unsafeNulls
 
 import dotty.tools.pc.utils.RangeReplace
 
diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseExtractMethodSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseExtractMethodSuite.scala
index 364012eb4405..af4f21fbe991 100644
--- a/presentation-compiler/test/dotty/tools/pc/base/BaseExtractMethodSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/base/BaseExtractMethodSuite.scala
@@ -4,6 +4,7 @@ import java.net.URI
 
 import scala.meta.internal.jdk.CollectionConverters.*
 import scala.meta.internal.metals.{CompilerOffsetParams, CompilerRangeParams}
+import scala.language.unsafeNulls
 
 import dotty.tools.pc.utils.TextEdits
 
diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala
index a541c39b4761..0b8d663f8b33 100644
--- a/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala
@@ -3,6 +3,7 @@ package dotty.tools.pc.base
 import java.nio.file.Paths
 
 import scala.meta.internal.metals.{CompilerOffsetParams, CompilerRangeParams}
+import scala.language.unsafeNulls
 
 import dotty.tools.pc.utils.MtagsEnrichments.*
 import dotty.tools.pc.utils.{RangeReplace, TestHovers}
diff --git a/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala
index eb7a0783d941..7e2f6669c6a7 100644
--- a/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala
@@ -11,6 +11,7 @@ import scala.meta.internal.jdk.CollectionConverters.*
 import scala.meta.internal.metals.{ClasspathSearch, ExcludedPackagesHandler}
 import scala.meta.internal.pc.PresentationCompilerConfigImpl
 import scala.meta.pc.{PresentationCompiler, PresentationCompilerConfig}
+import scala.language.unsafeNulls
 
 import dotty.tools.pc.*
import dotty.tools.pc.ScalaPresentationCompiler diff --git a/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala index a4e67bbdac17..8269d4ce1c44 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala @@ -4,6 +4,7 @@ import java.nio.file.Paths import scala.meta.internal.metals.CompilerOffsetParams import scala.meta.pc.OffsetParams +import scala.language.unsafeNulls import dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.{SourceFile, SourcePosition} diff --git a/presentation-compiler/test/dotty/tools/pc/base/BasePcRenameSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BasePcRenameSuite.scala index 52ffb91c522a..dd30eda1a1c5 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BasePcRenameSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BasePcRenameSuite.scala @@ -4,6 +4,7 @@ import java.net.URI import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.metals.{CompilerOffsetParams, EmptyCancelToken} +import scala.language.unsafeNulls import dotty.tools.pc.utils.{RangeReplace, TextEdits} diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseSelectionRangeSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseSelectionRangeSuite.scala index 3dc854e86477..1f4b15f3500a 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseSelectionRangeSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseSelectionRangeSuite.scala @@ -7,6 +7,7 @@ import scala.collection.immutable import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.metals.{CompilerOffsetParams, EmptyCancelToken} import scala.meta.pc.OffsetParams +import scala.language.unsafeNulls import dotty.tools.pc.utils.TestExtensions._ diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseSemanticTokensSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseSemanticTokensSuite.scala index 660549a32943..7b7ec5b551d2 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseSemanticTokensSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseSemanticTokensSuite.scala @@ -4,6 +4,7 @@ import java.net.URI import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.metals.CompilerVirtualFileParams +import scala.language.unsafeNulls import dotty.tools.pc.utils.TestSemanticTokens diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala index 2306c51bffdb..f993bb49921e 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala @@ -4,6 +4,7 @@ import java.nio.file.Paths import scala.jdk.CollectionConverters.* import scala.meta.internal.metals.CompilerOffsetParams +import scala.language.unsafeNulls abstract class BaseSignatureHelpSuite extends BasePCSuite: def checkDoc( diff --git a/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala b/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala index 43ebffa25e1f..82e697e6e9a1 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala @@ -1,6 +1,7 @@ 
package dotty.tools.pc.base import scala.jdk.CollectionConverters._ +import scala.language.unsafeNulls import org.junit.runners.BlockJUnit4ClassRunner import org.junit.runners.model.FrameworkMethod diff --git a/presentation-compiler/test/dotty/tools/pc/tests/CompilerJobQueueSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/CompilerJobQueueSuite.scala index 97a8759f3d53..123eed81a51c 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/CompilerJobQueueSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/CompilerJobQueueSuite.scala @@ -7,6 +7,7 @@ import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration.Duration import scala.concurrent.{Await, Future, Promise} import scala.meta.internal.pc.CompilerJobQueue +import scala.language.unsafeNulls import scala.util.Try import org.junit.{After, Assert, Before, Test} diff --git a/presentation-compiler/test/dotty/tools/pc/tests/PcSemanticdbSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/PcSemanticdbSuite.scala index 192abb418373..8d412bcc844a 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/PcSemanticdbSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/PcSemanticdbSuite.scala @@ -4,6 +4,7 @@ import java.net.URI import dotty.tools.dotc.semanticdb.{SymbolOccurrence, TextDocument} import dotty.tools.pc.base.BasePCSuite +import scala.language.unsafeNulls import org.junit.Test diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala index db32ec8e894f..4746eb93f25d 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala @@ -16,6 +16,7 @@ import scala.meta.internal.pc.{ PresentationCompilerConfigImpl } import scala.meta.pc.{CancelToken, PresentationCompilerConfig} +import scala.language.unsafeNulls import dotty.tools.pc.base.BaseCompletionSuite diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala index ded1700fd603..583b138a255b 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala @@ -505,6 +505,19 @@ class CompletionKeywordSuite extends BaseCompletionSuite: |""".stripMargin ) + @Test def `extends-class-nested-in-object` = + check( + """ + |package foo + | + |object Foo { + | class Boo ext@@ + |} + """.stripMargin, + """|extends + |""".stripMargin + ) + @Test def `extends-class-nested-with-body` = check( """ diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index a64a6dfac6a2..055363830a1b 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -1,6 +1,7 @@ package dotty.tools.pc.tests.completion import scala.meta.pc.SymbolDocumentation +import scala.language.unsafeNulls import dotty.tools.pc.base.BaseCompletionSuite import dotty.tools.pc.utils.MockEntries diff --git a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala 
b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala index 2b49d2db3f08..358e159eb539 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala @@ -2,6 +2,7 @@ package dotty.tools.pc.tests.definition import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.pc.OffsetParams +import scala.language.unsafeNulls import dotty.tools.pc.base.BasePcDefinitionSuite import dotty.tools.pc.utils.MockEntries diff --git a/presentation-compiler/test/dotty/tools/pc/tests/definition/TypeDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/definition/TypeDefinitionSuite.scala index b8d737b0991d..1fe4abfa9628 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/definition/TypeDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/definition/TypeDefinitionSuite.scala @@ -2,6 +2,7 @@ package dotty.tools.pc.tests.definition import scala.jdk.CollectionConverters.* import scala.meta.pc.OffsetParams +import scala.language.unsafeNulls import dotty.tools.pc.base.BasePcDefinitionSuite import dotty.tools.pc.utils.MockEntries diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala index 72a7c01597a7..04c3f8a018e9 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala @@ -4,6 +4,7 @@ import java.net.URI import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.metals.CompilerOffsetParams +import scala.language.unsafeNulls import dotty.tools.pc.base.BaseCodeActionSuite import dotty.tools.pc.utils.TextEdits diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/ConvertToNamedArgumentsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/ConvertToNamedArgumentsSuite.scala index 9d3cf6c5c92e..5285be83b537 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/ConvertToNamedArgumentsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/ConvertToNamedArgumentsSuite.scala @@ -7,6 +7,7 @@ import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.metals.CompilerOffsetParams import scala.meta.internal.pc.CodeActionErrorMessages import scala.meta.pc.DisplayableException +import scala.language.unsafeNulls import dotty.tools.pc.base.BaseCodeActionSuite import dotty.tools.pc.utils.TextEdits @@ -102,8 +103,7 @@ class ConvertToNamedArgumentsSuite extends BaseCodeActionSuite: catch case e: ExecutionException => e.getCause() match - case cause: DisplayableException => - assertNoDiff(expectedErrorMsg, cause.getMessage) + case cause => assertNoDiff(expectedErrorMsg, cause.getMessage) def checkEdit( original: String, diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/InlineValueSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/InlineValueSuite.scala index 60f5f01d1424..0cec3952a7ef 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/InlineValueSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/InlineValueSuite.scala @@ -7,6 +7,7 @@ import scala.meta.internal.metals.CompilerOffsetParams import scala.meta.internal.mtags.CommonMtagsEnrichments import 
scala.meta.internal.pc.InlineValueProvider.Errors as InlineErrors import scala.meta.pc.DisplayableException +import scala.language.unsafeNulls import dotty.tools.pc.base.BaseCodeActionSuite import dotty.tools.pc.utils.TextEdits diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/InsertInferredTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/InsertInferredTypeSuite.scala index 982264fa4b2a..d6707c54894e 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/InsertInferredTypeSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/InsertInferredTypeSuite.scala @@ -4,6 +4,7 @@ import java.net.URI import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.metals.CompilerOffsetParams +import scala.language.unsafeNulls import dotty.tools.pc.base.BaseCodeActionSuite import dotty.tools.pc.utils.TextEdits diff --git a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala index 3dc23fd8ab37..b0be98850630 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala @@ -14,6 +14,7 @@ import scala.meta.pc.{ SymbolSearch, SymbolSearchVisitor } +import scala.language.unsafeNulls import org.eclipse.lsp4j.Location diff --git a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala index 8d9f2446cd90..b77ad7f64bde 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala @@ -1,5 +1,7 @@ package dotty.tools.pc.utils +import scala.language.unsafeNulls + import dotty.tools.dotc.util.DiffUtil import dotty.tools.pc.utils.MtagsEnrichments.* diff --git a/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala b/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala index 7dc0f0a2a5c7..0b41b106eb02 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala @@ -1,6 +1,7 @@ package dotty.tools.pc.utils import scala.collection.immutable +import scala.language.unsafeNulls import dotty.tools.pc.utils.TestExtensions.* diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestCompletions.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestCompletions.scala index 2450ce870ac4..769ad7675866 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TestCompletions.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestCompletions.scala @@ -1,6 +1,7 @@ package dotty.tools.pc.utils import org.eclipse.lsp4j.CompletionItem +import scala.language.unsafeNulls object TestCompletions: diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestHovers.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestHovers.scala index b0cf49048137..1241f5aeaa65 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TestHovers.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestHovers.scala @@ -1,6 +1,7 @@ package dotty.tools.pc.utils import scala.meta.internal.pc.HoverMarkup +import scala.language.unsafeNulls import dotty.tools.pc.utils.TestExtensions.* diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestSemanticTokens.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestSemanticTokens.scala index 
5c298668a704..25da747f5812 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TestSemanticTokens.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestSemanticTokens.scala @@ -6,6 +6,7 @@ import scala.collection.mutable.ListBuffer import scala.meta.internal.pc.SemanticTokens import scala.meta.internal.pc.SemanticTokens.* import scala.meta.pc.Node +import scala.language.unsafeNulls import org.eclipse.lsp4j as l diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala index ebbb29769ebb..3e8f6d261155 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala @@ -10,6 +10,7 @@ import scala.meta.internal.metals.{ WorkspaceSymbolQuery } import scala.meta.pc.SymbolSearchVisitor +import scala.language.unsafeNulls import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Symbols.* diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TextEdits.scala b/presentation-compiler/test/dotty/tools/pc/utils/TextEdits.scala index 890a9366e9b5..f6d12bd88e9a 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TextEdits.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TextEdits.scala @@ -2,6 +2,7 @@ package dotty.tools.pc.utils import scala.jdk.CollectionConverters.* import scala.meta.internal.mtags.CommonMtagsEnrichments.* +import scala.language.unsafeNulls import dotty.tools.pc.utils.TestExtensions.* diff --git a/project/Build.scala b/project/Build.scala index d44effa5431f..3ac19a0f7b35 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1254,18 +1254,20 @@ object Build { BuildInfoPlugin.buildInfoDefaultSettings lazy val presentationCompilerSettings = { - val mtagsVersion = "1.0.0" + val mtagsVersion = "1.1.0+53-af181de4-SNAPSHOT" Seq( + resolvers ++= Resolver.sonatypeOssRepos("snapshots"), libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", "io.get-coursier" % "interface" % "1.0.18", "org.scalameta" % "mtags-interfaces" % mtagsVersion, ), - libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.11" % mtagsVersion % SourceDeps), + libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.12" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), - (Compile / sourceGenerators) += Def.task { + Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Ysafe-init"), + Compile / sourceGenerators += Def.task { val s = streams.value val cacheDir = s.cacheDirectory val targetDir = (Compile/sourceManaged).value / "mtags-shared" diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala index eb27987f3f6c..824aec6daa16 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala @@ -1,11 +1,14 @@ package dotty.tools.scaladoc package signatures +import java.nio.file.Path; + import scala.io.Source import scala.jdk.CollectionConverters._ import scala.util.matching.Regex +import scala.language.unsafeNulls + import dotty.tools.scaladoc.test.BuildInfo -import java.nio.file.Path; import org.jsoup.Jsoup import util.IO import org.junit.Assert.assertTrue From 2d1d8aa62f5b0894f86b56f066c737bff8968f4e Mon Sep 17 00:00:00 2001 From: 
EnzeXing Date: Wed, 8 Nov 2023 15:04:54 -0500 Subject: [PATCH 169/216] Add original test for i18628 --- tests/init-global/pos/i18628.scala | 91 ++++++++++++++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100644 tests/init-global/pos/i18628.scala diff --git a/tests/init-global/pos/i18628.scala b/tests/init-global/pos/i18628.scala new file mode 100644 index 000000000000..71d9a1ed30a8 --- /dev/null +++ b/tests/init-global/pos/i18628.scala @@ -0,0 +1,91 @@ +abstract class Reader[+T] { + def first: T + + def rest: Reader[T] + + def atEnd: Boolean +} + +trait Parsers { + type Elem + type Input = Reader[Elem] + + sealed abstract class ParseResult[+T] { + val successful: Boolean + + def map[U](f: T => U): ParseResult[U] + + def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U] + } + + sealed abstract class NoSuccess(val msg: String) extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error + val successful = false + + def map[U](f: Nothing => U) = this + + def flatMapWithNext[U](f: Nothing => Input => ParseResult[U]): ParseResult[U] + = this + } + + case class Failure(override val msg: String) extends NoSuccess(msg) + + case class Error(override val msg: String) extends NoSuccess(msg) + + case class Success[+T](result: T, val next: Input) extends ParseResult[T] { + val successful = true + + def map[U](f: T => U) = Success(f(result), next) + + def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U] = f(result)(next) match { + case s @ Success(result, rest) => Success(result, rest) + case f: Failure => f + case e: Error => e + } + } + + case class ~[+a, +b](_1: a, _2: b) { + override def toString = s"(${_1}~${_2})" + } + + abstract class Parser[+T] extends (Input => ParseResult[T]) { + def apply(in: Input): ParseResult[T] + + def ~ [U](q: => Parser[U]): Parser[~[T, U]] = { + (for(a <- this; b <- q) yield new ~(a,b)) + } + + def flatMap[U](f: T => Parser[U]): Parser[U] + = Parser{ in => this(in) flatMapWithNext(f)} + + def map[U](f: T => U): Parser[U] //= flatMap{x => success(f(x))} + = Parser{ in => this(in) map(f)} + + def ^^ [U](f: T => U): Parser[U] = map(f) + } + + def Parser[T](f: Input => ParseResult[T]): Parser[T] + = new Parser[T]{ def apply(in: Input) = f(in) } + + def accept(e: Elem): Parser[Elem] = acceptIf(_ == e)("'"+e+"' expected but " + _ + " found") + + def acceptIf(p: Elem => Boolean)(err: Elem => String): Parser[Elem] = Parser { in => + if (in.atEnd) Failure("end of input") + else if (p(in.first)) Success(in.first, in.rest) + else Failure(err(in.first)) + } +} + + +object grammars3 extends Parsers { + type Elem = String + + val a: Parser[String] = accept("a") + val b: Parser[String] = accept("b") + + val AnBnCn: Parser[List[String]] = { + repMany(a,b) + } + + def repMany[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] = + p~repMany(p,q)~q ^^ {case x~xs~y => x::xs:::(y::Nil)} +} \ No newline at end of file From 2c582c3aac703b288c15e22b2fca7d58bd69ca50 Mon Sep 17 00:00:00 2001 From: EnzeXing Date: Wed, 8 Nov 2023 16:35:35 -0500 Subject: [PATCH 170/216] Update failing minimized test --- tests/init-global/neg/i18628.scala | 2 +- tests/init-global/neg/i18628_3.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/init-global/neg/i18628.scala b/tests/init-global/neg/i18628.scala index a89e98e8bd97..798b3204338c 100644 --- a/tests/init-global/neg/i18628.scala +++ b/tests/init-global/neg/i18628.scala @@ -2,6 +2,6 @@ object Test: class Box(val x: Int) def recur(a: => 
Box, b: => Box): Int = - a.x + recur(a, b) + b.x // error + a.x + recur(a, b) + b.x // error // error recur(Box(1), Box(2)) \ No newline at end of file diff --git a/tests/init-global/neg/i18628_3.scala b/tests/init-global/neg/i18628_3.scala index 563d55b26f23..101674cffb6f 100644 --- a/tests/init-global/neg/i18628_3.scala +++ b/tests/init-global/neg/i18628_3.scala @@ -4,6 +4,6 @@ object Test: class Box(val x: Int) def recur(a: => Box, b: => Box): Int = - a.x + recur(a: @widen(5), b: @widen(5)) + b.x // error + a.x + recur(a: @widen(5), b: @widen(5)) + b.x // error // error recur(Box(1), Box(2)) \ No newline at end of file From c7bbef5696ef325cc2787926c53b86f70362b5b2 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 7 Nov 2023 09:49:23 +0100 Subject: [PATCH 171/216] Deprecation warnings for old syntax (`xs: _*` varargs) --- .../src/scala/dotty/communitybuild/projects.scala | 2 +- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 9 +++++---- sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala | 6 +++--- tests/neg/i18862-3.4.check | 5 +++++ tests/neg/i18862-3.4.scala | 6 ++++++ tests/neg/i18862-future-migration.scala | 6 ++++++ tests/neg/i18862-future.scala | 4 ++++ tests/patmat/exhaustive_heuristics.scala | 4 ++-- tests/semanticdb/metac.expect | 5 +++++ 9 files changed, 37 insertions(+), 10 deletions(-) create mode 100644 tests/neg/i18862-3.4.check create mode 100644 tests/neg/i18862-3.4.scala create mode 100644 tests/neg/i18862-future-migration.scala create mode 100644 tests/neg/i18862-future.scala diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index 974800cdcce1..a56143e7003d 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -25,7 +25,7 @@ def exec(projectDir: Path, binary: String, arguments: Seq[String], environment: import scala.jdk.CollectionConverters._ val command = binary +: arguments log(command.mkString(" ")) - val builder = new ProcessBuilder(command: _*).directory(projectDir.toFile).inheritIO() + val builder = new ProcessBuilder(command*).directory(projectDir.toFile).inheritIO() builder.environment.putAll(environment.asJava) val process = builder.start() val exitCode = process.waitFor() diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index d479c7de8cc7..29cc4c2454f2 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -2344,11 +2344,12 @@ object Parsers { val isVarargSplice = location.inArgs && followingIsVararg() in.nextToken() if isVarargSplice then - report.errorOrMigrationWarning( - em"The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead${rewriteNotice(`future-migration`)}", + report.gradualErrorOrMigrationWarning( + em"The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead${rewriteNotice(`3.4-migration`)}", in.sourcePos(uscoreStart), - future) - if sourceVersion == `future-migration` then + warnFrom = `3.4`, + errorFrom = future) + if sourceVersion.isMigrating && sourceVersion.isAtLeast(`3.4-migration`) then patch(source, Span(t.span.end, in.lastOffset), "*") else if opStack.nonEmpty then report.errorOrMigrationWarning( diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index 87bc45744e21..f17be692ee50 100644 --- 
a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -25,7 +25,7 @@ object ScalaCompilerForUnitTesting: class ScalaCompilerForUnitTesting { def extractEnteredPhases(srcs: String*): Seq[List[String]] = { - val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(srcs: _*) + val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(srcs*) val run = testProgress.runs.head tempSrcFiles.map(src => run.unitPhases(src.id)) } @@ -37,7 +37,7 @@ class ScalaCompilerForUnitTesting { } def extractProgressPhases(srcs: String*): List[String] = { - val (_, Callbacks(_, testProgress)) = compileSrcs(srcs: _*) + val (_, Callbacks(_, testProgress)) = compileSrcs(srcs*) testProgress.runs.head.phases } @@ -91,7 +91,7 @@ class ScalaCompilerForUnitTesting { * Only the names used in the last src file are returned. */ def extractUsedNamesFromSrc(sources: String*): Map[String, Set[String]] = { - val (srcFiles, Callbacks(analysisCallback, _)) = compileSrcs(sources: _*) + val (srcFiles, Callbacks(analysisCallback, _)) = compileSrcs(sources*) srcFiles .map { srcFile => val classesInSrc = analysisCallback.classNames(srcFile).map(_._1) diff --git a/tests/neg/i18862-3.4.check b/tests/neg/i18862-3.4.check new file mode 100644 index 000000000000..b56454feeeaa --- /dev/null +++ b/tests/neg/i18862-3.4.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i18862-3.4.scala:6:38 ------------------------------------------------------------------------------ +6 |def test(xs: List[Int]): Unit = f(xs: _*) // error: migration warning + | ^ + | The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. diff --git a/tests/neg/i18862-3.4.scala b/tests/neg/i18862-3.4.scala new file mode 100644 index 000000000000..a30c8c8f1a59 --- /dev/null +++ b/tests/neg/i18862-3.4.scala @@ -0,0 +1,6 @@ +//> using options -Werror + +import scala.language.`3.4` + +def f(x: Int*): Unit = () +def test(xs: List[Int]): Unit = f(xs: _*) // error: migration warning diff --git a/tests/neg/i18862-future-migration.scala b/tests/neg/i18862-future-migration.scala new file mode 100644 index 000000000000..ff8ba1c377c3 --- /dev/null +++ b/tests/neg/i18862-future-migration.scala @@ -0,0 +1,6 @@ +//> using options -Werror + +import scala.language.`future-migration` + +def f(x: Int*): Unit = () +def test(xs: List[Int]): Unit = f(xs: _*) // error: migration warning diff --git a/tests/neg/i18862-future.scala b/tests/neg/i18862-future.scala new file mode 100644 index 000000000000..07fc72aef34a --- /dev/null +++ b/tests/neg/i18862-future.scala @@ -0,0 +1,4 @@ +import scala.language.future + +def f(x: Int*): Unit = () +def test(xs: List[Int]): Unit = f(xs: _*) // error: migration error diff --git a/tests/patmat/exhaustive_heuristics.scala b/tests/patmat/exhaustive_heuristics.scala index 7d682f6aa457..297900510b2a 100644 --- a/tests/patmat/exhaustive_heuristics.scala +++ b/tests/patmat/exhaustive_heuristics.scala @@ -18,8 +18,8 @@ object Test { // well, in truth, we do rewrite List() to Nil, but otherwise we do nothing // the full rewrite List(a, b) to a :: b :: Nil, for example is planned (but not sure it's a good idea) List(true, false) match { - case List(_, _, _:_*) => - case List(node, _:_*) => + case List(_, _, _*) => + case List(node, _*) => case Nil => } diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 8e790621ba07..956be1ccc969 100644 --- 
a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -3153,6 +3153,7 @@ Text => empty Language => Scala Symbols => 68 entries Occurrences => 115 entries +Diagnostics => 1 entries Synthetics => 3 entries Symbols: @@ -3342,6 +3343,10 @@ Occurrences: [32:49..32:56): pickOne -> example/SpecialRefinement#pickOne(). [32:57..32:59): as -> example/PickOneRefinement_1#run().(as) +Diagnostics: +[32:60..32:60): [warning] The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead +This construct can be rewritten automatically under -rewrite -source 3.4-migration. + Synthetics: [15:23..15:34):elems.toMap => *[String, Any] [15:23..15:34):elems.toMap => *(refl[Tuple2[String, Any]]) From f29b3d6d41d60893e2cde283e2831263a17e6981 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Mon, 6 Nov 2023 18:08:57 +0100 Subject: [PATCH 172/216] Fix #18769: Allow HK type args in Java signatures. Contrary to what an earlier comment said, we do emit HK type parameters in Java signatures. They are always unbounded and never the type of values. However, they can appear as type arguments to other higher-kinded types. Previously, an assertion error would trigger in that situation. We relax the assertion to allow this situation and emit a correct Java signature. I manually verified that the generated Java signatures are consistent with what Scala 2 emits for the same code snippet. --- .../dotty/tools/dotc/transform/GenericSignatures.scala | 3 +-- tests/pos/i18769.scala | 9 +++++++++ 2 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 tests/pos/i18769.scala diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index 77f4f76c33ba..88297e88ce7d 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -258,7 +258,7 @@ object GenericSignatures { if (sym == defn.PairClass && tupleArity(tp) > Definitions.MaxTupleArity) jsig(defn.TupleXXLClass.typeRef) else if (isTypeParameterInSig(sym, sym0)) { - assert(!sym.isAliasType, "Unexpected alias type: " + sym) + assert(!sym.isAliasType || sym.info.isLambdaSub, "Unexpected alias type: " + sym) typeParamSig(sym.name.lastPart) } else if (defn.specialErasure.contains(sym)) @@ -407,7 +407,6 @@ object GenericSignatures { // only refer to type params that will actually make it into the sig, this excludes: - // * higher-order type parameters // * type parameters appearing in method parameters // * type members not visible in an enclosing template private def isTypeParameterInSig(sym: Symbol, initialSymbol: Symbol)(using Context) = diff --git a/tests/pos/i18769.scala b/tests/pos/i18769.scala new file mode 100644 index 000000000000..be5db80b7727 --- /dev/null +++ b/tests/pos/i18769.scala @@ -0,0 +1,9 @@ +trait Arb[Fx[_]] { + def pure[A](x: A): Fx[A] +} + +class PfOps(private val self: Int) extends AnyVal { + def pf[Fy[_]](m: Arb[Fy]): PartialFunction[Int, Fy[Int]] = { + case x => m.pure(x) + } +} From c2bffc78b4d5a6e193cda648d98efc96034ac7f0 Mon Sep 17 00:00:00 2001 From: Yichen Xu Date: Thu, 9 Nov 2023 11:18:14 +0100 Subject: [PATCH 173/216] Fix capture set variable installation in Setup --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 4 ++-- .../captures/cc-setup-impure-classes.scala | 5 +++++ .../captures/future-traverse.scala | 16 ++++++++++++++++ 3 files changed, 23 insertions(+), 2 deletions(-) create mode 100644 
tests/pos-custom-args/captures/cc-setup-impure-classes.scala create mode 100644 tests/pos-custom-args/captures/future-traverse.scala diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 270fd9322a88..85942950f317 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -552,7 +552,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol if sym.isClass then - sym == defn.AnyClass + sym == defn.AnyClass || !sym.isPureClass // we assume Any is a shorthand of {cap} Any, so if Any is an upper // bound, the type is taken to be impure. else @@ -708,4 +708,4 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def postCheck()(using Context): Unit = for chk <- todoAtPostCheck do chk(ctx) todoAtPostCheck.clear() -end Setup \ No newline at end of file +end Setup diff --git a/tests/pos-custom-args/captures/cc-setup-impure-classes.scala b/tests/pos-custom-args/captures/cc-setup-impure-classes.scala new file mode 100644 index 000000000000..db88851b6a52 --- /dev/null +++ b/tests/pos-custom-args/captures/cc-setup-impure-classes.scala @@ -0,0 +1,5 @@ +import language.experimental.captureChecking + +trait Resource +def id[X](x: X): x.type = x +def foo[M <: Resource](r: M^): Unit = id(r) diff --git a/tests/pos-custom-args/captures/future-traverse.scala b/tests/pos-custom-args/captures/future-traverse.scala new file mode 100644 index 000000000000..5aedc5d29852 --- /dev/null +++ b/tests/pos-custom-args/captures/future-traverse.scala @@ -0,0 +1,16 @@ +import language.experimental.captureChecking + +trait Builder[-A, +C] +trait BuildFrom[-From, -A, +C] { + def newBuilder(from: From): Builder[A, C] +} + +trait Future[+T] { this: Future[T]^ => + import Future.* + def foldLeft[R](r: R): R = r + def traverse[A, B, M[X] <: IterableOnce[X]](in: M[A]^, bf: BuildFrom[M[A]^, B, M[B]^]): Unit = + foldLeft(successful(bf.newBuilder(in))) +} +object Future { + def successful[T](result: T): Future[T] = ??? +} From 789145cfbb078b30b6c3242627af29c95950fb25 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 18 Oct 2023 10:01:24 +0200 Subject: [PATCH 174/216] Keep qualifier of Ident when selecting setter We already keep the qualifier as a typed splice if the prefix is an explicit Select. Fixes #18713 --- .../src/dotty/tools/dotc/typer/Typer.scala | 14 +++++++++++++- tests/pos/i18713.scala | 18 ++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 tests/pos/i18713.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 9f6bf8c7dee5..ac90664ddddf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1119,7 +1119,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case Apply(fn, _) if fn.symbol.is(ExtensionMethod) => def toSetter(fn: Tree): untpd.Tree = fn match case fn @ Ident(name: TermName) => - untpd.cpy.Ident(fn)(name.setterName) + // We need to make sure that the prefix of this extension getter is + // retained when we transform it into a setter. Otherwise, we could + // end up resolving an unrelated setter from another extension. We + // transform the `Ident` into a `Select` to ensure that the prefix + // is retained with a `TypedSplice` (see `case Select` below). + // See tests/pos/i18713.scala for an example.
+ fn.tpe match + case TermRef(qual: TermRef, _) => + toSetter(ref(qual).select(fn.symbol).withSpan(fn.span)) + case TermRef(qual: ThisType, _) => + toSetter(This(qual.cls).select(fn.symbol).withSpan(fn.span)) + case TermRef(NoPrefix, _) => + untpd.cpy.Ident(fn)(name.setterName) case fn @ Select(qual, name: TermName) => untpd.cpy.Select(fn)(untpd.TypedSplice(qual), name.setterName) case fn @ TypeApply(fn1, targs) => diff --git a/tests/pos/i18713.scala b/tests/pos/i18713.scala new file mode 100644 index 000000000000..0c406aa09d2e --- /dev/null +++ b/tests/pos/i18713.scala @@ -0,0 +1,18 @@ +import language.experimental.relaxedExtensionImports + +class A +object AA: + extension (a: A) + def f = ??? + def f_=(x: String) = ??? + +object BB: + extension (b: Long) + def f = ??? + def f_=(x: String) = ??? + +def test(a: A) = + import AA.* + import BB.* + a.f + a.f = "aa" From 716304263bc6f21362c7c19877e0f0995d2c0886 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Thu, 26 Oct 2023 16:43:06 +0200 Subject: [PATCH 175/216] Add structural classes of dynamicApply before inlining --- compiler/src/dotty/tools/dotc/core/Mode.scala | 4 +-- .../dotty/tools/dotc/inlines/Inlines.scala | 1 + .../tools/dotc/transform/PostTyper.scala | 4 +-- .../src/dotty/tools/dotc/typer/Dynamic.scala | 10 +++++- tests/run/i17761.check | 8 +++++ tests/run/i17761.scala | 33 +++++++++++++++++++ 6 files changed, 55 insertions(+), 5 deletions(-) create mode 100644 tests/run/i17761.check create mode 100644 tests/run/i17761.scala diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 133d110cabda..71b49394ae14 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -163,6 +163,6 @@ object Mode { */ val RelaxedOverriding: Mode = newMode(30, "RelaxedOverriding") - /** We are checking the original call of an Inlined node */ - val InlinedCall: Mode = newMode(31, "InlinedCall") + /** Skip inlining of methods. 
*/ + val NoInline: Mode = newMode(31, "NoInline") } diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 6b41f65e2fdd..5bdd0bb98495 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -64,6 +64,7 @@ object Inlines: ) && !ctx.typer.hasInliningErrors && !ctx.base.stopInlining + && !ctx.mode.is(Mode.NoInline) } private def needsTransparentInlining(tree: Tree)(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 79470addd03d..e59f5e1c40ab 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -366,7 +366,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => case tree @ Inlined(call, bindings, expansion) if !tree.inlinedFromOuterScope => val pos = call.sourcePos CrossVersionChecks.checkExperimentalRef(call.symbol, pos) - withMode(Mode.InlinedCall)(transform(call)) + withMode(Mode.NoInline)(transform(call)) val callTrace = ref(call.symbol)(using ctx.withSource(pos.source)).withSpan(pos.span) cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using inlineContext(tree))) case templ: Template => @@ -520,7 +520,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if (sym.isEffectivelyErased) dropInlines.transform(rhs) else rhs private def registerNeedsInlining(tree: Tree)(using Context): Unit = - if tree.symbol.is(Inline) && !Inlines.inInlineMethod && !ctx.mode.is(Mode.InlinedCall) then + if tree.symbol.is(Inline) && !Inlines.inInlineMethod && !ctx.mode.is(Mode.NoInline) then ctx.compilationUnit.needsInlining = true /** Check if the definition has macro annotation and sets `compilationUnit.hasMacroAnnotations` if needed. */ diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index 71b32b639997..91f863eacfd3 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -7,6 +7,8 @@ import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags.* +import dotty.tools.dotc.core.Mode import dotty.tools.dotc.core.Names.{Name, TermName} import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.Types.* @@ -18,6 +20,7 @@ import ErrorReporting.* import dotty.tools.dotc.transform.ValueClasses import dotty.tools.dotc.transform.TypeUtils.isPrimitiveValueType import reporting.* +import inlines.Inlines object Dynamic { private def isDynamicMethod(name: Name): Boolean = @@ -210,7 +213,12 @@ trait Dynamic { case _ => tree case other => tree case _ => tree - addClassOfs(typed(scall)) + + // We type the application of `applyDynamic` without inlining (arguments are already typed and inlined), + // to be able to add the Class arguments before we inline the method.
+ val call = addClassOfs(withMode(Mode.NoInline)(typed(scall))) + if Inlines.needsInlining(call) then Inlines.inlineCall(call) + else call } def fail(reason: String): Tree = diff --git a/tests/run/i17761.check b/tests/run/i17761.check new file mode 100644 index 000000000000..6e31f05b09df --- /dev/null +++ b/tests/run/i17761.check @@ -0,0 +1,8 @@ +Normal +test +ArraySeq(class java.lang.String, int) +ArraySeq(test, 42) +Transparent +test +ArraySeq(class java.lang.String, int) +ArraySeq(test, 42) diff --git a/tests/run/i17761.scala b/tests/run/i17761.scala new file mode 100644 index 000000000000..258773aef940 --- /dev/null +++ b/tests/run/i17761.scala @@ -0,0 +1,33 @@ +class MyRecord extends Selectable: + def applyDynamic(name: String, paramClasses: Class[_]*)(args: Any*): Any = { + println(name) + println(paramClasses) + println(args) + () + } + +class MyRecordTransparent extends Selectable: + inline transparent def applyDynamic(name: String, paramClasses: Class[_]*)(args: Any*): Any = { + println(name) + println(paramClasses) + println(args) + () + } + +type Person = MyRecord { + def test(a: String, b: Int): Unit +} + + +type PersonTransparent = MyRecordTransparent { + def test(a: String, b: Int): Unit +} + +val person = MyRecord().asInstanceOf[Person] +val personTransparent = MyRecordTransparent().asInstanceOf[PersonTransparent] + +@main def Test: Unit = + println("Normal") + person.test("test", 42) + println("Transparent") + personTransparent.test("test", 42) \ No newline at end of file From 951bbaef113f51d03942427aec4fb09626f18227 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Wed, 1 Nov 2023 17:44:11 +0100 Subject: [PATCH 176/216] Deprecation warnings for old syntax: `var x = _` * In `3.4` we emit the deprecation warning * In `future` we make this syntax an error * Add patch.
Not ideal because we need to use the full path of `uninitialized` --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 13 ++++++++----- .../test/dotty/tools/dotc/CompilationTests.scala | 1 + .../tools/languageserver/DottyLanguageServer.scala | 9 +++++---- .../tools/languageserver/util/PositionContext.scala | 6 ++++-- .../languageserver/util/server/TestServer.scala | 4 +++- .../src/scala/util/control/NonLocalReturns.scala | 4 +++- .../src/main/dotty/tools/pc/MetalsDriver.scala | 4 +++- project/Build.scala | 1 + sbt-test/compilerReporter/i14576/Test.scala | 3 --- sbt-test/compilerReporter/i14576/build.sbt | 2 +- tests/neg/i4812.scala | 6 +++--- tests/neg/uninitialized-3.4.check | 6 ++++++ tests/neg/uninitialized-3.4.scala | 8 ++++++++ tests/neg/uninitialized-future-migration.scala | 8 ++++++++ tests/neg/uninitialized-future.scala | 6 ++++++ tests/patmat/i12805-fallout.scala | 3 ++- tests/pos/uninitialized-future-migration.scala | 6 ++++++ tests/rewrites/uninitialized-var.check | 2 ++ tests/rewrites/uninitialized-var.scala | 2 ++ 19 files changed, 72 insertions(+), 22 deletions(-) create mode 100644 tests/neg/uninitialized-3.4.check create mode 100644 tests/neg/uninitialized-3.4.scala create mode 100644 tests/neg/uninitialized-future-migration.scala create mode 100644 tests/neg/uninitialized-future.scala create mode 100644 tests/pos/uninitialized-future-migration.scala create mode 100644 tests/rewrites/uninitialized-var.check create mode 100644 tests/rewrites/uninitialized-var.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 327f97a3cc9d..878b74a4e9fe 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -3665,11 +3665,14 @@ object Parsers { subExpr() match case rhs0 @ Ident(name) if placeholderParams.nonEmpty && name == placeholderParams.head.name && !tpt.isEmpty && mods.is(Mutable) && lhs.forall(_.isInstanceOf[Ident]) => - if sourceVersion.isAtLeast(future) then - deprecationWarning( - em"""`= _` has been deprecated; use `= uninitialized` instead. - |`uninitialized` can be imported with `scala.compiletime.uninitialized`.""", - rhsOffset) + report.gradualErrorOrMigrationWarning( + em"""`= _` has been deprecated; use `= uninitialized` instead. 
+ |`uninitialized` can be imported with `scala.compiletime.uninitialized`.${rewriteNotice(`3.4-migration`)}""", + in.sourcePos(rhsOffset), + warnFrom = `3.4`, + errorFrom = future) + if sourceVersion.isMigrating && sourceVersion.isAtLeast(`3.4-migration`) then + patch(source, Span(rhsOffset, rhsOffset + 1), "scala.compiletime.uninitialized") placeholderParams = placeholderParams.tail atSpan(rhs0.span) { Ident(nme.WILDCARD) } case rhs0 => rhs0 diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 0491660219b2..21235ac048e2 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -60,6 +60,7 @@ class CompilationTests { compileFile("tests/rewrites/rewrites.scala", defaultOptions.and("-source", "3.0-migration").and("-rewrite", "-indent")), compileFile("tests/rewrites/rewrites3x.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/rewrites3x-fatal-warnings.scala", defaultOptions.and("-rewrite", "-source", "future-migration", "-Xfatal-warnings")), + compileFile("tests/rewrites/uninitialized-var.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/with-type-operator.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/private-this.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/filtering-fors.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")), diff --git a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala index e7f9c332aeeb..c58b17e49559 100644 --- a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala +++ b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala @@ -15,6 +15,7 @@ import scala.collection._ import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import scala.io.Codec +import scala.compiletime.uninitialized import dotc._ import ast.{Trees, tpd, untpd} @@ -54,14 +55,14 @@ class DottyLanguageServer extends LanguageServer import lsp4j.jsonrpc.messages.{Either => JEither} import lsp4j._ - private var rootUri: String = _ + private var rootUri: String = uninitialized - private var myClient: DottyClient = _ + private var myClient: DottyClient = uninitialized def client: DottyClient = myClient - private var myDrivers: mutable.Map[ProjectConfig, InteractiveDriver] = _ + private var myDrivers: mutable.Map[ProjectConfig, InteractiveDriver] = uninitialized - private var myDependentProjects: mutable.Map[ProjectConfig, mutable.Set[ProjectConfig]] = _ + private var myDependentProjects: mutable.Map[ProjectConfig, mutable.Set[ProjectConfig]] = uninitialized def drivers: Map[ProjectConfig, InteractiveDriver] = thisServer.synchronized { if myDrivers == null then diff --git a/language-server/test/dotty/tools/languageserver/util/PositionContext.scala b/language-server/test/dotty/tools/languageserver/util/PositionContext.scala index 10629d900c92..1f73be867c06 100644 --- a/language-server/test/dotty/tools/languageserver/util/PositionContext.scala +++ b/language-server/test/dotty/tools/languageserver/util/PositionContext.scala @@ -3,9 +3,11 @@ package dotty.tools.languageserver.util import dotty.tools.languageserver.util.embedded.CodeMarker import dotty.tools.languageserver.util.server.TestFile +import 
scala.compiletime.uninitialized + class PositionContext(positionMap: Map[CodeMarker, (TestFile, Int, Int)]) { - private var lastKey: CodeMarker = _ - private var lastValue: (TestFile, Int, Int) = _ + private var lastKey: CodeMarker = uninitialized + private var lastValue: (TestFile, Int, Int) = uninitialized def positionOf(pos: CodeMarker): (TestFile, Int, Int) = { if (lastKey eq pos) lastValue else { diff --git a/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala b/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala index 749b92daba8c..473c14324860 100644 --- a/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala +++ b/language-server/test/dotty/tools/languageserver/util/server/TestServer.scala @@ -7,6 +7,8 @@ import java.nio.file.{Files, Path} import java.nio.charset.StandardCharsets import java.util +import scala.compiletime.uninitialized + import dotty.tools.dotc.Main import dotty.tools.dotc.reporting.{Reporter, ThrowingReporter} import dotty.tools.io.Directory @@ -17,7 +19,7 @@ import org.eclipse.lsp4j.{ DidOpenTextDocumentParams, InitializeParams, Initiali class TestServer(testFolder: Path, projects: List[Project]) { val server = new DottyLanguageServer - var client: TestClient = _ + var client: TestClient = uninitialized init() diff --git a/library/src/scala/util/control/NonLocalReturns.scala b/library/src/scala/util/control/NonLocalReturns.scala index ad4dc05f36ac..c7e600b4c028 100644 --- a/library/src/scala/util/control/NonLocalReturns.scala +++ b/library/src/scala/util/control/NonLocalReturns.scala @@ -1,5 +1,7 @@ package scala.util.control +import scala.compiletime.uninitialized + /** Library implementation of nonlocal return. * * Usage: @@ -21,7 +23,7 @@ package scala.util.control object NonLocalReturns { @deprecated("Use scala.util.boundary.Break instead", "3.3") class ReturnThrowable[T] extends ControlThrowable { - private var myResult: T = _ + private var myResult: T = uninitialized def throwReturn(result: T): Nothing = { myResult = result throw this diff --git a/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala b/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala index 55504db7a11a..819c3f2fc9c9 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala @@ -7,6 +7,8 @@ import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.reporting.Diagnostic import dotty.tools.dotc.util.SourceFile +import scala.compiletime.uninitialized + /** * MetalsDriver is a wrapper class that provides a compilation cache for InteractiveDriver. 
* MetalsDriver skips running compilation if @@ -29,7 +31,7 @@ class MetalsDriver( override val settings: List[String] ) extends InteractiveDriver(settings): - @volatile private var lastCompiledURI: URI = _ + @volatile private var lastCompiledURI: URI = uninitialized private def alreadyCompiled(uri: URI, content: Array[Char]): Boolean = compilationUnits.get(uri) match diff --git a/project/Build.scala b/project/Build.scala index 3ac19a0f7b35..ab4cfe19377b 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1267,6 +1267,7 @@ object Build { ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Ysafe-init"), + Compile / scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings Compile / sourceGenerators += Def.task { val s = streams.value val cacheDir = s.cacheDirectory diff --git a/sbt-test/compilerReporter/i14576/Test.scala b/sbt-test/compilerReporter/i14576/Test.scala index d94a49145f81..8be852b846a0 100644 --- a/sbt-test/compilerReporter/i14576/Test.scala +++ b/sbt-test/compilerReporter/i14576/Test.scala @@ -10,8 +10,5 @@ object Test: def f(x: Text) = println(x.str) f("abc") - // private[this] and = _ are deprecated under -source:future - private[this] var x: AnyRef = _ - // under -source:future, `_` is deprecated for wildcard arguments of types: use `?` instead val xs: List[_] = Nil diff --git a/sbt-test/compilerReporter/i14576/build.sbt b/sbt-test/compilerReporter/i14576/build.sbt index f9f211b24977..cc0402a7ba5e 100644 --- a/sbt-test/compilerReporter/i14576/build.sbt +++ b/sbt-test/compilerReporter/i14576/build.sbt @@ -24,7 +24,7 @@ lazy val root = (project in file(".")) }, assertDeprecationSummary := { assert { - FakePrintWriter.messages.exists(_.contains("there were 2 deprecation warnings; re-run with -deprecation for details")) + FakePrintWriter.messages.exists(_.contains("there was 1 deprecation warning; re-run with -deprecation for details")) } }, assertNoDeprecationSummary := { diff --git a/tests/neg/i4812.scala b/tests/neg/i4812.scala index 8d518107825c..c6f6dafc656c 100644 --- a/tests/neg/i4812.scala +++ b/tests/neg/i4812.scala @@ -1,6 +1,6 @@ //> using options -Werror object Test: - var prev: Any = _ + var prev: Any = scala.compiletime.uninitialized def test[T](x: T): T = class A(val elem: (T, Boolean)) @@ -55,7 +55,7 @@ object Test: def test6[T](x: T): T = class A { var b: B = null } - class B { var a: A = null; var elem: T = _ } + class B { var a: A = null; var elem: T = scala.compiletime.uninitialized } prev match case prev: A => // error: the type test for A cannot be checked at runtime prev.b.elem @@ -88,7 +88,7 @@ object Test: case x: B => x sealed class A - var prevA: A = _ + var prevA: A = scala.compiletime.uninitialized def test10: A = val methodCallId = System.nanoTime() class B(val id: Long) extends A diff --git a/tests/neg/uninitialized-3.4.check b/tests/neg/uninitialized-3.4.check new file mode 100644 index 000000000000..1c7b985072d0 --- /dev/null +++ b/tests/neg/uninitialized-3.4.check @@ -0,0 +1,6 @@ +-- Error: tests/neg/uninitialized-3.4.scala:7:15 ----------------------------------------------------------------------- +7 | var a: Int = _ // error: migration warning + | ^ + | `= _` has been deprecated; use `= uninitialized` instead. + | `uninitialized` can be imported with `scala.compiletime.uninitialized`. + | This construct can be rewritten automatically under -rewrite -source 3.4-migration. 
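In user terms, the migration in this patch amounts to the following (a minimal sketch; `Cache`, `size` and `hits` are made-up names, not part of the patch):

import scala.compiletime.uninitialized

class Cache:
  var size: Int = _              // deprecation warning under -source 3.4, error under -source future
  var hits: Int = uninitialized  // the supported replacement

Under -rewrite with a migrating source version (3.4-migration or later), the parser patches the `_` to the fully qualified `scala.compiletime.uninitialized`, as the rewrite check file further below shows.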
diff --git a/tests/neg/uninitialized-3.4.scala b/tests/neg/uninitialized-3.4.scala
new file mode 100644
index 000000000000..174a95ae6c54
--- /dev/null
+++ b/tests/neg/uninitialized-3.4.scala
@@ -0,0 +1,8 @@
+//> using options -Werror
+
+import scala.language.`3.4`
+import scala.compiletime.uninitialized
+
+class Foo:
+  var a: Int = _ // error: migration warning
+  var b: Int = uninitialized
diff --git a/tests/neg/uninitialized-future-migration.scala b/tests/neg/uninitialized-future-migration.scala
new file mode 100644
index 000000000000..05f7c6b67f38
--- /dev/null
+++ b/tests/neg/uninitialized-future-migration.scala
@@ -0,0 +1,8 @@
+//> using options -Werror
+
+import scala.language.`future-migration`
+import scala.compiletime.uninitialized
+
+class Foo:
+  var a: Int = _ // error: migration warning
+  var b: Int = uninitialized
diff --git a/tests/neg/uninitialized-future.scala b/tests/neg/uninitialized-future.scala
new file mode 100644
index 000000000000..8882b70ed48b
--- /dev/null
+++ b/tests/neg/uninitialized-future.scala
@@ -0,0 +1,6 @@
+import scala.language.future
+import scala.compiletime.uninitialized
+
+class Foo:
+  var a: Int = _ // error
+  var b: Int = uninitialized
diff --git a/tests/patmat/i12805-fallout.scala b/tests/patmat/i12805-fallout.scala
index b598b36159ea..66d9c0d598fd 100644
--- a/tests/patmat/i12805-fallout.scala
+++ b/tests/patmat/i12805-fallout.scala
@@ -1,4 +1,5 @@
 import scala.annotation.unchecked.uncheckedVariance
+import scala.compiletime.uninitialized
 
 type Untyped = Null
 
@@ -7,7 +8,7 @@ class Type
 abstract class Tree[-T >: Untyped] {
   type ThisTree[T >: Untyped] <: Tree[T]
 
-  protected var myTpe: T @uncheckedVariance = _
+  protected var myTpe: T @uncheckedVariance = uninitialized
 
   def withType(tpe: Type): ThisTree[Type] = {
     val tree = this.asInstanceOf[ThisTree[Type]]
diff --git a/tests/pos/uninitialized-future-migration.scala b/tests/pos/uninitialized-future-migration.scala
new file mode 100644
index 000000000000..a1e606dc90fb
--- /dev/null
+++ b/tests/pos/uninitialized-future-migration.scala
@@ -0,0 +1,6 @@
+import scala.language.`future-migration`
+import scala.compiletime.uninitialized
+
+class Foo:
+  var a: Int = _ // warn
+  var b: Int = uninitialized
diff --git a/tests/rewrites/uninitialized-var.check b/tests/rewrites/uninitialized-var.check
new file mode 100644
index 000000000000..3809938512a7
--- /dev/null
+++ b/tests/rewrites/uninitialized-var.check
@@ -0,0 +1,2 @@
+class Foo:
+  var a: Int = scala.compiletime.uninitialized
diff --git a/tests/rewrites/uninitialized-var.scala b/tests/rewrites/uninitialized-var.scala
new file mode 100644
index 000000000000..910734b33350
--- /dev/null
+++ b/tests/rewrites/uninitialized-var.scala
@@ -0,0 +1,2 @@
+class Foo:
+  var a: Int = _

From c79b8e8f4d2eaf9d2095d82810d1b5b8f6383bbb Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Fri, 3 Nov 2023 14:20:13 +0100
Subject: [PATCH 177/216] Future migration warning for alphanumeric infix
 operator

---
 compiler/src/dotty/tools/dotc/typer/Checking.scala | 12 ++++++------
 .../test/dotty/tools/dotc/CompilationTests.scala   |  1 +
 tests/neg/rewrite-messages.check                   |  2 +-
 tests/rewrites/alphanumeric-infix-operator.check   |  3 +++
 tests/rewrites/alphanumeric-infix-operator.scala   |  3 +++
 5 files changed, 14 insertions(+), 7 deletions(-)
 create mode 100644 tests/rewrites/alphanumeric-infix-operator.check
 create mode 100644 tests/rewrites/alphanumeric-infix-operator.scala

diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala
index 5fd1e165cf20..21bf0bc81cd0 100644
--- a/compiler/src/dotty/tools/dotc/typer/Checking.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala
@@ -1081,8 +1081,7 @@ trait Checking {
           !name.isOperatorName &&
           !meth.isDeclaredInfix &&
           !meth.maybeOwner.is(Scala2x) &&
-          !infixOKSinceFollowedBy(tree.right) &&
-          sourceVersion.isAtLeast(future) =>
+          !infixOKSinceFollowedBy(tree.right) =>
         val (kind, alternative) =
           if (ctx.mode.is(Mode.Type))
             ("type", (n: Name) => s"prefix syntax $n[...]")
@@ -1090,12 +1089,13 @@
             ("extractor", (n: Name) => s"prefix syntax $n(...)")
           else
             ("method", (n: Name) => s"method syntax .$n(...)")
-        def rewriteMsg = Message.rewriteNotice("The latter", options = "-deprecation")
-        report.deprecationWarning(
+        def rewriteMsg = Message.rewriteNotice("The latter", version = `future-migration`)
+        report.errorOrMigrationWarning(
          em"""Alphanumeric $kind $name is not declared ${hlAsKeyword("infix")}; it should not be used as infix operator.
             |Instead, use ${alternative(name)} or backticked identifier `$name`.$rewriteMsg""",
-          tree.op.srcPos)
-        if (ctx.settings.deprecation.value) {
+          tree.op.srcPos,
+          from = future)
+        if sourceVersion == `future-migration` then {
           patch(Span(tree.op.span.start, tree.op.span.start), "`")
           patch(Span(tree.op.span.end, tree.op.span.end), "`")
         }
diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala
index 0491660219b2..5fd3524e208e 100644
--- a/compiler/test/dotty/tools/dotc/CompilationTests.scala
+++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala
@@ -62,6 +62,7 @@ class CompilationTests {
     compileFile("tests/rewrites/rewrites3x-fatal-warnings.scala", defaultOptions.and("-rewrite", "-source", "future-migration", "-Xfatal-warnings")),
     compileFile("tests/rewrites/with-type-operator.scala", defaultOptions.and("-rewrite", "-source", "future-migration")),
     compileFile("tests/rewrites/private-this.scala", defaultOptions.and("-rewrite", "-source", "future-migration")),
+    compileFile("tests/rewrites/alphanumeric-infix-operator.scala", defaultOptions.and("-rewrite", "-source", "future-migration")),
     compileFile("tests/rewrites/filtering-fors.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")),
     compileFile("tests/rewrites/refutable-pattern-bindings.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")),
     compileFile("tests/rewrites/i8982.scala", defaultOptions.and("-indent", "-rewrite")),
diff --git a/tests/neg/rewrite-messages.check b/tests/neg/rewrite-messages.check
index 3ee081edbed2..f368c2dc8997 100644
--- a/tests/neg/rewrite-messages.check
+++ b/tests/neg/rewrite-messages.check
@@ -8,4 +8,4 @@
   |    ^^^
   | Alphanumeric method foo is not declared infix; it should not be used as infix operator.
   | Instead, use method syntax .foo(...) or backticked identifier `foo`.
-  | The latter can be rewritten automatically under -rewrite -deprecation.
+  | The latter can be rewritten automatically under -rewrite -source future-migration.
diff --git a/tests/rewrites/alphanumeric-infix-operator.check b/tests/rewrites/alphanumeric-infix-operator.check
new file mode 100644
index 000000000000..8ff077e856cf
--- /dev/null
+++ b/tests/rewrites/alphanumeric-infix-operator.check
@@ -0,0 +1,3 @@
+extension (x: Int) def foo(y: Int): Int = x + y
+
+def f: Unit = 2 `foo` 4
diff --git a/tests/rewrites/alphanumeric-infix-operator.scala b/tests/rewrites/alphanumeric-infix-operator.scala
new file mode 100644
index 000000000000..450f44834f05
--- /dev/null
+++ b/tests/rewrites/alphanumeric-infix-operator.scala
@@ -0,0 +1,3 @@
+extension (x: Int) def foo(y: Int): Int = x + y
+
+def f: Unit = 2 foo 4

From 3c09617d2a349c45eed645a652a0d17108f0e177 Mon Sep 17 00:00:00 2001
From: Nicolas Almerge <44474247+NicolasAlmerge@users.noreply.github.com>
Date: Thu, 9 Nov 2023 12:00:29 +0100
Subject: [PATCH 178/216] Fixed small typo in doc for File.scala

---
 compiler/src/dotty/tools/io/File.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala
index efce60d3f86d..22a0e04b2b48 100644
--- a/compiler/src/dotty/tools/io/File.scala
+++ b/compiler/src/dotty/tools/io/File.scala
@@ -35,7 +35,7 @@ object File {
  * @author Paul Phillips
  * @since 2.8
  *
- * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.''
+ * ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
  */
 class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) with Streamable.Chars {
   override val creationCodec: io.Codec = constructorCodec

From a1d705993a8f6416581a1bd69db091f237396c47 Mon Sep 17 00:00:00 2001
From: Yichen Xu
Date: Thu, 9 Nov 2023 13:55:31 +0100
Subject: [PATCH 179/216] Polishments

---
 compiler/src/dotty/tools/dotc/cc/Setup.scala | 18 ++++++++++--------
 .../captures/cc-setup-impure-classes.scala   |  3 ++-
 2 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala
index 85942950f317..d29a00a52f56 100644
--- a/compiler/src/dotty/tools/dotc/cc/Setup.scala
+++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala
@@ -543,7 +543,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI:
     end postProcess
   end setupTraverser
 
-  private def superTypeIsImpure(tp: Type)(using Context): Boolean = {
+  /** Checks whether an abstract type could be impure. See also: [[needsVariable]]. */
+  private def instanceCanBeImpure(tp: Type)(using Context): Boolean = {
     tp.dealiasKeepAnnots match
       case CapturingType(_, refs) =>
         !refs.isAlwaysEmpty
@@ -552,20 +553,21 @@
       case tp: (TypeRef | AppliedType) =>
         val sym = tp.typeSymbol
         if sym.isClass then
-          sym == defn.AnyClass || !sym.isPureClass
+          sym == defn.AnyClass
+          // we assume Any is a shorthand of {cap} Any, so if Any is an upper
+          // bound, the type is taken to be impure.
+          || !sym.isPureClass
         else
-          sym != defn.Caps_Cap && superTypeIsImpure(tp.superType)
+          sym != defn.Caps_Cap && instanceCanBeImpure(tp.superType)
       case tp: (RefinedOrRecType | MatchType) =>
-        superTypeIsImpure(tp.underlying)
+        instanceCanBeImpure(tp.underlying)
      case tp: AndType =>
-        superTypeIsImpure(tp.tp1) || superTypeIsImpure(tp.tp2)
+        instanceCanBeImpure(tp.tp1) || instanceCanBeImpure(tp.tp2)
      case tp: OrType =>
-        superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2)
+        instanceCanBeImpure(tp.tp1) && instanceCanBeImpure(tp.tp2)
      case _ =>
        false
-  }.showing(i"super type is impure $tp = $result", capt)
+  }.showing(i"instance can be impure $tp = $result", capt)
 
   /** Should a capture set variable be added on type `tp`? */
   def needsVariable(tp: Type)(using Context): Boolean = {
@@ -577,7 +579,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI:
         else
           val tp1 = tp.dealiasKeepAnnots
           if tp1 ne tp then needsVariable(tp1)
-          else superTypeIsImpure(tp1)
+          else instanceCanBeImpure(tp1)
       case tp: (RefinedOrRecType | MatchType) =>
         needsVariable(tp.underlying)
       case tp: AndType =>
diff --git a/tests/pos-custom-args/captures/cc-setup-impure-classes.scala b/tests/pos-custom-args/captures/cc-setup-impure-classes.scala
index db88851b6a52..04dfb665b6d4 100644
--- a/tests/pos-custom-args/captures/cc-setup-impure-classes.scala
+++ b/tests/pos-custom-args/captures/cc-setup-impure-classes.scala
@@ -2,4 +2,5 @@ import language.experimental.captureChecking
 
 trait Resource
 def id[X](x: X): x.type = x
-def foo[M <: Resource](r: M^): Unit = id(r)
+def foo[M <: Resource](r: M^): Unit = id(r) // was error, should be ok
+def bar[M](r: M^): Unit = id(r) // ok

From 6d7aa994ac37207a4a34d2d66fc862db66761a86 Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Thu, 9 Nov 2023 16:46:44 +0100
Subject: [PATCH 180/216] Do not show deprecation warning for `_` in type
 match case

Fixes #18808
---
 compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 12 ++++++++++--
 compiler/test-resources/repl/i13208.scala           |  5 -----
 tests/pos/i18808.scala                              |  9 +++++++++
 3 files changed, 19 insertions(+), 7 deletions(-)
 create mode 100644 tests/pos/i18808.scala

diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
index 327f97a3cc9d..5642443eae73 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -413,6 +413,14 @@ object Parsers {
       finally inEnum = saved
     }
 
+    private var inTypeMatchPattern = false
+    private def withinTypeMatchPattern[T](body: => T): T = {
+      val saved = inTypeMatchPattern
+      inTypeMatchPattern = true
+      try body
+      finally inTypeMatchPattern = saved
+    }
+
     private var staged = StageKind.None
     def withinStaged[T](kind: StageKind)(op: => T): T = {
       val saved = staged
@@ -1862,7 +1870,7 @@ object Parsers {
           val start = in.skipToken()
           Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start))
         else
-          if sourceVersion.isAtLeast(future) then
+          if !inTypeMatchPattern && sourceVersion.isAtLeast(future) then
            deprecationWarning(em"`_` is deprecated for wildcard arguments of types: use `?` instead")
            patch(source, Span(in.offset, in.offset + 1), "?")
          val start = in.skipToken()
@@ -2898,7 +2906,7 @@ object Parsers {
            val start = in.skipToken()
            Ident(tpnme.WILDCARD).withSpan(Span(start, in.lastOffset, start))
          case _ =>
-            rejectWildcardType(infixType())
+            withinTypeMatchPattern(rejectWildcardType(infixType()))
        }
      }
      CaseDef(pat, EmptyTree, atSpan(accept(ARROW)) {
diff --git a/compiler/test-resources/repl/i13208.scala b/compiler/test-resources/repl/i13208.scala
index 61ace43c732d..07cc67d3bf0b 100644
--- a/compiler/test-resources/repl/i13208.scala
+++ b/compiler/test-resources/repl/i13208.scala
@@ -1,8 +1,3 @@
 //> using options -source:future -deprecation
 scala> type M[X] = X match { case Int => String case _ => Int }
 scala> type N[X] = X match { case List[_] => Int }
-1 warning found
--- Deprecation Warning: --------------------------------------------------------
-1 | type N[X] = X match { case List[_] => Int }
-  |                                  ^
-  | `_` is deprecated for wildcard arguments of types: use `?` instead
diff --git a/tests/pos/i18808.scala b/tests/pos/i18808.scala
new file mode 100644
index 000000000000..0556b3285d00
--- /dev/null
+++ b/tests/pos/i18808.scala
@@ -0,0 +1,9 @@
+//> using options -Werror
+
+import language.future
+
+type F[X] = X match
+  case List[_] => Int
+
+type G[X] = X match
+  case List[?] => Int

From 032f6cdab955200487319147c2e12cf9d48175f5 Mon Sep 17 00:00:00 2001
From: Nicolas Stucki
Date: Tue, 7 Nov 2023 16:45:13 +0100
Subject: [PATCH 181/216] Move Scala 2 library tests from Windows into Linux

---
 .github/workflows/ci.yaml | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 636b91084529..5f5f616b76da 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -142,6 +142,9 @@ jobs:
         run: |
           ./project/scripts/sbt ";sjsSandbox/run ;sjsSandbox/test ;sjsJUnitTests/test ;set sjsJUnitTests/scalaJSLinkerConfig ~= switchToESModules ;sjsJUnitTests/test ;sjsCompilerTests/test"
 
+      - name: Test with Scala 2 library TASTy
+        run: ./project/scripts/sbt ";set ThisBuild/Build.useScala2LibraryTasty := true ;scala3-bootstrapped/testCompilation i5" # only test a subset of test to avoid doubling the CI execution time
+
   test_windows_fast:
     runs-on: [self-hosted, Windows]
     if: "(
@@ -209,10 +212,6 @@ jobs:
         run: sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test"
         shell: cmd
 
-      - name: Test with Scala 2 library TASTy
-        run: sbt ";set ThisBuild/Build.useScala2LibraryTasty := true ;scala3-bootstrapped/testCompilation i5" # only test a subset of test to avoid doubling the CI execution time
-        shell: cmd
-
       - name: Scala.js Test
         run: sbt ";sjsJUnitTests/test ;set sjsJUnitTests/scalaJSLinkerConfig ~= switchToESModules ;sjsJUnitTests/test ;sjsCompilerTests/test"
         shell: cmd

From 681f1829e53901eea650ebe85bc1ec329857fd4b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C4=99drzej=20Rochala?= <48657087+rochala@users.noreply.github.com>
Date: Fri, 10 Nov 2023 11:45:50 +0100
Subject: [PATCH 182/216] Remove redundant path calculation from Completions
 in PC (#18889)

---
 .../tools/pc/completions/CompletionProvider.scala  | 14 +++++---------
 .../dotty/tools/pc/completions/Completions.scala   |  2 +-
 .../tools/pc/completions/KeywordsCompletions.scala |  4 ++--
 .../tools/pc/completions/ScalaCliCompletions.scala |  7 +++----
 4 files changed, 11 insertions(+), 16 deletions(-)

diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala
index 13a6e7cdb7cb..0bc558aaa3cf 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala
@@ -52,19 +52,15 @@ class CompletionProvider(
     val pos = driver.sourcePosition(params)
     val (items, isIncomplete) = driver.compilationUnits.get(uri) match
       case Some(unit) =>
-        val path =
-          Interactive.pathTo(driver.openedTrees(uri), pos)(using ctx)
         val newctx = ctx.fresh.setCompilationUnit(unit)
-        val tpdPath =
-          Interactive.pathTo(newctx.compilationUnit.tpdTree, pos.span)(
-            using newctx
-          )
+        val tpdPath = Interactive.pathTo(newctx.compilationUnit.tpdTree, pos.span)(using newctx)
+
         val locatedCtx = Interactive.contextOfPath(tpdPath)(using newctx)
         val indexedCtx = IndexedContext(locatedCtx)
         val completionPos =
-          CompletionPos.infer(pos, params, path)(using newctx)
+          CompletionPos.infer(pos, params, tpdPath)(using newctx)
         val autoImportsGen = AutoImports.generator(
           completionPos.sourcePos,
           text,
@@ -82,7 +78,7 @@ class CompletionProvider(
           buildTargetIdentifier,
           completionPos,
           indexedCtx,
-          path,
+          tpdPath,
           config,
           folderPath,
           autoImportsGen,
@@ -96,7 +92,7 @@ class CompletionProvider(
             idx,
             autoImportsGen,
             completionPos,
-            path,
+            tpdPath,
             indexedCtx
           )(using newctx)
         }
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala
index 557d1762720b..6c53488e076e 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala
@@ -121,7 +121,7 @@ class Completions(
     val allAdvanced = advanced ++ keywords
     path match
       // should not show completions for toplevel
-      case Nil if pos.source.file.extension != "sc" =>
+      case Nil | (_: PackageDef) :: _ if pos.source.file.extension != "sc" =>
         (allAdvanced, SymbolSearch.Result.COMPLETE)
       case Select(qual, _) :: _ if qual.tpe.isErroneous =>
         (allAdvanced, SymbolSearch.Result.COMPLETE)
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/KeywordsCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/KeywordsCompletions.scala
index 9f1a5a0e9bff..0dbd9a8e4ee2 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/KeywordsCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/KeywordsCompletions.scala
@@ -26,7 +26,7 @@ object KeywordsCompletions:
       checkIfNotInComment(completionPos.cursorPos, comments)
 
     path match
-      case Nil if completionPos.query.isEmpty() =>
+      case Nil | (_: PackageDef) :: _ if completionPos.query.isEmpty() =>
         Keyword.all.collect {
           // topelevel definitions are allowed in Scala 3
           case kw if (kw.isPackage || kw.isTemplate) && notInComment =>
@@ -78,7 +78,7 @@ object KeywordsCompletions:
   private def isPackage(enclosing: List[Tree]): Boolean =
     enclosing match
-      case Nil => true
+      case Nil | (_: PackageDef) :: _ => true
       case _ => false
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala
index 551322c8ac43..fce35ab69ce3 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala
@@ -16,12 +16,11 @@ class ScalaCliCompletions(
     pos.lineContent.take(pos.column).stripPrefix("/*