
Commit b0059ff
committed Feb 6, 2025

chore: first draft of the stdlib

1 parent 4cb43b6 · commit b0059ff

14 files changed: +3936 -80 lines changed

Diff for: compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala (+1 -1)

@@ -861,7 +861,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) {
       assert(isModifierTag(tag))
       writeByte(tag)
     }
-    assert(!flags.is(Scala2x))
+    //assert(!flags.is(Scala2x))
     if (flags.is(Private)) writeModTag(PRIVATE)
     if (flags.is(Protected)) writeModTag(PROTECTED)
     if (flags.is(Final, butNot = Module)) writeModTag(FINAL)

Diff for: compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala (+3 -3)

@@ -77,9 +77,9 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete
     // Create extension methods, except if the class comes from Scala 2
     // because it adds extension methods before pickling.
     if !valueClass.is(Scala2x, butNot = Scala2Tasty) then
-      for (decl <- valueClass.classInfo.decls)
-        if isMethodWithExtension(decl) then
-          enterInModuleClass(createExtensionMethod(decl, moduleClassSym.symbol))
+      for (decl <- valueClass.classInfo.decls)
+        if isMethodWithExtension(decl) then
+          enterInModuleClass(createExtensionMethod(decl, moduleClassSym.symbol))

     // Create synthetic methods to cast values between the underlying type
     // and the ErasedValueType. These methods are removed in ElimErasedValueType.

Diff for: compiler/src/dotty/tools/dotc/typer/Namer.scala (+3 -1)

@@ -246,7 +246,9 @@ class Namer { typer: Typer =>

     tree match {
       case tree: TypeDef if tree.isClassDef =>
-        val flags = checkFlags(tree.mods.flags)
+        var flags = checkFlags(tree.mods.flags)
+        if ctx.settings.YcompileScala2Library.value then
+          flags |= Scala2x | Scala2Tasty
         val name = checkNoConflict(tree.name, flags.is(Private), tree.span).asTypeName
         val cls =
           createOrRefine[ClassSymbol](tree, name, flags, ctx.owner,
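
With this change, every class compiled while the Scala 2 library is being built by the Scala 3 compiler is stamped with the Scala2x and Scala2Tasty flags, which later phases (such as the ExtensionMethods change above) use to adjust their behaviour. Dotty's camel-cased settings conventionally map to kebab-cased command-line flags, so a build would opt in roughly as follows (flag spelling is inferred here from the YcompileScala2Library setting name and is not shown in this commit):

  // sbt sketch, illustrative only
  scalacOptions += "-Ycompile-scala2-library"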

Diff for: project/Build.scala (+5 -2)

@@ -20,6 +20,8 @@ import sbt.Package.ManifestAttributes
 import sbt.PublishBinPlugin.autoImport._
 import dotty.tools.sbtplugin.RepublishPlugin
 import dotty.tools.sbtplugin.RepublishPlugin.autoImport._
+import dotty.tools.sbtplugin.ScalaLibraryPlugin
+
 import sbt.plugins.SbtPlugin
 import sbt.ScriptedPlugin.autoImport._
 import xerial.sbt.Sonatype.autoImport._
@@ -32,6 +34,8 @@ import sbtbuildinfo.BuildInfoPlugin.autoImport._
 import sbttastymima.TastyMiMaPlugin
 import sbttastymima.TastyMiMaPlugin.autoImport._

+import scala.jdk.CollectionConverters._
+
 import scala.util.Properties.isJavaAtLeast

 import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._
@@ -1207,9 +1211,9 @@ object Build {
    * This version of the library is not (yet) TASTy/binary compatible with the Scala 2 compiled library.
    */
   lazy val `scala2-library-bootstrapped` = project.in(file("scala2-library-bootstrapped")).
+    enablePlugins(ScalaLibraryPlugin).
     withCommonSettings(Bootstrapped).
     dependsOn(dottyCompiler(Bootstrapped) % "provided; compile->runtime; test->test").
-    settings(commonBootstrappedSettings).
     settings(scala2LibraryBootstrappedSettings).
     settings(moduleName := "scala2-library")
     // -Ycheck:all is set in project/scripts/scala2-library-tasty-mima.sh
@@ -1221,7 +1225,6 @@ object Build {
   lazy val `scala2-library-cc` = project.in(file("scala2-library-cc")).
     withCommonSettings(Bootstrapped).
     dependsOn(dottyCompiler(Bootstrapped) % "provided; compile->runtime; test->test").
-    settings(commonBootstrappedSettings).
     settings(scala2LibraryBootstrappedSettings).
     settings(
       moduleName := "scala2-library-cc",

Diff for: project/Scala2LibraryBootstrappedMiMaFilters.scala (+27 -73)

@@ -4,79 +4,33 @@ import com.typesafe.tools.mima.core._
 object Scala2LibraryBootstrappedMiMaFilters {

   val BackwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map(
-    Build.stdlibBootstrappedVersion -> {
-      Seq(
-        // Files that are not compiled in the bootstrapped library
-        ProblemFilters.exclude[MissingClassProblem]("scala.AnyVal"),
-
-        // Scala language features
-        ProblemFilters.exclude[DirectMissingMethodProblem]("scala.language.<clinit>"),
-        ProblemFilters.exclude[DirectMissingMethodProblem]("scala.language#experimental.<clinit>"),
-        ProblemFilters.exclude[FinalClassProblem]("scala.language$experimental$"),
-        ProblemFilters.exclude[FinalClassProblem]("scala.languageFeature$*$"),
-
-        // Value class extension methods
-        ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*$extension"),
-
-        // Companion module class
-        ProblemFilters.exclude[FinalClassProblem]("scala.*$"),
-
-        // Scala 2 intrinsic macros
-        ProblemFilters.exclude[FinalMethodProblem]("scala.StringContext.s"),
-
-        // Specialization?
-        ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple1._1"), // field _1 in class scala.Tuple1 does not have a correspondent in current version
-        ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple2._1"), // field _1 in class scala.Tuple2 does not have a correspondent in current version
-        ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple2._2"), // field _2 in class scala.Tuple2 does not have a correspondent in current version
-
-        // Scala 2 specialization
-        ProblemFilters.exclude[MissingClassProblem]("scala.*$sp"),
-        ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*$sp"),
-        ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*#*#sp.$init$"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.collection.DoubleStepper"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.collection.immutable.DoubleVectorStepper"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.collection.immutable.IntVectorStepper"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.collection.immutable.LongVectorStepper"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.collection.IntStepper"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.collection.LongStepper"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.jdk.DoubleAccumulator"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.jdk.FunctionWrappers$*"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.jdk.IntAccumulator"),
-        ProblemFilters.exclude[MissingTypesProblem]("scala.jdk.LongAccumulator"),
-        ProblemFilters.exclude[FinalClassProblem]("scala.collection.ArrayOps$ReverseIterator"),
-        ProblemFilters.exclude[FinalClassProblem]("scala.Tuple1"),
-        ProblemFilters.exclude[FinalClassProblem]("scala.Tuple2"),
-
-        // other
-        ProblemFilters.exclude[FinalMethodProblem]("scala.Enumeration.ValueOrdering"),
-        ProblemFilters.exclude[FinalMethodProblem]("scala.Enumeration.ValueSet"),
-        ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.NoPositioner"),
-        ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.RelaxedPosition"),
-        ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.RelaxedPositioner"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.SortedMapOps.coll"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.TreeMap.empty"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.TreeMap.fromSpecific"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.ArrayBuilder#ofUnit.addAll"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.TreeMap.empty"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.TreeMap.fromSpecific"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.reflect.ManifestFactory#NothingManifest.newArray"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.reflect.ManifestFactory#NullManifest.newArray"),
-        ProblemFilters.exclude[MissingFieldProblem]("scala.collection.ArrayOps#ReverseIterator.xs"),
-        ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.NonLocalReturnControl.value"),
-        ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.collection.immutable.SortedMapOps.coll"),
-      ) ++
-      Seq( // DirectMissingMethodProblem
-        "scala.collection.LinearSeqIterator#LazyCell.this",
-        "scala.collection.mutable.PriorityQueue#ResizableArrayAccess.this",
-        "scala.concurrent.BatchingExecutor#AbstractBatch.this",
-        "scala.concurrent.Channel#LinkedList.this",
-        "scala.Enumeration#ValueOrdering.this",
-        "scala.io.Source#RelaxedPosition.this",
-        "scala.collection.IterableOnceOps#Maximized.this", // New in 2.13.11: private inner class
-        "scala.util.Properties.<clinit>",
-        "scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5",
-      ).map(ProblemFilters.exclude[DirectMissingMethodProblem])
-    }
+    Build.stdlibBootstrappedVersion -> Seq(
+      // Scala language features (not really a problem)
+      ProblemFilters.exclude[DirectMissingMethodProblem]("scala.language.<clinit>"),
+      ProblemFilters.exclude[DirectMissingMethodProblem]("scala.language#experimental.<clinit>"),
+      ProblemFilters.exclude[DirectMissingMethodProblem]("scala.util.Properties.<clinit>"),
+
+      // Companion module class (not really a problem)
+      ProblemFilters.exclude[FinalClassProblem]("scala.*$"),
+      ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.NoPositioner"),
+      ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.RelaxedPosition"),
+      ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.RelaxedPositioner"),
+      ProblemFilters.exclude[FinalMethodProblem]("scala.Enumeration.ValueOrdering"),
+      ProblemFilters.exclude[FinalMethodProblem]("scala.Enumeration.ValueSet"),
+      ProblemFilters.exclude[FinalMethodProblem]("scala.StringContext.s"),
+
+      // Need to be fixed
+      ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*$extension"),
+      //ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.SortedMapOps.coll"),
+      //ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.TreeMap.empty"),
+      //ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.TreeMap.fromSpecific"),
+      //ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.ArrayBuilder#ofUnit.addAll"),
+      //ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.TreeMap.empty"),
+      //ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.TreeMap.fromSpecific"),
+      //ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.reflect.ManifestFactory#NothingManifest.newArray"),
+      //ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.reflect.ManifestFactory#NullManifest.newArray"),
+      //ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.collection.immutable.SortedMapOps.coll"),
+    )
   )

   val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map(

Diff for: project/ScalaLibraryPlugin.scala (+108, new file)

@@ -0,0 +1,108 @@
package dotty.tools.sbtplugin

import sbt.*
import sbt.Keys.*
import scala.jdk.CollectionConverters.*
import java.nio.file.Files

object ScalaLibraryPlugin extends AutoPlugin {

  override def trigger = noTrigger

  val fetchScala2ClassFiles = taskKey[(Set[File], File)]("Fetch the files to use that were compiled with Scala 2")
  //val scala2LibraryVersion = settingKey[String]("Version of the Scala 2 Standard Library")

  override def projectSettings = Seq (
    fetchScala2ClassFiles := {
      val stream = streams.value
      val cache = stream.cacheDirectory
      val target = cache / "scala-library-classes"
      val report = update.value

      val scalaLibraryBinaryJar = report.select(
        configuration = configurationFilter(),
        module = (_: ModuleID).name == "scala-library",
        artifact = artifactFilter(`type` = "jar")).headOption.getOrElse {
          sys.error(s"Could not fetch scala-library binary JAR")
        }

      if (!target.exists()) {
        IO.createDirectory(target)
      }

      (FileFunction.cached(cache / "fetch-scala-library-classes", FilesInfo.lastModified, FilesInfo.exists) { _ =>
        stream.log.info(s"Unpacking scala-library binaries to persistent directory: ${target.getAbsolutePath}")
        IO.unzip(scalaLibraryBinaryJar, target)
        (target ** "*.class").get.toSet
      } (Set(scalaLibraryBinaryJar)), target)

    },
    (Compile / compile) := {
      val stream = streams.value
      val target = (Compile / classDirectory).value
      val (files, reference) = fetchScala2ClassFiles.value
      val analysis = (Compile / compile).value
      stream.log.info(s"Copying files from Scala 2 Standard Library to $target")
      for (file <- files; id <- file.relativeTo(reference).map(_.toString())) {
        if (filesToCopy(id)) {
          stream.log.debug(s"Copying file '${id}' to ${target / id}")
          IO.copyFile(file, target / id)
        }
      }

      val overwrittenBinaries = Files.walk((Compile / classDirectory).value.toPath())
        .iterator()
        .asScala
        .map(_.toFile)
        .map(_.relativeTo((Compile / classDirectory).value).get)
        .toSet
      val diff = files.filterNot(_.relativeTo(reference).exists(overwrittenBinaries))

      IO.copy(diff.map { file =>
        file -> (Compile / classDirectory).value / file.relativeTo(reference).get.getPath
      })

      analysis
    }
  )

  private lazy val filesToCopy = Set(
    "scala/Tuple1.class",
    "scala/Tuple2.class",
    "scala/collection/DoubleStepper.class",
    "scala/collection/IntStepper.class",
    "scala/collection/LongStepper.class",
    "scala/collection/immutable/DoubleVectorStepper.class",
    "scala/collection/immutable/IntVectorStepper.class",
    "scala/collection/immutable/LongVectorStepper.class",
    "scala/jdk/DoubleAccumulator.class",
    "scala/jdk/IntAccumulator.class",
    "scala/jdk/LongAccumulator.class",
    "scala/jdk/FunctionWrappers$FromJavaDoubleBinaryOperator.class",
    "scala/jdk/FunctionWrappers$FromJavaBooleanSupplier.class",
    "scala/jdk/FunctionWrappers$FromJavaDoubleConsumer.class",
    "scala/jdk/FunctionWrappers$FromJavaDoublePredicate.class",
    "scala/jdk/FunctionWrappers$FromJavaDoubleSupplier.class",
    "scala/jdk/FunctionWrappers$FromJavaDoubleToIntFunction.class",
    "scala/jdk/FunctionWrappers$FromJavaDoubleToLongFunction.class",
    "scala/jdk/FunctionWrappers$FromJavaIntBinaryOperator.class",
    "scala/jdk/FunctionWrappers$FromJavaDoubleUnaryOperator.class",
    "scala/jdk/FunctionWrappers$FromJavaIntPredicate.class",
    "scala/jdk/FunctionWrappers$FromJavaIntConsumer.class",
    "scala/jdk/FunctionWrappers$FromJavaIntSupplier.class",
    "scala/jdk/FunctionWrappers$FromJavaIntToDoubleFunction.class",
    "scala/jdk/FunctionWrappers$FromJavaIntToLongFunction.class",
    "scala/jdk/FunctionWrappers$FromJavaIntUnaryOperator.class",
    "scala/jdk/FunctionWrappers$FromJavaLongBinaryOperator.class",
    "scala/jdk/FunctionWrappers$FromJavaLongConsumer.class",
    "scala/jdk/FunctionWrappers$FromJavaLongPredicate.class",
    "scala/jdk/FunctionWrappers$FromJavaLongSupplier.class",
    "scala/jdk/FunctionWrappers$FromJavaLongToDoubleFunction.class",
    "scala/jdk/FunctionWrappers$FromJavaLongToIntFunction.class",
    "scala/jdk/FunctionWrappers$FromJavaLongUnaryOperator.class",
    "scala/collection/ArrayOps$ReverseIterator.class",
    "scala/runtime/NonLocalReturnControl.class",
    "scala/util/Sorting.class", "scala/util/Sorting$.class", // Contains @specialized annotation
  )

}
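
The plugin's trigger is noTrigger, so nothing happens unless a project enables it explicitly; the Build.scala change above does exactly that for scala2-library-bootstrapped. A minimal sketch of the wiring in an sbt build (reduced from the real project definition, so the settings shown here are illustrative only):

  lazy val `scala2-library-bootstrapped` = project
    .in(file("scala2-library-bootstrapped"))
    .enablePlugins(ScalaLibraryPlugin) // wraps Compile / compile and overlays selected Scala 2 .class files
    .settings(moduleName := "scala2-library")

Once enabled, the overridden Compile / compile task runs the normal compilation, then copies the class files listed in filesToCopy from the unpacked Scala 2 scala-library jar over the freshly compiled output (plus any Scala 2 class files that have no counterpart in the output).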

Diff for: scala2-library-bootstrapped/src/scala/Enumeration.scala (+352, new file)

@@ -0,0 +1,352 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc. dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala

import scala.collection.{SpecificIterableFactory, StrictOptimizedIterableOps, View, immutable, mutable}
import java.lang.reflect.{Field => JField, Method => JMethod}

import scala.annotation.{implicitNotFound, tailrec}
import scala.reflect.NameTransformer._
import scala.util.matching.Regex
import java.{util => ju}

/** Defines a finite set of values specific to the enumeration. Typically
 *  these values enumerate all possible forms something can take and provide
 *  a lightweight alternative to case classes.
 *
 *  Each call to a `Value` method adds a new unique value to the enumeration.
 *  To be accessible, these values are usually defined as `val` members of
 *  the enumeration.
 *
 *  All values in an enumeration share a common, unique type defined as the
 *  `Value` type member of the enumeration (`Value` selected on the stable
 *  identifier path of the enumeration instance).
 *
 *  Values SHOULD NOT be added to an enumeration after its construction;
 *  doing so makes the enumeration thread-unsafe. If values are added to an
 *  enumeration from multiple threads (in a non-synchronized fashion) after
 *  construction, the behavior of the enumeration is undefined.
 *
 *  @example {{{
 *  // Define a new enumeration with a type alias and work with the full set of enumerated values
 *  object WeekDay extends Enumeration {
 *    type WeekDay = Value
 *    val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
 *  }
 *  import WeekDay._
 *
 *  def isWorkingDay(d: WeekDay) = ! (d == Sat || d == Sun)
 *
 *  WeekDay.values filter isWorkingDay foreach println
 *  // output:
 *  // Mon
 *  // Tue
 *  // Wed
 *  // Thu
 *  // Fri
 *  }}}
 *
 *  @example {{{
 *  // Example of adding attributes to an enumeration by extending the Enumeration.Val class
 *  object Planet extends Enumeration {
 *    protected case class PlanetVal(mass: Double, radius: Double) extends super.Val {
 *      def surfaceGravity: Double = Planet.G * mass / (radius * radius)
 *      def surfaceWeight(otherMass: Double): Double = otherMass * surfaceGravity
 *    }
 *    import scala.language.implicitConversions
 *    implicit def valueToPlanetVal(x: Value): PlanetVal = x.asInstanceOf[PlanetVal]
 *
 *    val G: Double = 6.67300E-11
 *    val Mercury = PlanetVal(3.303e+23, 2.4397e6)
 *    val Venus   = PlanetVal(4.869e+24, 6.0518e6)
 *    val Earth   = PlanetVal(5.976e+24, 6.37814e6)
 *    val Mars    = PlanetVal(6.421e+23, 3.3972e6)
 *    val Jupiter = PlanetVal(1.9e+27, 7.1492e7)
 *    val Saturn  = PlanetVal(5.688e+26, 6.0268e7)
 *    val Uranus  = PlanetVal(8.686e+25, 2.5559e7)
 *    val Neptune = PlanetVal(1.024e+26, 2.4746e7)
 *  }
 *
 *  println(Planet.values.filter(_.radius > 7.0e6))
 *  // output:
 *  // Planet.ValueSet(Jupiter, Saturn, Uranus, Neptune)
 *  }}}
 *
 *  @param initial The initial value from which to count the integers that
 *                 identifies values at run-time.
 */
@SerialVersionUID(8476000850333817230L)
abstract class Enumeration (initial: Int) extends Serializable {
  thisenum =>

  def this() = this(0)

  /* Note that `readResolve` cannot be private, since otherwise
     the JVM does not invoke it when deserializing subclasses. */
  protected def readResolve(): AnyRef = thisenum.getClass.getField(MODULE_INSTANCE_NAME).get(null)

  /** The name of this enumeration.
   */
  override def toString: String =
    ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split
       Regex.quote(NAME_JOIN_STRING)).last

  /** The mapping from the integer used to identify values to the actual
    * values. */
  private val vmap: mutable.Map[Int, Value] = new mutable.HashMap

  /** The cache listing all values of this enumeration. */
  @transient private var vset: ValueSet = null
  @transient @volatile private var vsetDefined = false

  /** The mapping from the integer used to identify values to their
    * names. */
  private[this] val nmap: mutable.Map[Int, String] = new mutable.HashMap

  /** The values of this enumeration as a set.
   */
  def values: ValueSet = {
    if (!vsetDefined) {
      vset = (ValueSet.newBuilder ++= vmap.values).result()
      vsetDefined = true
    }
    vset
  }

  /** The integer to use to identify the next created value. */
  protected var nextId: Int = initial

  /** The string to use to name the next created value. */
  protected var nextName: Iterator[String] = _

  private def nextNameOrNull =
    if (nextName != null && nextName.hasNext) nextName.next() else null

  /** The highest integer amongst those used to identify values in this
    * enumeration. */
  private[this] var topId = initial

  /** The lowest integer amongst those used to identify values in this
    * enumeration, but no higher than 0. */
  private[this] var bottomId = if(initial < 0) initial else 0

  /** The one higher than the highest integer amongst those used to identify
    * values in this enumeration. */
  final def maxId = topId

  /** The value of this enumeration with given id `x`
   */
  final def apply(x: Int): Value = vmap(x)

  /** Return a `Value` from this `Enumeration` whose name matches
   *  the argument `s`. The names are determined automatically via reflection.
   *
   * @param  s an `Enumeration` name
   * @return   the `Value` of this `Enumeration` if its name matches `s`
   * @throws   NoSuchElementException if no `Value` with a matching
   *           name is in this `Enumeration`
   */
  final def withName(s: String): Value = values.byName.getOrElse(s,
    throw new NoSuchElementException(s"No value found for '$s'"))

  /** Creates a fresh value, part of this enumeration. */
  protected final def Value: Value = Value(nextId)

  /** Creates a fresh value, part of this enumeration, identified by the
   *  integer `i`.
   *
   *  @param i An integer that identifies this value at run-time. It must be
   *           unique amongst all values of the enumeration.
   *  @return  Fresh value identified by `i`.
   */
  protected final def Value(i: Int): Value = Value(i, nextNameOrNull)

  /** Creates a fresh value, part of this enumeration, called `name`.
   *
   *  @param name A human-readable name for that value.
   *  @return  Fresh value called `name`.
   */
  protected final def Value(name: String): Value = Value(nextId, name)

  /** Creates a fresh value, part of this enumeration, called `name`
   *  and identified by the integer `i`.
   *
   * @param i    An integer that identifies this value at run-time. It must be
   *             unique amongst all values of the enumeration.
   * @param name A human-readable name for that value.
   * @return     Fresh value with the provided identifier `i` and name `name`.
   */
  protected final def Value(i: Int, name: String): Value = new Val(i, name)

  private def populateNameMap(): Unit = {
    @tailrec def getFields(clazz: Class[_], acc: Array[JField]): Array[JField] = {
      if (clazz == null)
        acc
      else
        getFields(clazz.getSuperclass, if (clazz.getDeclaredFields.isEmpty) acc else acc ++ clazz.getDeclaredFields)
    }
    val fields = getFields(getClass.getSuperclass, getClass.getDeclaredFields)
    def isValDef(m: JMethod): Boolean = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType)

    // The list of possible Value methods: 0-args which return a conforming type
    val methods: Array[JMethod] = getClass.getMethods filter (m => m.getParameterTypes.isEmpty &&
      classOf[Value].isAssignableFrom(m.getReturnType) &&
      m.getDeclaringClass != classOf[Enumeration] &&
      isValDef(m))
    methods foreach { m =>
      val name = m.getName
      // invoke method to obtain actual `Value` instance
      val value = m.invoke(this).asInstanceOf[Value]
      // verify that outer points to the correct Enumeration: ticket #3616.
      if (value.outerEnum eq thisenum) {
        val id: Int = value.id
        nmap += ((id, name))
      }
    }
  }

  /* Obtains the name for the value with id `i`. If no name is cached
   * in `nmap`, it populates `nmap` using reflection.
   */
  private def nameOf(i: Int): String = synchronized { nmap.getOrElse(i, { populateNameMap() ; nmap(i) }) }

  /** The type of the enumerated values. */
  @SerialVersionUID(7091335633555234129L)
  abstract class Value extends Ordered[Value] with Serializable {
    /** the id and bit location of this enumeration value */
    def id: Int
    /** a marker so we can tell whose values belong to whom come reflective-naming time */
    private[Enumeration] val outerEnum = thisenum

    override def compare(that: Value): Int =
      if (this.id < that.id) -1
      else if (this.id == that.id) 0
      else 1
    override def equals(other: Any): Boolean = other match {
      case that: Enumeration#Value => (outerEnum eq that.outerEnum) && (id == that.id)
      case _                       => false
    }
    override def hashCode: Int = id.##

    /** Create a ValueSet which contains this value and another one */
    def + (v: Value): ValueSet = ValueSet(this, v)
  }

  /** A class implementing the [[scala.Enumeration.Value]] type. This class
   *  can be overridden to change the enumeration's naming and integer
   *  identification behaviour.
   */
  @SerialVersionUID(0 - 3501153230598116017L)
  protected class Val(i: Int, name: String) extends Value with Serializable {
    def this(i: Int)       = this(i, nextNameOrNull)
    def this(name: String) = this(nextId, name)
    def this()             = this(nextId)

    assert(!vmap.isDefinedAt(i), "Duplicate id: " + i)
    vmap(i) = this
    vsetDefined = false
    nextId = i + 1
    if (nextId > topId) topId = nextId
    if (i < bottomId) bottomId = i
    def id: Int = i
    override def toString(): String =
      if (name != null) name
      else try thisenum.nameOf(i)
      catch { case _: NoSuchElementException => "<Invalid enum: no field for #" + i + ">" }

    protected def readResolve(): AnyRef = {
      val enumeration = thisenum.readResolve().asInstanceOf[Enumeration]
      if (enumeration.vmap == null) this
      else enumeration.vmap(i)
    }
  }

  /** An ordering by id for values of this set */
  implicit object ValueOrdering extends Ordering[Value] {
    private val _ = Enumeration.this
    def compare(x: Value, y: Value): Int = x compare y
  }

  /** A class for sets of values.
   *  Iterating through this set will yield values in increasing order of their ids.
   *
   *  @param nnIds The set of ids of values (adjusted so that the lowest value does
   *    not fall below zero), organized as a `BitSet`.
   *  @define Coll `collection.immutable.SortedSet`
   */
  @SerialVersionUID(7229671200427364242L)
  class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet)
    extends immutable.AbstractSet[Value]
      with immutable.SortedSet[Value]
      with immutable.SortedSetOps[Value, immutable.SortedSet, ValueSet]
      with StrictOptimizedIterableOps[Value, immutable.Set, ValueSet]
      with Serializable {

    implicit def ordering: Ordering[Value] = ValueOrdering
    def rangeImpl(from: Option[Value], until: Option[Value]): ValueSet =
      new ValueSet(nnIds.rangeImpl(from.map(_.id - bottomId), until.map(_.id - bottomId)))

    override def empty: ValueSet = ValueSet.empty
    override def knownSize: Int = nnIds.size
    override def isEmpty: Boolean = nnIds.isEmpty
    def contains(v: Value): Boolean = nnIds contains (v.id - bottomId)
    def incl (value: Value): ValueSet = new ValueSet(nnIds + (value.id - bottomId))
    def excl (value: Value): ValueSet = new ValueSet(nnIds - (value.id - bottomId))
    def iterator: Iterator[Value] = nnIds.iterator map (id => thisenum.apply(bottomId + id))
    override def iteratorFrom(start: Value): Iterator[Value] = nnIds iteratorFrom start.id map (id => thisenum.apply(bottomId + id))
    override def className: String = s"$thisenum.ValueSet"
    /** Creates a bit mask for the zero-adjusted ids in this set as a
     *  new array of longs */
    def toBitMask: Array[Long] = nnIds.toBitMask

    override protected def fromSpecific(coll: IterableOnce[Value]): ValueSet = ValueSet.fromSpecific(coll)
    override protected def newSpecificBuilder = ValueSet.newBuilder

    def map(f: Value => Value): ValueSet = fromSpecific(new View.Map(this, f))
    def flatMap(f: Value => IterableOnce[Value]): ValueSet = fromSpecific(new View.FlatMap(this, f))

    // necessary for disambiguation:
    override def map[B](f: Value => B)(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] =
      super[SortedSet].map[B](f)
    override def flatMap[B](f: Value => IterableOnce[B])(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] =
      super[SortedSet].flatMap[B](f)
    override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(ValueSet.zipOrdMsg) ev: Ordering[(Value, B)]): immutable.SortedSet[(Value, B)] =
      super[SortedSet].zip[B](that)
    override def collect[B](pf: PartialFunction[Value, B])(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] =
      super[SortedSet].collect[B](pf)

    @transient private[Enumeration] lazy val byName: Map[String, Value] = iterator.map( v => v.toString -> v).toMap
  }

  /** A factory object for value sets */
  @SerialVersionUID(3L)
  object ValueSet extends SpecificIterableFactory[Value, ValueSet] {
    private final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Value] first by calling `unsorted`."
    private final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Value, ${B})]. You may want to upcast to a Set[Value] first by calling `unsorted`."

    /** The empty value set */
    val empty: ValueSet = new ValueSet(immutable.BitSet.empty)
    /** A value set containing all the values for the zero-adjusted ids
     *  corresponding to the bits in an array */
    def fromBitMask(elems: Array[Long]): ValueSet = new ValueSet(immutable.BitSet.fromBitMask(elems))
    /** A builder object for value sets */
    def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] {
      private[this] val b = new mutable.BitSet
      def addOne (x: Value) = { b += (x.id - bottomId); this }
      def clear() = b.clear()
      def result() = new ValueSet(b.toImmutable)
    }
    def fromSpecific(it: IterableOnce[Value]): ValueSet =
      newBuilder.addAll(it).result()
  }
}
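
A short usage sketch of the API defined above, reusing the WeekDay enumeration from the scaladoc example:

  object WeekDay extends Enumeration {
    type WeekDay = Value
    val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
  }

  WeekDay.values          // ValueSet(Mon, Tue, ..., Sun), iterated in id order
  WeekDay.withName("Fri") // Fri; throws NoSuchElementException for an unknown name
  WeekDay(0)              // Mon: `apply` looks a value up by its id
  WeekDay.maxId           // 7: one higher than the highest id in use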

Diff for: scala2-library-bootstrapped/src/scala/StringContext.scala (+476)

Large diffs are not rendered by default.

Diff for: scala2-library-bootstrapped/src/scala/collection/IterableOnce.scala (+1,516)

Large diffs are not rendered by default.

Diff for: scala2-library-bootstrapped/src/scala/collection/LinearSeq.scala (+314, new file)

@@ -0,0 +1,314 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc. dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala
package collection

import scala.annotation.{nowarn, tailrec}

/** Base trait for linearly accessed sequences that have efficient `head` and
 *  `tail` operations.
 *  Known subclasses: List, LazyList
 */
trait LinearSeq[+A] extends Seq[A]
  with LinearSeqOps[A, LinearSeq, LinearSeq[A]]
  with IterableFactoryDefaults[A, LinearSeq] {
  @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
  override protected[this] def stringPrefix: String = "LinearSeq"

  override def iterableFactory: SeqFactory[LinearSeq] = LinearSeq
}

@SerialVersionUID(3L)
object LinearSeq extends SeqFactory.Delegate[LinearSeq](immutable.LinearSeq)

/** Base trait for linear Seq operations */
trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends Any with SeqOps[A, CC, C] {

  /** @inheritdoc
   *
   *  Note: *Must* be overridden in subclasses. The default implementation that is inherited from [[SeqOps]]
   *  uses `lengthCompare`, which is defined here to use `isEmpty`.
   */
  override def isEmpty: Boolean

  /** @inheritdoc
   *
   *  Note: *Must* be overridden in subclasses. The default implementation is inherited from [[IterableOps]].
   */
  def head: A

  /** @inheritdoc
   *
   *  Note: *Must* be overridden in subclasses. The default implementation is inherited from [[IterableOps]].
   */
  def tail: C

  override def headOption: Option[A] =
    if (isEmpty) None else Some(head)

  def iterator: Iterator[A] =
    if (knownSize == 0) Iterator.empty
    else new LinearSeqIterator[A](this)

  def length: Int = {
    var these = coll
    var len = 0
    while (these.nonEmpty) {
      len += 1
      these = these.tail
    }
    len
  }

  override def last: A = {
    if (isEmpty) throw new NoSuchElementException("LinearSeq.last")
    else {
      var these = coll
      var scout = tail
      while (scout.nonEmpty) {
        these = scout
        scout = scout.tail
      }
      these.head
    }
  }

  override def lengthCompare(len: Int): Int = {
    @tailrec def loop(i: Int, xs: LinearSeq[A]): Int = {
      if (i == len)
        if (xs.isEmpty) 0 else 1
      else if (xs.isEmpty)
        -1
      else
        loop(i + 1, xs.tail)
    }
    if (len < 0) 1
    else loop(0, coll)
  }

  override def lengthCompare(that: Iterable[_]): Int = {
    val thatKnownSize = that.knownSize

    if (thatKnownSize >= 0) this lengthCompare thatKnownSize
    else that match {
      case that: LinearSeq[_] =>
        var thisSeq = this
        var thatSeq = that
        while (thisSeq.nonEmpty && thatSeq.nonEmpty) {
          thisSeq = thisSeq.tail
          thatSeq = thatSeq.tail
        }
        java.lang.Boolean.compare(thisSeq.nonEmpty, thatSeq.nonEmpty)
      case _ =>
        var thisSeq = this
        val thatIt = that.iterator
        while (thisSeq.nonEmpty && thatIt.hasNext) {
          thisSeq = thisSeq.tail
          thatIt.next()
        }
        java.lang.Boolean.compare(thisSeq.nonEmpty, thatIt.hasNext)
    }
  }

  override def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0

  // `apply` is defined in terms of `drop`, which is in turn defined in
  // terms of `tail`.
  @throws[IndexOutOfBoundsException]
  override def apply(n: Int): A = {
    if (n < 0) throw new IndexOutOfBoundsException(n.toString)
    val skipped = drop(n)
    if (skipped.isEmpty) throw new IndexOutOfBoundsException(n.toString)
    skipped.head
  }

  override def foreach[U](f: A => U): Unit = {
    var these: LinearSeq[A] = coll
    while (!these.isEmpty) {
      f(these.head)
      these = these.tail
    }
  }

  override def forall(p: A => Boolean): Boolean = {
    var these: LinearSeq[A] = coll
    while (!these.isEmpty) {
      if (!p(these.head)) return false
      these = these.tail
    }
    true
  }

  override def exists(p: A => Boolean): Boolean = {
    var these: LinearSeq[A] = coll
    while (!these.isEmpty) {
      if (p(these.head)) return true
      these = these.tail
    }
    false
  }

  override def contains[A1 >: A](elem: A1): Boolean = {
    var these: LinearSeq[A] = coll
    while (!these.isEmpty) {
      if (these.head == elem) return true
      these = these.tail
    }
    false
  }

  override def find(p: A => Boolean): Option[A] = {
    var these: LinearSeq[A] = coll
    while (!these.isEmpty) {
      if (p(these.head)) return Some(these.head)
      these = these.tail
    }
    None
  }

  override def foldLeft[B](z: B)(op: (B, A) => B): B = {
    var acc = z
    var these: LinearSeq[A] = coll
    while (!these.isEmpty) {
      acc = op(acc, these.head)
      these = these.tail
    }
    acc
  }

  override def sameElements[B >: A](that: IterableOnce[B]): Boolean = {
    @tailrec def linearSeqEq(a: LinearSeq[B], b: LinearSeq[B]): Boolean =
      (a eq b) || {
        if (a.nonEmpty && b.nonEmpty && a.head == b.head) {
          linearSeqEq(a.tail, b.tail)
        }
        else {
          a.isEmpty && b.isEmpty
        }
      }

    that match {
      case that: LinearSeq[B] => linearSeqEq(coll, that)
      case _ => super.sameElements(that)
    }
  }

  override def segmentLength(p: A => Boolean, from: Int): Int = {
    var i = 0
    var seq = drop(from)
    while (seq.nonEmpty && p(seq.head)) {
      i += 1
      seq = seq.tail
    }
    i
  }

  override def indexWhere(p: A => Boolean, from: Int): Int = {
    var i = math.max(from, 0)
    var these: LinearSeq[A] = this drop from
    while (these.nonEmpty) {
      if (p(these.head))
        return i

      i += 1
      these = these.tail
    }
    -1
  }

  override def lastIndexWhere(p: A => Boolean, end: Int): Int = {
    var i = 0
    var these: LinearSeq[A] = coll
    var last = -1
    while (!these.isEmpty && i <= end) {
      if (p(these.head)) last = i
      these = these.tail
      i += 1
    }
    last
  }

  override def findLast(p: A => Boolean): Option[A] = {
    var these: LinearSeq[A] = coll
    var found = false
    var last: A = null.asInstanceOf[A] // don't use `Option`, to prevent excessive `Some` allocation
    while (these.nonEmpty) {
      val elem = these.head
      if (p(elem)) {
        found = true
        last = elem
      }
      these = these.tail
    }
    if (found) Some(last) else None
  }

  override def tails: Iterator[C] = {
    val end = Iterator.single(empty)
    Iterator.iterate(coll)(_.tail).takeWhile(_.nonEmpty) ++ end
  }
}

trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends Any with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] {
  // A more efficient iterator implementation than the default LinearSeqIterator
  override def iterator: Iterator[A] = new AbstractIterator[A] {
    private[this] var current = StrictOptimizedLinearSeqOps.this
    def hasNext = !current.isEmpty
    def next() = { val r = current.head; current = current.tail; r }
  }

  // Optimized version of `drop` that avoids copying
  override def drop(n: Int): C = {
    @tailrec def loop(n: Int, s: C): C =
      if (n <= 0 || s.isEmpty) s
      else loop(n - 1, s.tail)
    loop(n, coll)
  }

  override def dropWhile(p: A => Boolean): C = {
    @tailrec def loop(s: C): C =
      if (s.nonEmpty && p(s.head)) loop(s.tail)
      else s
    loop(coll)
  }
}

/** A specialized Iterator for LinearSeqs that is lazy enough for Stream and LazyList. This is accomplished by not
 *  evaluating the tail after returning the current head.
 */
private[collection] final class LinearSeqIterator[A](coll: LinearSeqOps[A, LinearSeq, LinearSeq[A]]) extends AbstractIterator[A] {

  // A call-by-need cell
  private[this] final class LazyCell(st: => LinearSeqOps[A, LinearSeq, LinearSeq[A]]) {
    def this(outer: LinearSeqIterator[A], st: => LinearSeqOps[A, LinearSeq, LinearSeq[A]]) = this(st)
    lazy val v = st
  }

  private[this] var these: LazyCell = {
    // Reassign reference to avoid creating a private class field and holding a reference to the head.
    // LazyCell would otherwise close over `coll`.
    val initialHead = coll
    new LazyCell(initialHead)
  }

  def hasNext: Boolean = these.v.nonEmpty

  def next(): A =
    if (isEmpty) Iterator.empty.next()
    else {
      val cur = these.v
      val result = cur.head
      these = new LazyCell(cur.tail)
      result
    }
}
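
All of the operations above are head/tail loops, so indexed access and length are linear-time on a LinearSeq. A small sketch of that cost model using List, the canonical LinearSeq:

  val xs: scala.collection.LinearSeq[Int] = List(10, 20, 30)

  xs.head              // 10, constant time
  xs(2)                // 30, implemented as drop(2).head, i.e. a linear walk
  xs.length            // 3, counted by traversing tails
  xs.lengthCompare(10) // -1, stops as soon as the sequence runs out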

Diff for: scala2-library-bootstrapped/src/scala/collection/mutable/PriorityQueue.scala (+416)

Large diffs are not rendered by default.

Diff for: scala2-library-bootstrapped/src/scala/concurrent/BatchingExecutor.scala (+272, new file)

@@ -0,0 +1,272 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc. dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.concurrent

import java.util.concurrent.Executor
import java.util.Objects
import scala.util.control.NonFatal
import scala.annotation.{switch, tailrec}

/**
 * Marker trait to indicate that a Runnable is Batchable by BatchingExecutors
 */
trait Batchable {
  self: Runnable =>
}

private[concurrent] object BatchingExecutorStatics {
  final val emptyBatchArray: Array[Runnable] = new Array[Runnable](0)

  // Max number of Runnables executed nested before starting to batch (to prevent stack exhaustion)
  final val syncPreBatchDepth = 16

  // Max number of Runnables processed in one go (to prevent starvation of other tasks on the pool)
  final val runLimit = 1024

  object MissingParentBlockContext extends BlockContext {
    override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T =
      try thunk finally throw new IllegalStateException("BUG in BatchingExecutor.Batch: parentBlockContext is null")
  }
}

/**
 * Mixin trait for an Executor
 * which groups multiple nested `Runnable.run()` calls
 * into a single Runnable passed to the original
 * Executor. This can be a useful optimization
 * because it bypasses the original context's task
 * queue and keeps related (nested) code on a single
 * thread which may improve CPU affinity. However,
 * if tasks passed to the Executor are blocking
 * or expensive, this optimization can prevent work-stealing
 * and make performance worse.
 * A batching executor can create deadlocks if code does
 * not use `scala.concurrent.blocking` when it should,
 * because tasks created within other tasks will block
 * on the outer task completing.
 * This executor may run tasks in any order, including LIFO order.
 * There are no ordering guarantees.
 *
 * WARNING: Only use *EITHER* `submitAsyncBatched` OR `submitSyncBatched`!!
 *
 * When you implement this trait for async executors like thread pools,
 * you're going to need to implement it something like the following:
 *
 * {{{
 *  final override def submitAsync(runnable: Runnable): Unit =
 *    super[SuperClass].execute(runnable) // To prevent reentrancy into `execute`
 *
 *  final override def execute(runnable: Runnable): Unit =
 *    if (runnable.isInstanceOf[Batchable]) // Or other logic
 *      submitAsyncBatched(runnable)
 *    else
 *      submitAsync(runnable)
 *
 *  final override def reportFailure(cause: Throwable): Unit = …
 * }}}
 *
 * And if you want to implement if for a sync, trampolining, executor you're
 * going to implement it something like this:
 *
 * {{{
 *  final override def submitAsync(runnable: Runnable): Unit = ()
 *
 *  final override def execute(runnable: Runnable): Unit =
 *    submitSyncBatched(runnable) // You typically will want to batch everything
 *
 *  final override def reportFailure(cause: Throwable): Unit =
 *    ExecutionContext.defaultReporter(cause) // Or choose something more fitting
 * }}}
 *
 */
private[concurrent] trait BatchingExecutor extends Executor {
  private[this] final val _tasksLocal = new ThreadLocal[AnyRef]()

  /*
   * Batch implements a LIFO queue (stack) and is used as a trampolining Runnable.
   * In order to conserve allocations, the first element in the batch is stored "unboxed" in
   * the `first` field. Subsequent Runnables are stored in the array called `other`.
   */
  private[this] sealed abstract class AbstractBatch protected (protected final var first: Runnable, protected final var other: Array[Runnable], protected final var size: Int) {

    def this(outer: BatchingExecutor, first: Runnable, other: Array[Runnable], size: Int) = this(first, other, size)

    private[this] final def ensureCapacity(curSize: Int): Array[Runnable] = {
      val curOther = this.other
      val curLen = curOther.length
      if (curSize <= curLen) curOther
      else {
        val newLen = if (curLen == 0) 4 else curLen << 1

        if (newLen <= curLen) throw new StackOverflowError("Space limit of asynchronous stack reached: " + curLen)
        val newOther = new Array[Runnable](newLen)
        System.arraycopy(curOther, 0, newOther, 0, curLen)
        this.other = newOther
        newOther
      }
    }

    final def push(r: Runnable): Unit = {
      val sz = this.size
      if(sz == 0)
        this.first = r
      else
        ensureCapacity(sz)(sz - 1) = r
      this.size = sz + 1
    }

    @tailrec protected final def runN(n: Int): Unit =
      if (n > 0)
        (this.size: @switch) match {
          case 0 =>
          case 1 =>
            val next = this.first
            this.first = null
            this.size = 0
            next.run()
            runN(n - 1)
          case sz =>
            val o = this.other
            val next = o(sz - 2)
            o(sz - 2) = null
            this.size = sz - 1
            next.run()
            runN(n - 1)
        }
  }

  private[this] final class AsyncBatch private(_first: Runnable, _other: Array[Runnable], _size: Int) extends AbstractBatch(_first, _other, _size) with Runnable with BlockContext with (BlockContext => Throwable) {
    private[this] final var parentBlockContext: BlockContext = BatchingExecutorStatics.MissingParentBlockContext

    final def this(runnable: Runnable) = this(runnable, BatchingExecutorStatics.emptyBatchArray, 1)

    override final def run(): Unit = {
      _tasksLocal.set(this) // This is later cleared in `apply` or `runWithoutResubmit`

      val f = resubmit(BlockContext.usingBlockContext(this)(this))

      if (f != null)
        throw f
    }

    /* LOGIC FOR ASYNCHRONOUS BATCHES */
    override final def apply(prevBlockContext: BlockContext): Throwable = try {
      parentBlockContext = prevBlockContext
      runN(BatchingExecutorStatics.runLimit)
      null
    } catch {
      case t: Throwable => t // We are handling exceptions on the outside of this method
    } finally {
      parentBlockContext = BatchingExecutorStatics.MissingParentBlockContext
      _tasksLocal.remove()
    }

    /* Attempts to resubmit this Batch to the underlying ExecutionContext,
     * this only happens for Batches where `resubmitOnBlock` is `true`.
     * Only attempt to resubmit when there are `Runnables` left to process.
     * Note that `cause` can be `null`.
     */
    private[this] final def resubmit(cause: Throwable): Throwable =
      if (this.size > 0) {
        try { submitForExecution(this); cause } catch {
          case inner: Throwable =>
            if (NonFatal(inner)) {
              val e = new ExecutionException("Non-fatal error occurred and resubmission failed, see suppressed exception.", cause)
              e.addSuppressed(inner)
              e
            } else inner
        }
      } else cause // TODO: consider if NonFatals should simply be `reportFailure`:ed rather than rethrown

    private[this] final def cloneAndClear(): AsyncBatch = {
      val newBatch = new AsyncBatch(this.first, this.other, this.size)
      this.first = null
      this.other = BatchingExecutorStatics.emptyBatchArray
      this.size = 0
      newBatch
    }

    override final def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
      // If we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
      if(this.size > 0)
        submitForExecution(cloneAndClear()) // If this throws then we have bigger problems

      parentBlockContext.blockOn(thunk) // Now delegate the blocking to the previous BC
    }
  }

  private[this] final class SyncBatch(runnable: Runnable) extends AbstractBatch(runnable, BatchingExecutorStatics.emptyBatchArray, 1) with Runnable {
    @tailrec override final def run(): Unit = {
      try runN(BatchingExecutorStatics.runLimit) catch {
        case ie: InterruptedException =>
          reportFailure(ie) // TODO: Handle InterruptedException differently?
        case f if NonFatal(f) =>
          reportFailure(f)
      }

      if (this.size > 0)
        run()
    }
  }

  /** MUST throw a NullPointerException when `runnable` is null
   *  When implementing a sync BatchingExecutor, it is RECOMMENDED
   *  to implement this method as `runnable.run()`
   */
  protected def submitForExecution(runnable: Runnable): Unit

  /** Reports that an asynchronous computation failed.
   *  See `ExecutionContext.reportFailure(throwable: Throwable)`
   */
  protected def reportFailure(throwable: Throwable): Unit

  /**
   * WARNING: Never use both `submitAsyncBatched` and `submitSyncBatched` in the same
   * implementation of `BatchingExecutor`
   */
  protected final def submitAsyncBatched(runnable: Runnable): Unit = {
    val b = _tasksLocal.get
    if (b.isInstanceOf[AsyncBatch]) b.asInstanceOf[AsyncBatch].push(runnable)
    else submitForExecution(new AsyncBatch(runnable))
  }

  /**
   * WARNING: Never use both `submitAsyncBatched` and `submitSyncBatched` in the same
   * implementation of `BatchingExecutor`
   */
  protected final def submitSyncBatched(runnable: Runnable): Unit = {
    Objects.requireNonNull(runnable, "runnable is null")
    val tl = _tasksLocal
    val b = tl.get
    if (b.isInstanceOf[SyncBatch]) b.asInstanceOf[SyncBatch].push(runnable)
    else {
      val i = if (b ne null) b.asInstanceOf[java.lang.Integer].intValue else 0
      if (i < BatchingExecutorStatics.syncPreBatchDepth) {
        tl.set(java.lang.Integer.valueOf(i + 1))
        try submitForExecution(runnable) // User code so needs to be try-finally guarded here
        catch {
          case ie: InterruptedException =>
            reportFailure(ie) // TODO: Handle InterruptedException differently?
          case f if NonFatal(f) =>
            reportFailure(f)
        }
        finally tl.set(b)
      } else {
        val batch = new SyncBatch(runnable)
        tl.set(batch)
        submitForExecution(batch)
        tl.set(b) // Batch only throws fatals so no need for try-finally here
      }
    }
  }
}
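
The scaladoc above sketches both the async and the sync variants. As a concrete illustration of the sync/trampolining case it describes, here is a minimal sketch; the object name is made up, and since BatchingExecutor is private[concurrent], such an implementation can only live inside the scala.concurrent package:

  package scala.concurrent

  // Hypothetical example, not part of this commit: a trampolining executor that
  // runs nested tasks on the submitting thread, batching them to bound stack depth.
  private[concurrent] object ExampleTrampoline extends BatchingExecutor {
    // Sync batching: run the task directly on the current thread.
    override protected def submitForExecution(runnable: Runnable): Unit = runnable.run()
    override protected def reportFailure(t: Throwable): Unit = ExecutionContext.defaultReporter(t)
    override def execute(runnable: Runnable): Unit = submitSyncBatched(runnable)
  }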

Diff for: scala2-library-bootstrapped/src/scala/concurrent/Channel.scala (+61, new file)

@@ -0,0 +1,61 @@
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc. dba Akka
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.concurrent

/** This class provides a simple FIFO queue of data objects,
 *  which are read by one or more reader threads.
 *
 *  @tparam A type of data exchanged
 */
@deprecated("Use `java.util.concurrent.LinkedTransferQueue` instead.", since = "2.13.0")
class Channel[A] {
  private class LinkedList {
    def this(outer: Channel[A]) = this()

    var elem: A = _
    var next: LinkedList = _
  }
  private[this] var written = new LinkedList // FIFO queue, realized through
  private[this] var lastWritten = written    // aliasing of a linked list
  private[this] var nreaders = 0

  /** Append a value to the FIFO queue to be read by `read`.
   *  This operation is nonblocking and can be executed by any thread.
   *
   *  @param x object to enqueue to this channel
   */
  def write(x: A): Unit = synchronized {
    lastWritten.elem = x
    lastWritten.next = new LinkedList
    lastWritten = lastWritten.next
    if (nreaders > 0) notify()
  }

  /** Retrieve the next waiting object from the FIFO queue,
   *  blocking if necessary until an object is available.
   *
   *  @return next object dequeued from this channel
   */
  def read: A = synchronized {
    while (written.next == null) {
      try {
        nreaders += 1
        wait()
      }
      finally nreaders -= 1
    }
    val x = written.elem
    written = written.next
    x
  }
}
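
A small usage sketch (the class is deprecated in favour of java.util.concurrent.LinkedTransferQueue, but the pattern is: one thread writes, another blocks in read until a value arrives; the thread and values below are illustrative):

  val chan = new scala.concurrent.Channel[Int]

  val reader = new Thread(() => {
    val x = chan.read // blocks until something has been written
    println(s"got $x")
  })
  reader.start()

  chan.write(42) // enqueues the value and wakes a waiting reader
  reader.join()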

Diff for: scala2-library-bootstrapped/src/scala/io/Source.scala (+382, new file)

@@ -0,0 +1,382 @@
1+
/*
2+
* Scala (https://www.scala-lang.org)
3+
*
4+
* Copyright EPFL and Lightbend, Inc. dba Akka
5+
*
6+
* Licensed under Apache License 2.0
7+
* (http://www.apache.org/licenses/LICENSE-2.0).
8+
*
9+
* See the NOTICE file distributed with this work for
10+
* additional information regarding copyright ownership.
11+
*/
12+
13+
package scala
14+
package io
15+
16+
import scala.collection.{AbstractIterator, BufferedIterator}
17+
import java.io.{Closeable, FileInputStream, FileNotFoundException, InputStream, PrintStream, File => JFile}
18+
import java.net.{URI, URL}
19+
20+
import scala.annotation.nowarn
21+
22+
/** This object provides convenience methods to create an iterable
23+
* representation of a source file.
24+
*/
25+
object Source {
26+
val DefaultBufSize = 2048
27+
28+
/** Creates a `Source` from System.in.
29+
*/
30+
def stdin = fromInputStream(System.in)
31+
32+
/** Creates a Source from an Iterable.
33+
*
34+
* @param iterable the Iterable
35+
* @return the Source
36+
*/
37+
def fromIterable(iterable: Iterable[Char]): Source = new Source {
38+
val iter = iterable.iterator
39+
} withReset(() => fromIterable(iterable))
40+
41+
/** Creates a Source instance from a single character.
42+
*/
43+
def fromChar(c: Char): Source = fromIterable(Array(c))
44+
45+
/** creates Source from array of characters, with empty description.
46+
*/
47+
def fromChars(chars: Array[Char]): Source = fromIterable(chars)
48+
49+
/** creates Source from a String, with no description.
50+
*/
51+
def fromString(s: String): Source = fromIterable(s)
52+
53+
/** creates Source from file with given name, setting its description to
54+
* filename.
55+
*/
56+
def fromFile(name: String)(implicit codec: Codec): BufferedSource =
57+
fromFile(new JFile(name))(codec)
58+
59+
/** creates Source from file with given name, using given encoding, setting
60+
* its description to filename.
61+
*/
62+
def fromFile(name: String, enc: String): BufferedSource =
63+
fromFile(name)(Codec(enc))
64+
65+
/** creates `source` from file with given file `URI`.
66+
*/
67+
def fromFile(uri: URI)(implicit codec: Codec): BufferedSource =
68+
fromFile(new JFile(uri))(codec)
69+
70+
/** creates Source from file with given file: URI
71+
*/
72+
def fromFile(uri: URI, enc: String): BufferedSource =
73+
fromFile(uri)(Codec(enc))
74+
75+
/** creates Source from file, using default character encoding, setting its
76+
* description to filename.
77+
*/
78+
def fromFile(file: JFile)(implicit codec: Codec): BufferedSource =
79+
fromFile(file, Source.DefaultBufSize)(codec)
80+
81+
/** same as fromFile(file, enc, Source.DefaultBufSize)
82+
*/
83+
def fromFile(file: JFile, enc: String): BufferedSource =
84+
fromFile(file)(Codec(enc))
85+
86+
def fromFile(file: JFile, enc: String, bufferSize: Int): BufferedSource =
87+
fromFile(file, bufferSize)(Codec(enc))
88+
89+
/** Creates Source from `file`, using given character encoding, setting
90+
* its description to filename. Input is buffered in a buffer of size
91+
* `bufferSize`.
92+
*/
93+
def fromFile(file: JFile, bufferSize: Int)(implicit codec: Codec): BufferedSource = {
94+
val inputStream = new FileInputStream(file)
95+
96+
createBufferedSource(
97+
inputStream,
98+
bufferSize,
99+
() => fromFile(file, bufferSize)(codec),
100+
() => inputStream.close()
101+
)(codec) withDescription s"file:${file.getAbsolutePath}"
102+
}
103+
  /** Create a `Source` from array of bytes, decoding
   * the bytes according to codec.
   *
   * @return the created `Source` instance.
   */
  def fromBytes(bytes: Array[Byte])(implicit codec: Codec): Source =
    fromString(new String(bytes, codec.name))

  def fromBytes(bytes: Array[Byte], enc: String): Source =
    fromBytes(bytes)(Codec(enc))

  /** Create a `Source` from array of bytes, assuming
   * one byte per character (ISO-8859-1 encoding.)
   */
  @deprecated("Use `fromBytes` and specify an encoding", since="2.13.9")
  def fromRawBytes(bytes: Array[Byte]): Source =
    fromString(new String(bytes, Codec.ISO8859.charSet))

  /** creates `Source` from file with given file: URI
   */
  def fromURI(uri: URI)(implicit codec: Codec): BufferedSource =
    fromFile(new JFile(uri))(codec)

  /** same as fromURL(new URL(s))(Codec(enc))
   */
  def fromURL(s: String, enc: String): BufferedSource =
    fromURL(s)(Codec(enc))

  /** same as fromURL(new URL(s))
   */
  def fromURL(s: String)(implicit codec: Codec): BufferedSource =
    fromURL(new URI(s).toURL)(codec)

  /** same as fromInputStream(url.openStream())(Codec(enc))
   */
  def fromURL(url: URL, enc: String): BufferedSource =
    fromURL(url)(Codec(enc))

  /** same as fromInputStream(url.openStream())(codec)
   */
  def fromURL(url: URL)(implicit codec: Codec): BufferedSource =
    fromInputStream(url.openStream())(codec)
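  // Usage sketch (not part of the original source; the URL is hypothetical and the
  // call performs network I/O):
  //
  //   val page = Source.fromURL("https://example.com/", "UTF-8")
  //   try println(page.mkString)
  //   finally page.close()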
  /** Reads data from inputStream with a buffered reader, using the encoding
   * in implicit parameter codec.
   *
   * @param inputStream the input stream from which to read
   * @param bufferSize buffer size (defaults to Source.DefaultBufSize)
   * @param reset a () => Source which resets the stream (if unset, reset() will throw an Exception)
   * @param close a () => Unit method which closes the stream (if unset, close() will do nothing)
   * @param codec (implicit) a scala.io.Codec specifying behavior (defaults to Codec.default)
   * @return the buffered source
   */
  def createBufferedSource(
    inputStream: InputStream,
    bufferSize: Int = DefaultBufSize,
    reset: () => Source = null,
    close: () => Unit = null
  )(implicit codec: Codec): BufferedSource = {
    // workaround for default arguments being unable to refer to other parameters
    val resetFn = if (reset == null) () => createBufferedSource(inputStream, bufferSize, reset, close)(codec) else reset

    new BufferedSource(inputStream, bufferSize)(codec) withReset resetFn withClose close
  }

  def fromInputStream(is: InputStream, enc: String): BufferedSource =
    fromInputStream(is)(Codec(enc))

  def fromInputStream(is: InputStream)(implicit codec: Codec): BufferedSource =
    createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec)

  /** Reads data from a classpath resource, using either a context classloader (default) or a passed one.
   *
   * @param resource name of the resource to load from the classpath
   * @param classLoader classloader to be used, or context classloader if not specified
   * @return the buffered source
   */
  def fromResource(resource: String, classLoader: ClassLoader = Thread.currentThread().getContextClassLoader())(implicit codec: Codec): BufferedSource =
    Option(classLoader.getResourceAsStream(resource)) match {
      case Some(in) => fromInputStream(in)
      case None => throw new FileNotFoundException(s"resource '$resource' was not found in the classpath from the given classloader.")
    }

}
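// Usage sketch (not part of the original source; "conf/app.conf" is a hypothetical
// classpath resource): fromResource throws a FileNotFoundException when the resource
// cannot be located by the chosen classloader.
//
//   val conf = Source.fromResource("conf/app.conf")
//   try conf.getLines().foreach(println)
//   finally conf.close()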
/** An iterable representation of source data.
 * It may be reset with the optional [[reset]] method.
 *
 * Subclasses must supply [[scala.io.Source.iter the underlying iterator]].
 *
 * Error handling may be customized by overriding the [[scala.io.Source.report report]] method.
 *
 * The [[scala.io.Source.ch current input]] and [[scala.io.Source.pos position]],
 * as well as the [[scala.io.Source.next next character]] methods delegate to
 * [[scala.io.Source#Positioner the positioner]].
 *
 * The default positioner encodes line and column numbers in the position passed to [[report]].
 * This behavior can be changed by supplying a
 * [[scala.io.Source.withPositioning(pos:* custom positioner]].
 *
 */
abstract class Source extends Iterator[Char] with Closeable {
  /** the actual iterator */
  protected val iter: Iterator[Char]

  // ------ public values

  /** description of this source, default empty */
  var descr: String = ""
  var nerrors = 0
  var nwarnings = 0

  private def lineNum(line: Int): String = (getLines() drop (line - 1) take 1).mkString
  class LineIterator extends AbstractIterator[String] with Iterator[String] {
    private[this] val sb = new StringBuilder

    lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered
    def isNewline(ch: Char): Boolean = ch == '\r' || ch == '\n'
    def getc(): Boolean = iter.hasNext && {
      val ch = iter.next()
      if (ch == '\n') false
      else if (ch == '\r') {
        if (iter.hasNext && iter.head == '\n')
          iter.next()

        false
      }
      else {
        sb append ch
        true
      }
    }
    def hasNext: Boolean = iter.hasNext
    def next(): String = {
      sb.clear()
      while (getc()) { }
      sb.toString
    }
  }

  /** Returns an iterator who returns lines (NOT including newline character(s)).
   * It will treat any of \r\n, \r, or \n as a line separator (longest match) - if
   * you need more refined behavior you can subclass Source#LineIterator directly.
   */
  def getLines(): Iterator[String] = new LineIterator()

  /** Returns `'''true'''` if this source has more characters.
   */
  def hasNext: Boolean = iter.hasNext

  /** Returns next character.
   */
  def next(): Char = positioner.next()
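  // Behaviour sketch (not part of the original source): getLines() strips line
  // terminators and treats "\r\n", "\r", and "\n" uniformly, so
  //
  //   Source.fromString("a\r\nb\rc\n").getLines().toList == List("a", "b", "c")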
@nowarn("cat=deprecation")
260+
class Positioner(encoder: Position) {
261+
def this() = this(RelaxedPosition)
262+
/** the last character returned by next. */
263+
var ch: Char = _
264+
265+
/** position of last character returned by next */
266+
var pos = 0
267+
268+
/** current line and column */
269+
var cline = 1
270+
var ccol = 1
271+
272+
/** default col increment for tabs '\t', set to 4 initially */
273+
var tabinc = 4
274+
275+
def next(): Char = {
276+
ch = iter.next()
277+
pos = encoder.encode(cline, ccol)
278+
ch match {
279+
case '\n' =>
280+
ccol = 1
281+
cline += 1
282+
case '\t' =>
283+
ccol += tabinc
284+
case _ =>
285+
ccol += 1
286+
}
287+
ch
288+
}
289+
}
290+
/** A Position implementation which ignores errors in
291+
* the positions.
292+
*/
293+
@nowarn("cat=deprecation")
294+
object RelaxedPosition extends Position {
295+
private val _ = Source.this
296+
def checkInput(line: Int, column: Int): Unit = ()
297+
}
298+
object RelaxedPositioner extends Positioner(RelaxedPosition) { }
299+
object NoPositioner extends Positioner(Position) {
300+
override def next(): Char = iter.next()
301+
}
302+
def ch: Char = positioner.ch
303+
def pos: Int = positioner.pos
304+
305+
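  // Behaviour sketch (not part of the original source): with the default positioner,
  // ch and pos describe the character most recently returned by next(); pos encodes
  // line and column via the (deprecated) scala.io.Position scheme.
  //
  //   val src = Source.fromString("ab\ncd")
  //   src.next(); src.next(); src.next(); src.next()   // 'a', 'b', '\n', 'c'
  //   src.ch == 'c' && Position.line(src.pos) == 2 && Position.column(src.pos) == 1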
  /** Reports an error message to the output stream `out`.
   *
   * @param pos the source position (line/column)
   * @param msg the error message to report
   * @param out PrintStream to use (optional: defaults to `Console.err`)
   */
  def reportError(
    pos: Int,
    msg: String,
    out: PrintStream = Console.err): Unit =
  {
    nerrors += 1
    report(pos, msg, out)
  }

  private def spaces(n: Int) = List.fill(n)(' ').mkString
  /**
   * @param pos the source position (line/column)
   * @param msg the error message to report
   * @param out PrintStream to use
   */
  def report(pos: Int, msg: String, out: PrintStream): Unit = {
    val line = Position line pos
    val col = Position column pos

    out println "%s:%d:%d: %s%s%s^".format(descr, line, col, msg, lineNum(line), spaces(col - 1))
  }

  /**
   * @param pos the source position (line/column)
   * @param msg the warning message to report
   * @param out PrintStream to use (optional: defaults to `Console.out`)
   */
  def reportWarning(
    pos: Int,
    msg: String,
    out: PrintStream = Console.out): Unit =
  {
    nwarnings += 1
    report(pos, "warning! " + msg, out)
  }
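  // Usage sketch (not part of the original source): reportError goes through report()
  // and bumps the error counter; the output stream defaults to Console.err.
  //
  //   val src = Source.fromString("hello\n") withDescription "inline"
  //   src.reportError(Position.encode(1, 2), "unexpected character")
  //   src.nerrors == 1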
  private[this] var resetFunction: () => Source = null
  private[this] var closeFunction: () => Unit = null
  private[this] var positioner: Positioner = RelaxedPositioner

  def withReset(f: () => Source): this.type = {
    resetFunction = f
    this
  }
  def withClose(f: () => Unit): this.type = {
    closeFunction = f
    this
  }
  def withDescription(text: String): this.type = {
    descr = text
    this
  }
  /** Change or disable the positioner. */
  def withPositioning(on: Boolean): this.type = {
    positioner = if (on) RelaxedPositioner else NoPositioner
    this
  }
  def withPositioning(pos: Positioner): this.type = {
    positioner = pos
    this
  }

  /** The close() method closes the underlying resource. */
  def close(): Unit = {
    if (closeFunction != null) closeFunction()
  }

  /** The reset() method creates a fresh copy of this Source. */
  def reset(): Source =
    if (resetFunction != null) resetFunction()
    else throw new UnsupportedOperationException("Source's reset() method was not set.")
}
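// Behaviour sketch (not part of the original source): reset() re-creates the Source
// through the function installed with withReset; the factory methods in the companion
// object install one, so a consumed Source can be rebuilt.
//
//   val src = Source.fromString("scala")
//   src.mkString                    // exhausts the iterator
//   src.reset().mkString == "scala" // a fresh copy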
