diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml
index 6e7ec32..c902d90 100644
--- a/.github/workflows/build-test.yml
+++ b/.github/workflows/build-test.yml
@@ -18,9 +18,9 @@ jobs:
     uses: playframework/.github/.github/workflows/cmd.yml@v3
     with:
       java: 17, 11, 8
-      scala: 2.12.20
+      scala: 2.12.20, 3.3.4
       cmd: |
-        sbt ++$MATRIX_SCALA test ^scripted
+        sbt ++$MATRIX_SCALA test scripted

   finish:
     name: Finish
diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml
index 9a21f0f..69cedc4 100644
--- a/.github/workflows/format.yml
+++ b/.github/workflows/format.yml
@@ -18,6 +18,6 @@ jobs:
           persist-credentials: false

       - name: Check project is formatted
-        uses: jrouly/scalafmt-native-action@v3
+        uses: jrouly/scalafmt-native-action@v4
         with:
           arguments: '--list --mode diff-ref=origin/main'
diff --git a/.scalafmt.conf b/.scalafmt.conf
index dc3ec7d..b947232 100644
--- a/.scalafmt.conf
+++ b/.scalafmt.conf
@@ -24,3 +24,8 @@ align.tokens."+" = [
     ]
   }
 ]
+fileOverride {
+  "glob:**/scala-3/**" {
+    runner.dialect = scala3
+  }
+}
diff --git a/build.sbt b/build.sbt
index 0f3d7ec..79f6171 100644
--- a/build.sbt
+++ b/build.sbt
@@ -11,6 +11,10 @@ developers += Developer(
   url("https://github.com/playframework")
 )

+lazy val scala212 = "2.12.20"
+lazy val scala3 = "3.3.4"
+ThisBuild / crossScalaVersions := Seq(scala212, scala3)
+
 libraryDependencies ++= Seq(
   "org.webjars" % "webjars-locator-core" % "0.59",
   "org.specs2" %% "specs2-core" % "4.20.8" % "test",
@@ -26,3 +30,17 @@ Global / onLoad := (Global / onLoad).value.andThen { s =>
   dynverAssertTagVersion.value
   s
 }
+
+(pluginCrossBuild / sbtVersion) := {
+  scalaBinaryVersion.value match {
+    case "2.12" => "1.10.2"
+    case _      => "2.0.0-M2"
+  }
+}
+
+scalacOptions := {
+  CrossVersion.partialVersion(scalaVersion.value) match {
+    case Some((2, major)) => Seq("-Xsource:3")
+    case _                => Seq.empty
+  }
+}
diff --git a/project/build.properties b/project/build.properties
index ee4c672..0b699c3 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=1.10.1
+sbt.version=1.10.2
diff --git a/sbt-web-tester/project/build.properties b/sbt-web-tester/project/build.properties
index ee4c672..0b699c3 100644
--- a/sbt-web-tester/project/build.properties
+++ b/sbt-web-tester/project/build.properties
@@ -1 +1 @@
-sbt.version=1.10.1
+sbt.version=1.10.2
diff --git a/src/main/scala-2.12/com/typesafe/sbt/PluginCompat.scala b/src/main/scala-2.12/com/typesafe/sbt/PluginCompat.scala
new file mode 100644
index 0000000..e1ac30f
--- /dev/null
+++ b/src/main/scala-2.12/com/typesafe/sbt/PluginCompat.scala
@@ -0,0 +1,31 @@
+package com.typesafe.sbt
+
+import sbt.*
+import sbt.Keys.Classpath
+import xsbti.FileConverter
+
+import java.nio.file.{ Path => NioPath }
+
+private[sbt] object PluginCompat {
+  type FileRef = java.io.File
+  type Out = java.io.File
+
+  def toNioPath(a: Attributed[File])(implicit conv: FileConverter): NioPath =
+    a.data.toPath
+  def toFile(a: Attributed[File])(implicit conv: FileConverter): File =
+    a.data
+  def toNioPaths(cp: Seq[Attributed[File]])(implicit conv: FileConverter): Vector[NioPath] =
+    cp.map(_.data.toPath()).toVector
+  def toFiles(cp: Seq[Attributed[File]])(implicit conv: FileConverter): Vector[File] =
+    cp.map(_.data).toVector
+  def toSet[A](iterable: Iterable[A]): Set[A] = iterable.to[Set]
+  def classpathToFiles(classpath: Classpath)(implicit conv: FileConverter): Seq[FileRef] =
+    classpath.files
+  def toKey(settingKey: SettingKey[String]): AttributeKey[String] = settingKey.key
+  def toNioPath(f: File)(implicit conv: FileConverter): NioPath =
+    f.toPath
+  def toFile(f: File)(implicit conv: FileConverter): File = f
+  def toFileRef(f: File)(implicit conv: FileConverter): FileRef = f
+  def selectFirstPredicate: Seq[FileRef] => Boolean = files =>
+    files.forall(_.isFile) && files.map(_.hashString).distinct.size == 1
+}
diff --git a/src/main/scala-3/com/typesafe/sbt/PluginCompat.scala b/src/main/scala-3/com/typesafe/sbt/PluginCompat.scala
new file mode 100644
index 0000000..6205cdf
--- /dev/null
+++ b/src/main/scala-3/com/typesafe/sbt/PluginCompat.scala
@@ -0,0 +1,33 @@
+package com.typesafe.sbt
+
+import java.nio.file.{ Path => NioPath }
+import java.io.{ File => IoFile }
+import sbt.*
+import sbt.Keys.Classpath
+import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFile }
+
+private[sbt] object PluginCompat:
+  type FileRef = HashedVirtualFileRef
+  type Out = VirtualFile
+
+  def toNioPath(a: Attributed[HashedVirtualFileRef])(using conv: FileConverter): NioPath =
+    conv.toPath(a.data)
+  inline def toFile(a: Attributed[HashedVirtualFileRef])(using conv: FileConverter): File =
+    toNioPath(a).toFile
+  def toNioPaths(cp: Seq[Attributed[HashedVirtualFileRef]])(using conv: FileConverter): Vector[NioPath] =
+    cp.map(toNioPath).toVector
+  inline def toFiles(cp: Seq[Attributed[HashedVirtualFileRef]])(using conv: FileConverter): Vector[File] =
+    toNioPaths(cp).map(_.toFile)
+  def toSet[A](iterable: Iterable[A]): Set[A] = iterable.to(Set)
+  inline def classpathToFiles(classpath: Classpath)(using conv: FileConverter): Seq[File] =
+    toFiles(classpath.to(Seq))
+  inline def toKey(settingKey: SettingKey[String]): StringAttributeKey = StringAttributeKey(settingKey.key.label)
+  def toNioPath(hvf: HashedVirtualFileRef)(using conv: FileConverter): NioPath =
+    conv.toPath(hvf)
+  def toFile(hvf: HashedVirtualFileRef)(using conv: FileConverter): File =
+    toNioPath(hvf).toFile
+  inline def toFileRef(file: File)(using conv: FileConverter): FileRef =
+    conv.toVirtualFile(file.toPath)
+  inline def selectFirstPredicate(using conv: FileConverter): Seq[FileRef] => Boolean = files =>
+    files.forall(toFile(_).isFile) && files.map(_.contentHashStr).distinct.size == 1
+end PluginCompat
diff --git a/src/main/scala/com/typesafe/sbt/web/SbtWeb.scala b/src/main/scala/com/typesafe/sbt/web/SbtWeb.scala
index 3b012e4..94beb8c 100644
--- a/src/main/scala/com/typesafe/sbt/web/SbtWeb.scala
+++ b/src/main/scala/com/typesafe/sbt/web/SbtWeb.scala
@@ -1,15 +1,18 @@
 package com.typesafe.sbt.web

-import sbt._
+import sbt.{ Def, given, * }
 import sbt.internal.inc.Analysis
 import sbt.internal.io.Source
-import sbt.Keys._
+import sbt.Keys.*
 import sbt.Defaults.relativeMappings
+import scala.language.implicitConversions
 import org.webjars.WebJarExtractor
 import org.webjars.WebJarAssetLocator.WEBJARS_PATH_PREFIX
 import com.typesafe.sbt.web.pipeline.Pipeline
-import com.typesafe.sbt.web.incremental.{ OpResult, OpSuccess }
-import xsbti.Reporter
+import com.typesafe.sbt.web.incremental.{ OpResult, OpSuccess, toStringInputHasher }
+import xsbti.{ Reporter, FileConverter }
+
+import com.typesafe.sbt.PluginCompat.*

 object Import {

@@ -125,16 +128,16 @@
  * {{{
  *   + src
  *   --+ main
- *   ----+ assets .....(sourceDirectory in Assets)
+ *   ----+ assets .....(Assets / sourceDirectory)
  *   ------+ js
- *   ----+ public .....(resourceDirectory in Assets)
+ *   ----+ public .....(Assets / resourceDirectory)
  *   ------+ css
  *   ------+ images
  *   ------+ js
  *   --+ test
- *   ----+ assets .....(sourceDirectory in TestAssets)
+ *   ----+ assets .....(TestAssets / sourceDirectory)
  *   ------+ js
- *   ----+ public .....(resourceDirectory in TestAssets)
+ *   ----+ public .....(TestAssets / resourceDirectory)
  *   ------+ css
  *   ------+ images
  *   ------+ js
@@ -178,12 +181,13 @@ object SbtWeb extends AutoPlugin {

   override def requires = sbt.plugins.JvmPlugin

-  import autoImport._
-  import WebKeys._
+  import autoImport.*
+  import WebKeys.*

-  override def projectConfigurations = super.projectConfigurations ++ Seq(Assets, TestAssets, Plugin)
+  override def projectConfigurations: Seq[Configuration] =
+    super.projectConfigurations ++ Seq(Assets, TestAssets, Plugin)

-  override def buildSettings: Seq[Def.Setting[_]] = Seq(
+  override def buildSettings: Seq[Def.Setting[?]] = Seq(
     (Plugin / nodeModuleDirectory) := (Plugin / target).value / "node-modules",
     (Plugin / nodeModules / webJarsCache) := (Plugin / target).value / "webjars-plugin.cache",
     (Plugin / webJarsClassLoader) := SbtWeb.getClass.getClassLoader,
@@ -199,7 +203,7 @@
     )
   ) ++ inConfig(Plugin)(nodeModulesSettings)

-  override def projectSettings: Seq[Setting[_]] = Seq(
+  override def projectSettings: Seq[Setting[?]] = Seq(
     reporter := new CompileProblems.LoggerReporter(5, streams.value.log),
     webTarget := target.value / "web",
     (Assets / sourceDirectory) := (Compile / sourceDirectory).value / "assets",
@@ -230,8 +234,8 @@
     (TestAssets / webJars / webJarsCache) := webTarget.value / "web-modules" / "webjars-test.cache",
     (Assets / nodeModules / webJarsCache) := webTarget.value / "node-modules" / "webjars-main.cache",
     (TestAssets / nodeModules / webJarsCache) := webTarget.value / "node-modules" / "webjars-test.cache",
-    (Assets / webJarsClassLoader) := classLoader((Compile / dependencyClasspath).value),
-    (TestAssets / webJarsClassLoader) := classLoader((Test / dependencyClasspath).value),
+    (Assets / webJarsClassLoader) := classLoader((Compile / dependencyClasspath).value, fileConverter.value),
+    (TestAssets / webJarsClassLoader) := classLoader((Test / dependencyClasspath).value, fileConverter.value),
     assets := (Assets / assets).value,
     (Compile / packageBin / mappings) ++= {
       if ((Assets / addExportedMappingsToPackageBinMappings).value) {
@@ -266,19 +270,20 @@
     allPipelineStages := Pipeline.chain(pipelineStages).value,
     pipeline := allPipelineStages.value((Assets / mappings).value),
     deduplicators := Nil,
-    pipeline := deduplicateMappings(pipeline.value, deduplicators.value),
+    pipeline := deduplicateMappings(pipeline.value, deduplicators.value, fileConverter.value),
     stagingDirectory := webTarget.value / "stage",
     stage := syncMappings(
       streams.value.cacheStoreFactory.make("sync-stage"),
       pipeline.value,
-      stagingDirectory.value
+      stagingDirectory.value,
+      fileConverter.value
     )
   ) ++
     inConfig(Assets)(unscopedAssetSettings) ++ inConfig(Assets)(nodeModulesSettings) ++
     inConfig(TestAssets)(unscopedAssetSettings) ++ inConfig(TestAssets)(nodeModulesSettings) ++
     packageSettings

-  val unscopedAssetSettings: Seq[Setting[_]] = Seq(
+  val unscopedAssetSettings: Seq[Setting[?]] = Seq(
     includeFilter := GlobFilter("*"),
     sourceGenerators := Nil,
     managedSourceDirectories := Nil,
@@ -286,7 +291,7 @@
     unmanagedSourceDirectories := Seq(sourceDirectory.value),
     unmanagedSources := unmanagedSourceDirectories.value
       .descendantsExcept(includeFilter.value, excludeFilter.value)
-      .get,
+      .get(),
     sourceDirectories := managedSourceDirectories.value ++ unmanagedSourceDirectories.value,
     sources := managedSources.value ++ unmanagedSources.value,
     (sources / mappings) := relativeMappings(sources, sourceDirectories).value,
@@ -296,7 +301,7 @@
     unmanagedResourceDirectories := Seq(resourceDirectory.value),
     unmanagedResources := unmanagedResourceDirectories.value
       .descendantsExcept(includeFilter.value, excludeFilter.value)
-      .get,
+      .get(),
     resourceDirectories := managedResourceDirectories.value ++ unmanagedResourceDirectories.value,
     resources := managedResources.value ++ unmanagedResources.value,
     (resources / mappings) := relativeMappings(resources, resourceDirectories).value,
@@ -323,11 +328,12 @@
     allPipelineStages := Pipeline.chain(pipelineStages).value,
     mappings := allPipelineStages.value(mappings.value),
     deduplicators := Nil,
-    mappings := deduplicateMappings(mappings.value, deduplicators.value),
+    mappings := deduplicateMappings(mappings.value, deduplicators.value, fileConverter.value),
     assets := syncMappings(
       streams.value.cacheStoreFactory.make(s"sync-assets-" + configuration.value.name),
       mappings.value,
-      public.value
+      public.value,
+      fileConverter.value
     ),
     exportedMappings := createWebJarMappings.value,
     addExportedMappingsToPackageBinMappings := true,
@@ -384,7 +390,8 @@
       syncMappings(
         streams.value.cacheStoreFactory.make("sync-exported-assets-" + configuration.value.name),
         exportedMappings.value,
-        syncTargetDir
+        syncTargetDir,
+        fileConverter.value
       )
     }
     else
@@ -395,11 +402,12 @@
    * Create package mappings for assets in the webjar format. Use the webjars path prefix and exclude all web module
    * assets.
    */
-  def createWebJarMappings: Def.Initialize[Task[Seq[(File, String)]]] = Def.task {
+  def createWebJarMappings: Def.Initialize[Task[Seq[(FileRef, String)]]] = Def.task {
     def webModule(file: File) = webModuleDirectories.value.exists(dir => IO.relativize(dir, file).isDefined)
+    implicit val fc: FileConverter = fileConverter.value
     mappings.value flatMap {
-      case (file, path) if webModule(file) => None
-      case (file, path)                    => Some(file -> (webJarsPathPrefix.value + path))
+      case (file, path) if webModule(toFile(file)) => None
+      case (file, path)                            => Some(file -> (webJarsPathPrefix.value + path))
     }
   }
@@ -427,7 +435,8 @@
     if (state.value.get(disableExportedProducts).getOrElse(false)) {
       Seq.empty
     } else {
-      Seq(Attributed.blank(exportTask.value).put(webModulesLib.key, moduleName.value))
+      implicit val fc: FileConverter = fileConverter.value
+      Seq(Attributed.blank(toFileRef(exportTask.value)).put(toKey(webModulesLib), moduleName.value))
     }
   }

@@ -441,7 +450,7 @@
   /**
    * Create package mappings for all assets, adding the optional prefix.
    */
-  def packageAssetsMappings = Def.task {
+  def packageAssetsMappings: Def.Initialize[Task[Seq[(FileRef, String)]]] = Def.task {
     val prefix = packagePrefix.value
     (Defaults.ConfigGlobal / pipeline).value map { case (file, path) =>
       file -> (prefix + path)
@@ -451,15 +460,15 @@
   /**
    * Get module names for all internal web module dependencies on the classpath.
    */
-  def getInternalWebModules(conf: Configuration) = Def.task {
-    (conf / internalDependencyClasspath).value.flatMap(_.get(WebKeys.webModulesLib.key))
+  def getInternalWebModules(conf: Configuration): Def.Initialize[Task[Seq[String]]] = Def.task {
+    (conf / internalDependencyClasspath).value.flatMap(_.get(toKey(WebKeys.webModulesLib)))
   }

   /**
    * Remove web module dependencies from a classpath. This is a helper method for Play 2.3 transitions.
    */
   def classpathWithoutAssets(classpath: Classpath): Classpath = {
-    classpath.filter(_.get(WebKeys.webModulesLib.key).isEmpty)
+    classpath.filter(_.get(toKey(WebKeys.webModulesLib)).isEmpty)
   }

   def flattenDirectWebModules = Def.task {
@@ -489,8 +498,10 @@
     prefixes.find(s.startsWith).fold(s)(s.stripPrefix)
   }

-  private def classLoader(classpath: Classpath): ClassLoader =
-    new java.net.URLClassLoader(Path.toURLs(classpath.files), null)
+  private def classLoader(classpath: Classpath, conv: FileConverter): ClassLoader = {
+    implicit val fc: FileConverter = conv
+    new java.net.URLClassLoader(Path.toURLs(classpathToFiles(classpath)), null)
+  }

   private def withWebJarExtractor(to: File, cacheFile: File, classLoader: ClassLoader)(
       block: (WebJarExtractor, File) => Unit
@@ -503,14 +514,14 @@
   private def generateNodeWebJars(target: File, cache: File, classLoader: ClassLoader): Seq[File] = {
     withWebJarExtractor(target, cache, classLoader) { (e, to) =>
       e.extractAllNodeModulesTo(to)
-    }.**(AllPassFilter).get
+    }.**(AllPassFilter).get()
   }

   private def generateWebJars(target: File, lib: String, cache: File, classLoader: ClassLoader): Seq[File] = {
     withWebJarExtractor(target / lib, cache, classLoader) { (e, to) =>
       e.extractAllWebJarsTo(to)
     }
-    target.**(AllPassFilter).get
+    target.**(AllPassFilter).get()
   }

   // Mapping deduplication
@@ -529,16 +540,21 @@
    * @return
    *   the (possibly) deduplicated mappings
    */
-  def deduplicateMappings(mappings: Seq[PathMapping], deduplicators: Seq[Deduplicator]): Seq[PathMapping] = {
+  def deduplicateMappings(
+      mappings: Seq[PathMapping],
+      deduplicators: Seq[Deduplicator],
+      conv: FileConverter
+  ): Seq[PathMapping] = {
+    implicit val fc: FileConverter = conv
     if (deduplicators.isEmpty) {
       mappings
     } else {
       mappings.groupBy(_._2 /*path*/ ).toSeq flatMap { grouped =>
         val (path, group) = grouped
         if (group.size > 1) {
-          val files = group.map(_._1)
+          val files = group.map(mapping => toFile(mapping._1))
           val deduplicated = firstResult(deduplicators)(files)
-          deduplicated.fold(group)(file => Seq((file, path)))
+          deduplicated.fold(group)(file => Seq((toFileRef(file), path)))
         } else {
           group
         }
@@ -604,9 +620,15 @@
    * @return
    *   the target value
    */
-  def syncMappings(cacheStore: sbt.util.CacheStore, mappings: Seq[PathMapping], target: File): File = {
+  def syncMappings(
+      cacheStore: sbt.util.CacheStore,
+      mappings: Seq[PathMapping],
+      target: File,
+      conv: FileConverter
+  ): File = {
+    implicit val fc: FileConverter = conv
     val copies = mappings map { case (file, path) =>
-      file -> (target / path)
+      toFile(file) -> (target / path)
     }
     Sync.sync(cacheStore)(copies)
     target
diff --git a/src/main/scala/com/typesafe/sbt/web/incremental/Bytes.scala b/src/main/scala/com/typesafe/sbt/web/incremental/Bytes.scala
index c021dff..e88aeee 100644
--- a/src/main/scala/com/typesafe/sbt/web/incremental/Bytes.scala
+++ b/src/main/scala/com/typesafe/sbt/web/incremental/Bytes.scala
@@ -3,19 +3,19 @@
  */
 package com.typesafe.sbt.web.incremental

-import java.util.Arrays
+import java.util

 /**
  * Wraps a byte array to ensure immutability.
  */
 class Bytes(private[incremental] val arr: Array[Byte]) {
-  override def toString = Arrays.toString(arr)
+  override def toString: String = util.Arrays.toString(arr)
   override def equals(that: Any): Boolean = that match {
     case null         => false
-    case other: Bytes => Arrays.equals(arr, other.arr)
+    case other: Bytes => util.Arrays.equals(arr, other.arr)
     case _            => false
   }
-  override def hashCode: Int = Arrays.hashCode(arr)
+  override def hashCode: Int = util.Arrays.hashCode(arr)
 }

 /**
diff --git a/src/main/scala/com/typesafe/sbt/web/incremental/OpCache.scala b/src/main/scala/com/typesafe/sbt/web/incremental/OpCache.scala
index 8e0dbaa..b5d3003 100644
--- a/src/main/scala/com/typesafe/sbt/web/incremental/OpCache.scala
+++ b/src/main/scala/com/typesafe/sbt/web/incremental/OpCache.scala
@@ -3,16 +3,16 @@
  */
 package com.typesafe.sbt.web.incremental

+import com.typesafe.sbt.PluginCompat.toSet
 import java.io.File
 import sbt.Hash
-import scala.collection.immutable.Set

 /**
  * Cache for recording which operations have successfully completed. Associates a hash of the operations' inputs
  * (OpInputHash) with a record of the files that were accessed by the operation.
  */
 private[incremental] class OpCache(var content: Map[OpInputHash, OpCache.Record] = Map.empty) {
-  import OpCache._
+  import OpCache.*
   def allOpInputHashes: Set[OpInputHash] = content.keySet
   def contains(oih: OpInputHash): Boolean = {
     content.contains(oih)
   }
@@ -20,10 +20,10 @@
   def getRecord(oih: OpInputHash): Option[Record] = {
     content.get(oih)
   }
-  def putRecord(oih: OpInputHash, record: Record) = {
+  def putRecord(oih: OpInputHash, record: Record): Unit = {
     content = content + ((oih, record))
   }
-  def removeRecord(oih: OpInputHash) = {
+  def removeRecord(oih: OpInputHash): Unit = {
     content = content - oih
   }
 }
@@ -74,7 +74,7 @@
    * Remove all operations from the cache that aren't in the given set of operations.
    */
   def vacuumExcept[Op](cache: OpCache, opsToKeep: Seq[Op])(implicit opInputHasher: OpInputHasher[Op]): Unit = {
-    val oihSet: Set[OpInputHash] = opsToKeep.map(opInputHasher.hash).to[Set]
+    val oihSet: Set[OpInputHash] = toSet(opsToKeep.map(opInputHasher.hash))
     for (oih <- cache.allOpInputHashes) yield {
       if (!oihSet.contains(oih)) {
         cache.removeRecord(oih)
@@ -129,6 +129,6 @@
     ops.flatMap { op =>
       val record = cache.getRecord(opInputHasher.hash(op))
       record.fold(Set.empty[File])(_.products)
-    }.toSet
+    }
   }
 }
diff --git a/src/main/scala/com/typesafe/sbt/web/incremental/OpCacheIO.scala b/src/main/scala/com/typesafe/sbt/web/incremental/OpCacheIO.scala
index 1a3f5da..5f47103 100644
--- a/src/main/scala/com/typesafe/sbt/web/incremental/OpCacheIO.scala
+++ b/src/main/scala/com/typesafe/sbt/web/incremental/OpCacheIO.scala
@@ -31,17 +31,18 @@
  */
 private[incremental] object OpCacheProtocol {

-  import sjsonnew._
-  import BasicJsonProtocol._
+  import sjsonnew.*
+  import BasicJsonProtocol.*
   import OpCache.{ FileHash, Record }

-  implicit val fileFormat: JsonFormat[File] = projectFormat[File, String](_.getAbsolutePath, new File(_))
-  implicit val bytesFormat: JsonFormat[Bytes] = projectFormat[Bytes, String](
+  implicit val fileFormat: JsonFormat[File] =
+    BasicJsonProtocol.projectFormat[File, String](_.getAbsolutePath, new File(_))
+  implicit val bytesFormat: JsonFormat[Bytes] = BasicJsonProtocol.projectFormat[Bytes, String](
     bytes => Base64.getEncoder.encodeToString(bytes.arr),
     bytes => new Bytes(Base64.getDecoder.decode(bytes))
   )

-  implicit val opInputHashKeyFormat = JsonKeyFormat[OpInputHash](
+  implicit val opInputHashKeyFormat: JsonKeyFormat[OpInputHash] = JsonKeyFormat[OpInputHash](
     hash => Base64.getEncoder.encodeToString(hash.bytes.arr),
     hashBytes => OpInputHash(Bytes(Base64.getDecoder.decode(hashBytes)))
   )
@@ -87,6 +88,6 @@
   }

   implicit val opCacheFormat: JsonFormat[OpCache] = {
-    projectFormat[OpCache, Map[OpInputHash, Record]](_.content, new OpCache(_))
+    BasicJsonProtocol.projectFormat[OpCache, Map[OpInputHash, Record]](_.content, new OpCache(_))
   }
 }
diff --git a/src/main/scala/com/typesafe/sbt/web/incremental/OpInputHash.scala b/src/main/scala/com/typesafe/sbt/web/incremental/OpInputHash.scala
index e664a03..32fca58 100644
--- a/src/main/scala/com/typesafe/sbt/web/incremental/OpInputHash.scala
+++ b/src/main/scala/com/typesafe/sbt/web/incremental/OpInputHash.scala
@@ -42,6 +42,6 @@
    * Construct an OpInputHash that uses the given hashing logic.
    */
   def apply[Op](f: Op => OpInputHash): OpInputHasher[Op] = new OpInputHasher[Op] {
-    def hash(op: Op) = f(op)
+    def hash(op: Op): OpInputHash = f(op)
   }
 }
diff --git a/src/main/scala/com/typesafe/sbt/web/incremental/OpResult.scala b/src/main/scala/com/typesafe/sbt/web/incremental/OpResult.scala
index 8f53b49..d0a38ca 100644
--- a/src/main/scala/com/typesafe/sbt/web/incremental/OpResult.scala
+++ b/src/main/scala/com/typesafe/sbt/web/incremental/OpResult.scala
@@ -20,4 +20,4 @@ final case class OpSuccess(filesRead: Set[File], filesWritten: Set[File]) extend
 /**
  * An operation that failed.
  */
-final case object OpFailure extends OpResult
+case object OpFailure extends OpResult
diff --git a/src/main/scala/com/typesafe/sbt/web/incremental/package.scala b/src/main/scala/com/typesafe/sbt/web/incremental/package.scala
index 3b962b5..4da2feb 100644
--- a/src/main/scala/com/typesafe/sbt/web/incremental/package.scala
+++ b/src/main/scala/com/typesafe/sbt/web/incremental/package.scala
@@ -4,6 +4,7 @@
 package com.typesafe.sbt.web

 import java.io.File
+import com.typesafe.sbt.PluginCompat.toSet

 /**
  * The incremental task API lets tasks run more quickly when they are called more than once. The idea is to do less work
@@ -99,7 +100,7 @@
     val cache: OpCache = OpCacheIO.fromFile(cacheFile)

     // Before vacuuming the cache, find out what the old cache had produced
-    val allOldProducts = cache.content.values.to[Set].flatMap(_.products)
+    val allOldProducts = toSet(cache.content.values).flatMap(_.products)

     // Clear out any unknown operations from the existing cache
     OpCache.vacuumExcept(cache, ops)
@@ -109,17 +110,17 @@
     val (results: Map[Op, OpResult], finalResult) = runOps(prunedOps)

     // Check returned results are all within the set of given ops
-    val prunedOpsSet: Set[Op] = prunedOps.to[Set]
+    val prunedOpsSet: Set[Op] = toSet(prunedOps)
     val resultOpsSet: Set[Op] = results.keySet
     val unexpectedOps: Set[Op] = resultOpsSet -- prunedOpsSet
-    if (!unexpectedOps.isEmpty) {
+    if (unexpectedOps.nonEmpty) {
       throw new IllegalArgumentException(s"runOps function returned results for unknown ops: $unexpectedOps")
     }

     // Work out what the current valid products are
-    val opsSet: Set[Op] = ops.to[Set]
+    val opsSet: Set[Op] = toSet(ops)
     val oldProductsToKeep = OpCache.productsForOps(cache, opsSet -- prunedOpsSet)
-    val newProducts = results.values.to[Set].flatMap {
+    val newProducts = toSet(results.values).flatMap {
       case OpFailure              => Set.empty[File]
       case OpSuccess(_, products) => products
     }
@@ -147,6 +148,6 @@
    * }
    * }}}
    */
-  implicit def toStringInputHasher[Op] = OpInputHasher[Op](op => OpInputHash.hashString(op.toString))
+  implicit def toStringInputHasher[Op]: OpInputHasher[Op] = OpInputHasher[Op](op => OpInputHash.hashString(op.toString))

 }
diff --git a/src/main/scala/com/typesafe/sbt/web/package.scala b/src/main/scala/com/typesafe/sbt/web/package.scala
index 1dc1402..63ac456 100644
--- a/src/main/scala/com/typesafe/sbt/web/package.scala
+++ b/src/main/scala/com/typesafe/sbt/web/package.scala
@@ -1,5 +1,7 @@
 package com.typesafe.sbt

+import PluginCompat.FileRef
+
 import java.io.File

 package object web {
@@ -7,7 +9,7 @@
   /**
    * Describes a string path relative to a base directory.
    */
-  type PathMapping = (File, String)
+  type PathMapping = (FileRef, String)

   /**
    * A function for possibly selecting a single file from a sequence.
diff --git a/src/sbt-test/sbt-web/asset-pipeline/build.sbt b/src/sbt-test/sbt-web/asset-pipeline/build.sbt
index 3e48e43..cf6262f 100644
--- a/src/sbt-test/sbt-web/asset-pipeline/build.sbt
+++ b/src/sbt-test/sbt-web/asset-pipeline/build.sbt
@@ -8,17 +8,17 @@
 val coffee = taskKey[Seq[File]]("mock coffeescript processing")

 coffee := {
   // translate .coffee files into .js files
-  val sourceDir = (sourceDirectory in Assets).value
+  val sourceDir = (Assets / sourceDirectory).value
   val targetDir = target.value / "cs-plugin"
   val sources = sourceDir ** "*.coffee"
   val mappings = sources pair Path.relativeTo(sourceDir)
   val renamed = mappings map { case (file, path) => file -> path.replaceAll("coffee", "js") }
-  val copies = renamed map { case (file, path) => file -> (resourceManaged in Assets).value / path }
+  val copies = renamed map { case (file, path) => file -> (Assets / resourceManaged).value / path }
   IO.copy(copies)
   copies map (_._2)
 }

-sourceGenerators in Assets += coffee.taskValue
+Assets / sourceGenerators += coffee.taskValue

 val jsmin = taskKey[Pipeline.Stage]("mock js minifier")
diff --git a/src/sbt-test/sbt-web/deduplicate/build.sbt b/src/sbt-test/sbt-web/deduplicate/build.sbt
index 4d46ea4..49f83e3 100644
--- a/src/sbt-test/sbt-web/deduplicate/build.sbt
+++ b/src/sbt-test/sbt-web/deduplicate/build.sbt
@@ -1,3 +1,3 @@
 lazy val root = (project in file(".")).enablePlugins(SbtWeb)

-WebKeys.deduplicators in Assets += SbtWeb.selectFileFrom((sourceDirectory in Assets).value)
+Assets / WebKeys.deduplicators += SbtWeb.selectFileFrom((Assets / sourceDirectory).value)
diff --git a/src/sbt-test/sbt-web/dev-pipeline/build.sbt b/src/sbt-test/sbt-web/dev-pipeline/build.sbt
index c492e74..fdcc2cf 100644
--- a/src/sbt-test/sbt-web/dev-pipeline/build.sbt
+++ b/src/sbt-test/sbt-web/dev-pipeline/build.sbt
@@ -22,4 +22,4 @@
   }
 }

-pipelineStages in Assets := Seq(transform)
+Assets / pipelineStages := Seq(transform)
diff --git a/src/sbt-test/sbt-web/multi-module/build.sbt b/src/sbt-test/sbt-web/multi-module/build.sbt
index f76e512..e1b06df 100644
--- a/src/sbt-test/sbt-web/multi-module/build.sbt
+++ b/src/sbt-test/sbt-web/multi-module/build.sbt
@@ -6,7 +6,7 @@ lazy val b = (project in file("modules/b"))
   .enablePlugins(SbtWeb)
   .dependsOn(c % "compile;test->test", d % "compile;test->test")
   .settings(
-    WebKeys.directWebModules in TestAssets := Nil
+    TestAssets / WebKeys.directWebModules := Nil
   )

 lazy val c = (project in file("modules/c"))
diff --git a/src/sbt-test/sbt-web/multi-module/test b/src/sbt-test/sbt-web/multi-module/test
index b5f53c2..f70d68b 100644
--- a/src/sbt-test/sbt-web/multi-module/test
+++ b/src/sbt-test/sbt-web/multi-module/test
@@ -71,7 +71,7 @@ $ exists modules/e/target/web/public/test/lib/jquery/jquery.js

 # Let's optimize the syncing

-> set trackInternalDependencies in ThisBuild := TrackLevel.TrackIfMissing
+> set ThisBuild / trackInternalDependencies := TrackLevel.TrackIfMissing

 > a/assets

diff --git a/src/sbt-test/sbt-web/package/test b/src/sbt-test/sbt-web/package/test
index 96a6afe..689d52a 100644
--- a/src/sbt-test/sbt-web/package/test
+++ b/src/sbt-test/sbt-web/package/test
@@ -5,7 +5,7 @@ $ exists target/web-project-0.1-web-assets.jar
 $ exists extracted/js/a.js
 $ exists extracted/lib/jquery/jquery.js

-> 'set WebKeys.packagePrefix in Assets := "public/"'
+> 'set Assets / WebKeys.packagePrefix := "public/"'
 > web-assets:package

 $ delete extracted
diff --git a/src/test/scala/com/typesafe/sbt/web/incremental/IncrementalSpec.scala b/src/test/scala/com/typesafe/sbt/web/incremental/IncrementalSpec.scala
index 0a394a4..49dcb72 100644
--- a/src/test/scala/com/typesafe/sbt/web/incremental/IncrementalSpec.scala
+++ b/src/test/scala/com/typesafe/sbt/web/incremental/IncrementalSpec.scala
@@ -269,7 +269,8 @@ class IncrementalSpec extends Specification {
       IO.write(file2, "x")

       var hashPrefix = ""
-      implicit val hasher = OpInputHasher[String](op => OpInputHash.hashString(hashPrefix + op))
+      implicit val hasher: OpInputHasher[String] =
+        OpInputHasher[String](op => OpInputHash.hashString(hashPrefix + op))

       // Cache ops with an initial hash prefix

@@ -580,7 +581,8 @@
       IO.write(file2, "x")

       var hashPrefix = ""
-      implicit val hasher = OpInputHasher[String](op => OpInputHash.hashString(hashPrefix + op))
+      implicit val hasher: OpInputHasher[String] =
+        OpInputHasher[String](op => OpInputHash.hashString(hashPrefix + op))

       // Cache ops with an initial hash prefix

@@ -641,7 +643,7 @@
            "op1" -> OpSuccess(Set.empty, Set(file1)),
            "op2" -> OpSuccess(Set.empty, Set(file2))
          ),
-         Unit
+         ()
        )
      }
      val (outputFiles, _) = syncIncremental(tmpDir, List("op1")) { prunedOps =>
@@ -673,7 +675,7 @@
          Map[String, OpResult](
            "op1" -> OpSuccess(Set(infile), Set(file1, file2))
          ),
-         Unit
+         ()
        )
      }

@@ -709,7 +711,7 @@
            "op1" -> OpSuccess(Set(infile), Set(file1)),
            "op2" -> OpSuccess(Set.empty, Set(file2))
          ),
-         Unit
+         ()
        )
      }

@@ -721,7 +723,7 @@
            "op1" -> OpSuccess(Set(infile), Set.empty),
            "op3" -> OpSuccess(Set.empty, Set(file1, file2))
          ),
-         Unit
+         ()
        )
      }