diff --git a/.github/workflows/scala.yml b/.github/workflows/scala.yml new file mode 100644 index 00000000..fbeee571 --- /dev/null +++ b/.github/workflows/scala.yml @@ -0,0 +1,17 @@ +name: Scala CI + +on: [push] + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + - name: Set up JDK 1.8 + uses: actions/setup-java@v1 + with: + java-version: 1.8 + - name: Run tests + run: sbt test diff --git a/.travis.yml b/.travis.yml index 2a043c46..25972266 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,16 +17,15 @@ before_cache: matrix: include: - jdk: oraclejdk8 - env: TEST_SPARK_VERSION="2.4.2" LUCENERDD_ANALYZER_NAME="en" LUCENERDD_LINKER_METHOD="cartesian" + env: LUCENERDD_ANALYZER_NAME="en" LUCENERDD_LINKER_METHOD="cartesian" - jdk: openjdk8 - env: TEST_SPARK_VERSION="2.4.2" LUCENERDD_ANALYZER_NAME="en" LUCENERDD_LINKER_METHOD="collectbroadcast" + env: LUCENERDD_ANALYZER_NAME="en" LUCENERDD_LINKER_METHOD="collectbroadcast" - jdk: openjdk8 - env: TEST_SPARK_VERSION="2.4.2" LUCENERDD_ANALYZER_NAME="whitespace" LUCENERDD_LINKER_METHOD="cartesian" + env: LUCENERDD_ANALYZER_NAME="whitespace" LUCENERDD_LINKER_METHOD="cartesian" - jdk: oraclejdk8 - env: TEST_SPARK_VERSION="2.4.2" LUCENERDD_ANALYZER_NAME="whitespace" LUCENERDD_LINKER_METHOD="collectbroadcast" + env: LUCENERDD_ANALYZER_NAME="whitespace" LUCENERDD_LINKER_METHOD="collectbroadcast" script: - - sbt ++$TRAVIS_SCALA_VERSION clean update - -Dlucenerdd.spatial.linker.method=${LUCENE_SPATIAL_LINKER_METHOD} -test + - sbt ++$TRAVIS_SCALA_VERSION -Dlucenerdd.linker.method=${LUCENERDD_LINKER_METHOD} clean update test - sbt ++$TRAVIS_SCALA_VERSION scalastyle - sbt ++$TRAVIS_SCALA_VERSION assembly - travis_wait 30 sbt ++$TRAVIS_SCALA_VERSION clean coverage test coverageReport diff --git a/build.sbt b/build.sbt index c7a7f74b..4050268c 100644 --- a/build.sbt +++ b/build.sbt @@ -79,32 +79,20 @@ pomExtra := credentials += Credentials(Path.userHome / ".sbt" / ".credentials") -val 
luceneV = "8.0.0" - -spName := "zouzias/spark-lucenerdd" -sparkVersion := "2.4.2" -spShortDescription := "Spark RDD with Lucene's query capabilities" -sparkComponents ++= Seq("core", "sql", "mllib") -spAppendScalaVersion := true -// This is necessary because of how we explicitly specify Spark dependencies -// for tests rather than using the sbt-spark-package plugin to provide them. -spIgnoreProvided := true - -val testSparkVersion = settingKey[String]("The version of Spark to test against.") - -testSparkVersion := sys.props.get("spark.testVersion").getOrElse(sparkVersion.value) +val luceneV = "8.4.0" +val sparkVersion = "2.4.4" // scalastyle:off -val scalactic = "org.scalactic" %% "scalactic" % "3.0.7" -val scalatest = "org.scalatest" %% "scalatest" % "3.0.7" % "test" +val scalactic = "org.scalactic" %% "scalactic" % "3.1.0" +val scalatest = "org.scalatest" %% "scalatest" % "3.1.0" % "test" -val joda_time = "joda-time" % "joda-time" % "2.10.1" -val algebird = "com.twitter" %% "algebird-core" % "0.13.5" -val joda_convert = "org.joda" % "joda-convert" % "2.2.0" +val joda_time = "joda-time" % "joda-time" % "2.10.5" +val algebird = "com.twitter" %% "algebird-core" % "0.13.6" +val joda_convert = "org.joda" % "joda-convert" % "2.2.1" val spatial4j = "org.locationtech.spatial4j" % "spatial4j" % "0.7" -val typesafe_config = "com.typesafe" % "config" % "1.3.3" +val typesafe_config = "com.typesafe" % "config" % "1.3.4" val lucene_facet = "org.apache.lucene" % "lucene-facet" % luceneV val lucene_analyzers = "org.apache.lucene" % "lucene-analyzers-common" % luceneV @@ -135,9 +123,10 @@ libraryDependencies ++= Seq( ) libraryDependencies ++= Seq( - "org.apache.spark" %% "spark-core" % testSparkVersion.value % "test" force(), - "org.apache.spark" %% "spark-sql" % testSparkVersion.value % "test" force(), - "com.holdenkarau" %% "spark-testing-base" % s"2.4.0_0.11.0" % "test" intransitive(), + "org.apache.spark" %% "spark-core" % sparkVersion % "provided", + "org.apache.spark" %% 
"spark-sql" % sparkVersion % "provided", + "org.apache.spark" %% "spark-mllib" % sparkVersion % "provided", + "com.holdenkarau" %% "spark-testing-base" % s"2.4.3_0.12.0" % "test" intransitive(), "org.scala-lang" % "scala-library" % scalaVersion.value % "compile" ) diff --git a/deployToSonartype.md b/deployToSonartype.md new file mode 100644 index 00000000..edb371bf --- /dev/null +++ b/deployToSonartype.md @@ -0,0 +1,25 @@ +## Setup + +# Add `sonatype.sbt` file under `~/.sbt/1.0/` folder with contents + +``` +credentials += Credentials("Sonatype Nexus Repository Manager", + "oss.sonatype.org", + "zouzias", + "PASSWORD_HERE") +``` + +## Run sbt release to release signed artifacts for both Scala 2.10 and 2.11 + +``` +sbt release +``` + +## Then, git checkout v0.X.X to the release tag first, and then type + +``` +sbt sonatypeRelease +``` + +## This will allow sonatype to release the artifacts to maven central. +## An alternative is to browse to https://oss.sonatype.org and do it manually diff --git a/deployToSonartype.sh b/deployToSonartype.sh deleted file mode 100644 index 8ea75f35..00000000 --- a/deployToSonartype.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - - -# Run sbt release to release signed both 2.10 and 2.11 -sbt release - - -# Then, git checkout v0.X.X to the release tag first, and then type - -sbt sonatypeRelease - -# This will allow sonatype to release the artifacts to maven central. 
-# An alternative is to browse to https://oss.sonatype.org and do it manually diff --git a/project/build.properties b/project/build.properties index 8e682c52..00b48d97 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.18 +sbt.version=1.3.6 diff --git a/project/plugins.sbt b/project/plugins.sbt index e3447af7..50dd37ab 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -17,22 +17,20 @@ resolvers += "bintray-spark-packages" at "https://dl.bintray.com/spark-packages/maven/" -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.7.0") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") -addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.4.0") +addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.5.0") -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.10") -addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.11") +addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.12") addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1") -addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.2.6") +addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.2.7") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "1.1") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.5") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.1") - -addSbtPlugin("org.spark-packages" % "sbt-spark-package" % "0.2.6") +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2") diff --git a/spark-shell.sh b/spark-shell.sh index 82789320..d4b74db8 100755 --- a/spark-shell.sh +++ b/spark-shell.sh @@ -6,7 +6,7 @@ CURRENT_DIR=`pwd` SPARK_LUCENERDD_VERSION=`cat version.sbt | awk '{print $5}' | xargs` # You should have downloaded this spark version under your ${HOME} -SPARK_VERSION="2.4.0" +SPARK_VERSION="2.4.4" echo "===============================================" echo "Loading LuceneRDD with version ${SPARK_LUCENERDD_VERSION}"