Address scala 2_12.15 deprecation, advance to scala 2_12.19 (#701)
a0x8o authored Sep 6, 2024
1 parent 36bf612 commit b0b8530
Showing 7 changed files with 134 additions and 149 deletions.
196 changes: 95 additions & 101 deletions .github/workflows/staging-release.yml
@@ -1,105 +1,99 @@
name: Push to staging repositories
on:
workflow_dispatch:
inputs:
release-tag:
description: "Git tag for release"
required: true
spark-version:
description: "Spark version to build against"
default: "3.5.1"
scala-version:
description: "Scala version to use when building Glow"
default: "2.12.15"
java-version:
description: "Java version to use when building Glow"
default: "8"
push-python:
description: "If true, Python artifacts will be pushed to Test PyPI"
type: boolean
default: true
workflow_dispatch:
inputs:
release-tag:
description: "Git tag for release"
required: true
spark-version:
description: "Spark version to build against"
default: "3.5.1"
scala-version:
description: "Scala version to use when building Glow"
default: "2.12.19"
java-version:
description: "Java version to use when building Glow"
default: "8"
push-python:
description: "If true, Python artifacts will be pushed to Test PyPI"
type: boolean
default: true

jobs:
push-to-staging:
runs-on: ubuntu-latest
environment: Staging
permissions:
id-token: write # Necessary to fetch a token for PyPI publishing
defaults:
run:
shell: bash -el {0}
env:
SPARK_VERSION: ${{ inputs.spark-version }}
SCALA_VERSION: ${{ inputs.scala-version }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: refs/tags/${{ inputs.release-tag }}

- name: Set up Java
uses: actions/setup-java@v4
with:
distribution: "adopt"
java-version: ${{ inputs.java-version }}

- name: Install Conda
uses: conda-incubator/setup-miniconda@v3
with:
conda-solver: libmamba
environment-file: python/environment.yml
activate-environment: glow

- name: Import GPG Key
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}

- name: Set Sonatype credentials
run: |
cat <<EOF >>sonatype.sbt
credentials += Credentials("Sonatype Nexus Repository Manager",
"oss.sonatype.org",
"${{ secrets.SONATYPE_USERNAME }}",
"${{ secrets.SONATYPE_PASSWORD }}")
EOF
- name: Install correct PySpark version
run: pip install pyspark==${{ inputs.spark-version }}

- name: Run tests
run: sbt clean core/checkNoSnapshotDependencies core/test python/test docs/test exit

- name: Push Scala artifacts
run: sbt publishSigned sonatypePrepare sonatypeBundleUpload sonatypeClose exit

- name: Test staged artifact
run: sbt stagedRelease/test exit

- name: Build whl, sdist, and assembly jar
run: |
bin/build --scala --python
(cd python && python setup.py sdist)
- name: Publish package distributions to TestPyPI
uses: pypa/gh-action-pypi-publish@release/v1
if: ${{ inputs.push-python }}
with:
repository-url: https://test.pypi.org/legacy/
packages-dir: python/dist/

- name: Upload whl and assembly jar
uses: actions/upload-artifact@v4
with:
name: binaries-${{ github.job }}
path: |
core/target/**/glow*assembly*.jar
python/dist/*.whl
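
For reference, a staging run with these dispatch inputs can be started from the command line. The following is a minimal sketch using the GitHub CLI (`gh`); the tag value is a placeholder, and it assumes the caller is allowed to dispatch workflows in this repository:

```
# Dispatch the staging release workflow with explicit inputs.
# "v2.0.1" is a placeholder; substitute the real release tag.
gh workflow run staging-release.yml \
  --ref main \
  -f release-tag=v2.0.1 \
  -f spark-version=3.5.1 \
  -f scala-version=2.12.19 \
  -f java-version=8 \
  -f push-python=true
```

Inputs left out fall back to the defaults declared above, so only `release-tag` strictly needs to be supplied.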
49 changes: 20 additions & 29 deletions .github/workflows/tests.yml
@@ -8,7 +8,7 @@ on:
branches:
- main
tags:
- '**'
- "**"
schedule:
- cron: "0 0 * * *"
workflow_dispatch:
@@ -37,16 +37,14 @@ jobs:
uses: actions/cache@v4
with:
path: /usr/share/miniconda/envs/glow
key:
conda-${{ hashFiles('python/environment.yml') }}-${{ env.CACHE_NUMBER }}
key: conda-${{ hashFiles('python/environment.yml') }}-${{ env.CACHE_NUMBER }}
env:
# Increase this value to reset cache if etc/example-environment.yml has not changed
CACHE_NUMBER: 0
id: cache

- name: Update environment
run:
conda env update -n glow -f python/environment.yml
run: conda env update -n glow -f python/environment.yml
if: steps.cache.outputs.cache-hit != 'true'

- name: Install Certs
@@ -56,17 +54,15 @@ jobs:
run: (cd docs && make linkcheck)
continue-on-error: true


spark-tests:
runs-on: ubuntu-latest
defaults:
run:
shell: bash -el {0}
strategy:
matrix:

spark_version: [3.4.1, 3.5.1]
scala_version: [2.12.15]
scala_version: [2.12.19]
env:
SPARK_VERSION: ${{ matrix.spark_version }}
SCALA_VERSION: ${{ matrix.scala_version }}
@@ -77,9 +73,9 @@ jobs:
- name: Set up Java
uses: actions/setup-java@v4
with:
distribution: 'adopt'
java-version: '8'
cache: 'sbt'
distribution: "adopt"
java-version: "8"
cache: "sbt"
cache-dependency-path: |
build.sbt
plugins.sbt
@@ -94,27 +90,25 @@ jobs:
uses: actions/cache@v4
with:
path: /usr/share/miniconda/envs/glow
key:
conda-${{ hashFiles('python/environment.yml') }}-${{ env.CACHE_NUMBER }}
key: conda-${{ hashFiles('python/environment.yml') }}-${{ env.CACHE_NUMBER }}
env:
# Increase this value to reset cache if etc/example-environment.yml has not changed
CACHE_NUMBER: 0
id: cache

- name: Update environment
run:
conda env update -n glow -f python/environment.yml
run: conda env update -n glow -f python/environment.yml
if: steps.cache.outputs.cache-hit != 'true'

- name: Install correct PySpark version
run: pip install pyspark==${{ matrix.spark_version }}

- name: Scala tests
run: sbt compile coverage core/test core/coverageReport exit

- name: Python tests
run: sbt python/test exit

- name: Docs tests
run: sbt docs/test exit

@@ -153,8 +147,7 @@ jobs:
runs-on: ubuntu-latest
needs: spark-tests
steps:
- run: echo "Spark tests passed!"

spark-4-tests:
runs-on: ubuntu-latest
@@ -164,17 +157,17 @@ jobs:
shell: bash -el {0}
env:
SPARK_VERSION: 4.0.0-SNAPSHOT
SCALA_VERSION: 2.13.12
SCALA_VERSION: 2.13.14
steps:
- name: Checkout
uses: actions/checkout@v4

- name: Set up Java
uses: actions/setup-java@v4
with:
distribution: 'adopt'
java-version: '17'
cache: 'sbt'
distribution: "adopt"
java-version: "17"
cache: "sbt"
cache-dependency-path: |
build.sbt
plugins.sbt
@@ -189,16 +182,14 @@ jobs:
uses: actions/cache@v3
with:
path: /usr/share/miniconda/envs/glow-spark4
key:
conda-${{ hashFiles('python/spark-4-environment.yml') }}-${{ env.CACHE_NUMBER }}
key: conda-${{ hashFiles('python/spark-4-environment.yml') }}-${{ env.CACHE_NUMBER }}
env:
# Increase this value to reset cache if etc/example-environment.yml has not changed
CACHE_NUMBER: 0
id: cache

- name: Update environment
run:
conda env update -n glow-spark4 -f python/spark-4-environment.yml
run: conda env update -n glow-spark4 -f python/spark-4-environment.yml
if: steps.cache.outputs.cache-hit != 'true'

- name: Clone Spark (for PySpark source)
@@ -209,10 +200,10 @@ jobs:

- name: Uninstall PySpark
run: pip uninstall -y pyspark

- name: Python tests
run: EXTRA_PYTHON_PATH=$HOME/spark/python sbt python/test exit

- name: Docs tests
run: EXTRA_PYTHON_PATH=$HOME/spark/python sbt docs/test exit

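To reproduce one cell of the `spark-tests` matrix above locally, the workflow steps reduce roughly to the following sketch (versions are taken from the matrix; `glow` is the conda environment defined in `python/environment.yml`):

```
# Approximate local equivalent of one spark-tests matrix cell.
conda env update -n glow -f python/environment.yml
conda activate glow
pip install pyspark==3.5.1          # match the matrix spark_version
export SPARK_VERSION=3.5.1 SCALA_VERSION=2.12.19
sbt compile coverage core/test core/coverageReport exit
sbt python/test exit
sbt docs/test exit
```
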
4 changes: 2 additions & 2 deletions README.md
@@ -51,7 +51,7 @@ tools and libraries implemented as command line tools or Pandas functions.
This project is built using [sbt](https://www.scala-sbt.org/1.0/docs/Setup.html) and Java 8.

To build and run Glow, you must [install conda](https://docs.conda.io/en/latest/miniconda.html) and
activate the environment in `python/environment.yml`.
```
conda env create -f python/environment.yml
conda activate glow
```

@@ -64,7 +64,7 @@ conda env update -f python/environment.yml

Start an sbt shell using the `sbt` command.

> **FYI**: The following SBT projects are built on Spark 3.5.1/Scala 2.12.15 by default. To change the Spark version and
> **FYI**: The following SBT projects are built on Spark 3.5.1/Scala 2.12.19 by default. To change the Spark version and
Scala version, set the environment variables `SPARK_VERSION` and `SCALA_VERSION`.
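
For example, to run the core tests against a different Spark/Scala pair in a single invocation (values shown are illustrative):

```
SPARK_VERSION=3.4.1 SCALA_VERSION=2.12.19 sbt core/test
```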

To compile the main code:
6 changes: 3 additions & 3 deletions build.sbt
@@ -9,7 +9,7 @@ import sbt.nio.Keys._

// Scala version used by DBR 13.3 LTS and 14.0
lazy val scala212 = "2.12.19"
lazy val scala213 = "2.13.12"
lazy val scala213 = "2.13.14"

lazy val spark3 = "3.5.1"
lazy val spark4 = "4.0.0-SNAPSHOT"
@@ -172,13 +172,13 @@ ThisBuild / testCoreDependencies := Seq(
"org.apache.spark" %% "spark-core" % sparkVersion.value % "test" classifier "tests",
"org.apache.spark" %% "spark-mllib" % sparkVersion.value % "test" classifier "tests",
"org.apache.spark" %% "spark-sql" % sparkVersion.value % "test" classifier "tests",
"org.xerial" % "sqlite-jdbc" % "3.45.3.0" % "test"
"org.xerial" % "sqlite-jdbc" % "3.45.2.0" % "test"
)

lazy val coreDependencies = settingKey[Seq[ModuleID]]("coreDependencies")
ThisBuild / coreDependencies := (providedSparkDependencies.value ++ testCoreDependencies.value ++ Seq(
"org.seqdoop" % "hadoop-bam" % "7.10.0",
"org.slf4j" % "slf4j-api" % "2.0.13",
"org.slf4j" % "slf4j-api" % "2.0.12",
"org.jdbi" % "jdbi" % "2.78",
"com.github.broadinstitute" % "picard" % "2.27.5",
"org.apache.commons" % "commons-lang3" % "3.14.0",
