name: Nightly tests

on:
  schedule:
    # Run every Wednesday at 12:30 UTC
    - cron: '30 12 * * WED'
  workflow_dispatch:
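
# Only one run at a time per workflow and ref: a newly triggered run cancels any run still in progress.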
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
  cancel-in-progress: true

env:
  DEFAULT_PYTHON: '3.12'

jobs:
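  # Reusable workflow producing the version matrices (as JSON job outputs) consumed by the
  # test jobs below via fromJson(). `nightly: true` presumably selects the extended matrix
  # used only for these scheduled runs.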
  get-matrix:
    name: Get version matrix for nightly tests
    uses: ./.github/workflows/get-matrix.yml
    with:
      nightly: true
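
  # Every test job below follows the same pattern: the matrix comes from the corresponding
  # get-matrix output, fail-fast is disabled so one failing combination does not cancel the rest,
  # and with-cache is disabled, presumably so each nightly run resolves dependencies from scratch.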
  tests-core:
    name: Run core tests (spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-core) }}
    uses: ./.github/workflows/test-core.yml
    with:
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-clickhouse:
    name: Run Clickhouse tests (server=${{ matrix.clickhouse-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-clickhouse) }}
    uses: ./.github/workflows/test-clickhouse.yml
    with:
      clickhouse-image: ${{ matrix.clickhouse-image }}
      clickhouse-version: ${{ matrix.clickhouse-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false
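
  # The Greenplum job additionally needs credentials, presumably used by test-greenplum.yml
  # to download Greenplum connector packages from a private repository.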
  tests-greenplum:
    name: Run Greenplum tests (server=${{ matrix.greenplum-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-greenplum) }}
    uses: ./.github/workflows/test-greenplum.yml
    with:
      greenplum-version: ${{ matrix.greenplum-version }}
      spark-version: ${{ matrix.spark-version }}
      package-version: ${{ matrix.package-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false
    secrets:
      GREENPLUM_PACKAGES_USER: ${{ secrets.GREENPLUM_PACKAGES_USER }}
      GREENPLUM_PACKAGES_PASSWORD: ${{ secrets.GREENPLUM_PACKAGES_PASSWORD }}

  tests-hive:
    name: Run Hive tests (spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-hive) }}
    uses: ./.github/workflows/test-hive.yml
    with:
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-kafka:
    name: Run Kafka tests (server=${{ matrix.kafka-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-kafka) }}
    uses: ./.github/workflows/test-kafka.yml
    with:
      kafka-version: ${{ matrix.kafka-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-local-fs:
    name: Run LocalFS tests (spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-local-fs) }}
    uses: ./.github/workflows/test-local-fs.yml
    with:
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-mongodb:
    name: Run MongoDB tests (server=${{ matrix.mongodb-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-mongodb) }}
    uses: ./.github/workflows/test-mongodb.yml
    with:
      mongodb-version: ${{ matrix.mongodb-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-mssql:
    name: Run MSSQL tests (server=${{ matrix.mssql-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-mssql) }}
    uses: ./.github/workflows/test-mssql.yml
    with:
      mssql-version: ${{ matrix.mssql-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-mysql:
    name: Run MySQL tests (server=${{ matrix.mysql-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-mysql) }}
    uses: ./.github/workflows/test-mysql.yml
    with:
      mysql-version: ${{ matrix.mysql-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-oracle:
    name: Run Oracle tests (server=${{ matrix.oracle-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-oracle) }}
    uses: ./.github/workflows/test-oracle.yml
    with:
      oracle-image: ${{ matrix.oracle-image }}
      oracle-version: ${{ matrix.oracle-version }}
      db-name: ${{ matrix.db-name }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-postgres:
    name: Run Postgres tests (server=${{ matrix.postgres-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-postgres) }}
    uses: ./.github/workflows/test-postgres.yml
    with:
      postgres-version: ${{ matrix.postgres-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-teradata:
    name: Run Teradata tests (spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-teradata) }}
    uses: ./.github/workflows/test-teradata.yml
    with:
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-ftp:
    name: Run FTP tests (server=${{ matrix.ftp-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-ftp) }}
    uses: ./.github/workflows/test-ftp.yml
    with:
      ftp-version: ${{ matrix.ftp-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-ftps:
    name: Run FTPS tests (server=${{ matrix.ftps-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-ftps) }}
    uses: ./.github/workflows/test-ftps.yml
    with:
      ftps-version: ${{ matrix.ftps-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-hdfs:
    name: Run HDFS tests (server=${{ matrix.hadoop-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-hdfs) }}
    uses: ./.github/workflows/test-hdfs.yml
    with:
      hadoop-version: ${{ matrix.hadoop-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-s3:
    name: Run S3 tests (server=${{ matrix.minio-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-s3) }}
    uses: ./.github/workflows/test-s3.yml
    with:
      minio-version: ${{ matrix.minio-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-sftp:
    name: Run SFTP tests (server=${{ matrix.openssh-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-sftp) }}
    uses: ./.github/workflows/test-sftp.yml
    with:
      openssh-version: ${{ matrix.openssh-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-samba:
    name: Run Samba tests (server=${{ matrix.server-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-samba) }}
    uses: ./.github/workflows/test-samba.yml
    with:
      server-version: ${{ matrix.server-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false

  tests-webdav:
    name: Run WebDAV tests (server=${{ matrix.webdav-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-webdav) }}
    uses: ./.github/workflows/test-webdav.yml
    with:
      webdav-version: ${{ matrix.webdav-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
      with-cache: false
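
  # Aggregation job that depends on every test job above, giving the nightly run a single final status.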
  all_done:
    name: Tests done
    runs-on: ubuntu-latest
    needs:
      - tests-core
      - tests-clickhouse
      - tests-hive
      - tests-kafka
      - tests-local-fs
      - tests-mongodb
      - tests-mssql
      - tests-mysql
      - tests-oracle
      - tests-postgres
      - tests-teradata
      - tests-ftp
      - tests-ftps
      - tests-hdfs
      - tests-s3
      - tests-sftp
      - tests-samba
      - tests-webdav
    steps:
      # Do not send a coverage report. Nightly tests are only used to find bugs with new versions of databases and filesystems.
      - name: All done
        run: echo 1