
Commit

merged with master
ilicmarkodb committed Sep 24, 2024
2 parents bd62e3d + a8cc4b4 commit 908750d
Showing 201 changed files with 8,024 additions and 2,452 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/connectors_test.yaml
@@ -1,8 +1,8 @@
name: "Delta Connectors Tests"
name: "Delta Connectors"
on: [push, pull_request]
jobs:
build:
name: "Run tests"
name: "DC: Scala ${{ matrix.scala }}"
runs-on: ubuntu-20.04
strategy:
matrix:
3 changes: 2 additions & 1 deletion .github/workflows/kernel_test.yaml
@@ -1,7 +1,8 @@
name: "Delta Kernel Tests"
name: "Delta Kernel"
on: [push, pull_request]
jobs:
test:
name: "DK"
runs-on: ubuntu-20.04
env:
SCALA_VERSION: 2.12.18
3 changes: 2 additions & 1 deletion .github/workflows/spark_examples_test.yaml
@@ -1,7 +1,8 @@
name: "Delta Spark Local Publishing and Examples Compilation"
name: "Delta Spark Publishing and Examples"
on: [push, pull_request]
jobs:
test:
name: "DSP&E: Scala ${{ matrix.scala }}"
runs-on: ubuntu-20.04
strategy:
matrix:
3 changes: 2 additions & 1 deletion .github/workflows/spark_master_test.yaml
@@ -1,7 +1,8 @@
name: "Delta Spark Master Tests"
name: "Delta Spark Master"
on: [push, pull_request]
jobs:
test:
name: "DSM: Scala ${{ matrix.scala }}, Shard ${{ matrix.shard }}"
runs-on: ubuntu-20.04
strategy:
matrix:
5 changes: 3 additions & 2 deletions .github/workflows/spark_python_test.yaml
@@ -1,7 +1,8 @@
name: "Delta Spark Python Tests"
name: "Delta Spark Python"
on: [push, pull_request]
jobs:
test:
name: "DSP"
runs-on: ubuntu-20.04
strategy:
matrix:
@@ -60,7 +61,7 @@ jobs:
# `-SNAPSHOT` in version (e.g. `3.3.0-SNAPSHOT`) as the version is picked up from
# the`version.sbt` file.
pipenv run pip install pip==24.0 setuptools==69.5.1 wheel==0.43.0
-pipenv run pip install pyspark==3.5.0
+pipenv run pip install pyspark==3.5.3
pipenv run pip install flake8==3.5.0 pypandoc==1.3.3
pipenv run pip install black==23.9.1
pipenv run pip install importlib_metadata==3.10.0
5 changes: 3 additions & 2 deletions .github/workflows/spark_test.yaml
@@ -1,7 +1,8 @@
name: "Delta Spark Tests"
name: "Delta Spark Latest"
on: [push, pull_request]
jobs:
test:
name: "DSL: Scala ${{ matrix.scala }}, Shard ${{ matrix.shard }}"
runs-on: ubuntu-20.04
strategy:
matrix:
@@ -64,7 +65,7 @@ jobs:
# `-SNAPSHOT` in version (e.g. `3.3.0-SNAPSHOT`) as the version is picked up from
# the`version.sbt` file.
pipenv run pip install pip==24.0 setuptools==69.5.1 wheel==0.43.0
-pipenv run pip install pyspark==3.5.2
+pipenv run pip install pyspark==3.5.3
pipenv run pip install flake8==3.5.0 pypandoc==1.3.3
pipenv run pip install black==23.9.1
pipenv run pip install importlib_metadata==3.10.0
6 changes: 3 additions & 3 deletions .github/workflows/unidoc.yaml
@@ -1,12 +1,12 @@
name: "Unidoc generation"
name: "Unidoc"
on: [push, pull_request]
jobs:
build:
name: "Generate unidoc"
name: "U: Scala ${{ matrix.scala }}"
runs-on: ubuntu-20.04
strategy:
matrix:
-# These Scala versions must match those in the build.sbt
+# These Scala versions must match those in the build.sbt
scala: [2.13.13, 2.12.18]
steps:
- name: install java
2 changes: 1 addition & 1 deletion Dockerfile
@@ -38,7 +38,7 @@ RUN pip3 install --upgrade pip
# the`version.sbt` file.
RUN pip install pip==24.0 setuptools==69.5.1 wheel==0.43.0

-RUN pip3 install pyspark==3.5.2
+RUN pip3 install pyspark==3.5.3

RUN pip3 install mypy==0.982

2 changes: 1 addition & 1 deletion benchmarks/build.sbt
@@ -20,7 +20,7 @@ scalaVersion := "2.12.18"
lazy val root = (project in file("."))
.settings(
name := "benchmarks",
libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.5.2" % "provided",
libraryDependencies += "org.apache.spark" %% "spark-sql" % "3.5.3" % "provided",
libraryDependencies += "com.github.scopt" %% "scopt" % "4.0.1",
libraryDependencies += "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.13.1",

8 changes: 5 additions & 3 deletions build.sbt
@@ -52,7 +52,7 @@ val all_scala_versions = Seq(scala212, scala213)
val default_scala_version = settingKey[String]("Default Scala version")
Global / default_scala_version := scala212

-val LATEST_RELEASED_SPARK_VERSION = "3.5.2"
+val LATEST_RELEASED_SPARK_VERSION = "3.5.3"
val SPARK_MASTER_VERSION = "4.0.0-SNAPSHOT"
val sparkVersion = settingKey[String]("Spark version")
spark / sparkVersion := getSparkVersion()
@@ -176,6 +176,7 @@ def crossSparkSettings(): Seq[Setting[_]] = getSparkVersion() match {
Compile / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "main" / "scala-spark-3.5",
Test / unmanagedSourceDirectories += (Test / baseDirectory).value / "src" / "test" / "scala-spark-3.5",
Antlr4 / antlr4Version := "4.9.3",
+Test / javaOptions ++= Seq("-Dlog4j.configurationFile=log4j2.properties"),

// Java-/Scala-/Uni-Doc Settings
scalacOptions ++= Seq(
@@ -204,8 +205,9 @@ def crossSparkSettings(): Seq[Setting[_]] = getSparkVersion() match {
"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
"--add-opens=java.base/sun.security.action=ALL-UNNAMED",
"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
)
"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED",
"-Dlog4j.configurationFile=log4j2_spark_master.properties"
),

// Java-/Scala-/Uni-Doc Settings
// This isn't working yet against Spark Master.
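
The -Dlog4j.configurationFile flags added above (log4j2.properties for the latest released Spark, log4j2_spark_master.properties for the Spark master build) are passed through javaOptions, which sbt applies only to forked JVMs. A minimal standalone sketch of that pattern, with an illustrative project name that is not taken from this diff:

// build.sbt sketch: forward a Log4j 2 configuration file to forked test JVMs.
// Without `Test / fork := true`, sbt runs tests in its own JVM and ignores javaOptions.
lazy val example = (project in file("example"))
  .settings(
    Test / fork := true,
    Test / javaOptions ++= Seq("-Dlog4j.configurationFile=log4j2.properties")
  )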
16 changes: 0 additions & 16 deletions connectors/.github/workflows/new_pull_request.yaml

This file was deleted.

19 changes: 0 additions & 19 deletions connectors/.github/workflows/new_updated_issue.yaml

This file was deleted.

43 changes: 0 additions & 43 deletions connectors/.github/workflows/test.yaml

This file was deleted.

20 changes: 0 additions & 20 deletions connectors/.github/workflows/updated_pull_request.yaml

This file was deleted.

38 changes: 0 additions & 38 deletions connectors/dev/README.md

This file was deleted.

@@ -5,6 +5,7 @@

import org.apache.flink.streaming.api.functions.sink.filesystem.BucketAssigner;
import org.apache.flink.table.data.RowData;
+import org.apache.flink.table.data.conversion.DateDateConverter;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.LogicalTypeRoot;
import org.apache.flink.table.types.logical.RowType;
@@ -99,6 +100,10 @@ public LinkedHashMap<String, String> generatePartitionValues(
partitionValues.put(partitionKey, String.valueOf(element.getShort(keyIndex)));
} else if (keyType.getTypeRoot() == LogicalTypeRoot.TINYINT) {
partitionValues.put(partitionKey, String.valueOf(element.getByte(keyIndex)));
+} else if (keyType.getTypeRoot() == LogicalTypeRoot.DATE) {
+DateDateConverter converter = new DateDateConverter();
+String value = String.valueOf(converter.toExternal(element.getInt(keyIndex)));
+partitionValues.put(partitionKey, value);
} else {
throw new RuntimeException("Type not supported " + keyType.getTypeRoot());
}
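
For context on the new DATE branch above: Flink represents a DATE value internally as an int counting days since 1970-01-01, and DateDateConverter.toExternal converts that int to a java.sql.Date, so the partition value becomes a yyyy-MM-dd string. A roughly equivalent conversion, sketched in Scala with a made-up helper name:

import java.time.LocalDate

// Illustrative only: mirrors what the DATE branch produces for a partition value.
// Flink's internal DATE representation is the number of days since the epoch.
def datePartitionValue(daysSinceEpoch: Int): String =
  LocalDate.ofEpochDay(daysSinceEpoch.toLong).toString  // e.g. 19990 -> "2024-09-24"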
