Commit

Merge remote-tracking branch 'origin/3.4+' into 3.4+
Jolanrensen committed Mar 25, 2024
2 parents 24952b7 + 5d5fe84 commit ffedb29
Showing 5 changed files with 4 additions and 5 deletions.
1 change: 0 additions & 1 deletion .github/workflows/build.yml
@@ -39,7 +39,6 @@ jobs:
    - name: Publish plugins to maven local
      run: >
        ./gradlew
-       clean
        compiler-plugin:publishToMavenLocal
        gradle-plugin:publishToMavenLocal
@@ -92,7 +92,7 @@ inline fun <reified T> List<T>.toDF(spark: SparkSession, vararg colNames: String
* Utility method to create dataset from list
*/
inline fun <reified T> Array<T>.toDS(spark: SparkSession): Dataset<T> =
-    toList().toDS(spark)
+    asList().toDS(spark)

/**
* Utility method to create dataframe from list
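
Aside: the common thread in the toList() → asList() swaps in this commit is that, for an array (including a vararg parameter), Kotlin's Array.asList() returns a List view backed by the existing array, while toList() copies the elements into a new list. A minimal, Spark-free sketch of that difference (illustrative names only, not part of the change):

// Illustrative only: shows the asList()/toList() difference on a vararg array.
fun <T> wrapVsCopy(vararg elements: T) {
    // toList() allocates a fresh ArrayList and copies every element.
    val copied: List<T> = elements.toList()

    // asList() returns a fixed-size List view of the same backing array:
    // no copy is made, so it is O(1) regardless of the number of elements.
    val wrapped: List<T> = elements.asList()

    println("copied = $copied, wrapped = $wrapped")
}

fun main() {
    wrapVsCopy(1, 2, 3)   // copied = [1, 2, 3], wrapped = [1, 2, 3]
}

Since the Spark helpers touched here (toDS, parallelize, toRDD) only read the elements, a wrapping view is presumably sufficient and saves one allocation per call.
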
@@ -11,7 +11,7 @@ import java.io.Serializable
fun <T> JavaSparkContext.rddOf(
vararg elements: T,
numSlices: Int = defaultParallelism(),
-): JavaRDD<T> = parallelize(elements.toList(), numSlices)
+): JavaRDD<T> = parallelize(elements.asList(), numSlices)

/**
* Utility method to create an RDD from a list.
@@ -113,7 +113,7 @@ class KSparkSession(val spark: SparkSession) {
* NOTE: [T] must be [Serializable].
*/
fun <T> rddOf(vararg elements: T, numSlices: Int = sc.defaultParallelism()): JavaRDD<T> =
-    sc.toRDD(elements.toList(), numSlices)
+    sc.toRDD(elements.asList(), numSlices)

/**
* A collection of methods for registering user-defined functions (UDF).
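
For orientation, a hedged usage sketch of the rddOf helper shown in the hunk above, assuming the library's usual withSpark entry point (not part of this diff) is used to obtain a KSparkSession scope:

import org.jetbrains.kotlinx.spark.api.withSpark   // assumed entry point of this library

fun main() = withSpark {
    // Inside the block, `this` is a KSparkSession, so rddOf and sc are in scope.
    // The vararg array is handed to sc.toRDD via asList(), i.e. without an extra copy.
    val rdd = rddOf(1, 2, 3, 4, numSlices = 2)
    println(rdd.collect())   // [1, 2, 3, 4]
}
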
@@ -226,7 +226,7 @@ class TypeInferenceTest : ShouldSpec({
should("generate valid serializer schema") {
expect(schemaFor<Sample>() as org.apache.spark.sql.types.StructType) {
this
.feature("data type", { this.fields()?.toList() }) {
.feature("data type", { this.fields()?.asList() }) {
this.notToEqualNull().toContain.inOrder.only.entry {
this
.feature("element name", { name() }) { toEqual("optionList") }
