diff --git a/spark/src/main/scala/org/apache/spark/sql/delta/sources/DeltaSQLConf.scala b/spark/src/main/scala/org/apache/spark/sql/delta/sources/DeltaSQLConf.scala
index aa46bd9bd0..9d57ba2c00 100644
--- a/spark/src/main/scala/org/apache/spark/sql/delta/sources/DeltaSQLConf.scala
+++ b/spark/src/main/scala/org/apache/spark/sql/delta/sources/DeltaSQLConf.scala
@@ -1561,7 +1561,9 @@ trait DeltaSQLConfBase {
           |The casting behavior is governed by 'spark.sql.storeAssignmentPolicy'.
           |""".stripMargin)
       .booleanConf
-      .createWithDefault(true)
+      // This feature doesn't properly support structs with missing fields and is disabled until a
+      // fix is implemented.
+      .createWithDefault(false)
 
   val DELTA_CDF_UNSAFE_BATCH_READ_ON_INCOMPATIBLE_SCHEMA_CHANGES =
     buildConf("changeDataFeed.unsafeBatchReadOnIncompatibleSchemaChanges.enabled")
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoImplicitCastSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoImplicitCastSuite.scala
index e6e965e0b8..c014feb228 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoImplicitCastSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoImplicitCastSuite.scala
@@ -17,8 +17,8 @@
 package org.apache.spark.sql.delta
 
 import org.apache.spark.sql.delta.sources.DeltaSQLConf
-
 import org.apache.spark.sql.SaveMode
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 
 /**
@@ -32,6 +32,12 @@ import org.apache.spark.sql.types._
  */
 class DeltaInsertIntoImplicitCastSuite extends DeltaInsertIntoTest {
+  override def beforeAll(): Unit = {
+    super.beforeAll()
+    spark.conf.set(DeltaSQLConf.DELTA_STREAMING_SINK_ALLOW_IMPLICIT_CASTS.key, "true")
+    spark.conf.set(SQLConf.ANSI_ENABLED.key, "true")
+  }
+
   for (schemaEvolution <- BOOLEAN_DOMAIN) {
     testInserts("insert with implicit up and down cast on top-level fields, " +
       s"schemaEvolution=$schemaEvolution")(