diff --git a/spark/src/main/scala/org/apache/spark/sql/delta/commands/CreateDeltaTableCommand.scala b/spark/src/main/scala/org/apache/spark/sql/delta/commands/CreateDeltaTableCommand.scala
index c3cb0731db..644edbe135 100644
--- a/spark/src/main/scala/org/apache/spark/sql/delta/commands/CreateDeltaTableCommand.scala
+++ b/spark/src/main/scala/org/apache/spark/sql/delta/commands/CreateDeltaTableCommand.scala
@@ -732,7 +732,7 @@ case class CreateDeltaTableCommand(
     if (txn.readVersion > -1L && isReplace && !dontOverwriteSchema) {
       // When a table already exists, and we're using the DataFrameWriterV2 API to replace
       // or createOrReplace a table, we blindly overwrite the metadata.
-      val newMetadata = getProvidedMetadata(table, schema.json)
+      var newMetadata = getProvidedMetadata(table, schema.json)
       val updatedConfig = UniversalFormat.enforceDependenciesInConfiguration(
         newMetadata.configuration,
         txn.snapshot)
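
The only functional change in this hunk is `val` -> `var` on `newMetadata`: a `val` binding in Scala cannot be reassigned, so presumably the method later rebinds `newMetadata` (for example, to fold the enforced configuration back into the metadata). Below is a minimal, self-contained sketch of that pattern; the `Metadata` case class and the table properties used here are illustrative stand-ins, not Delta's actual types or code.

// Minimal sketch, assuming the later code rebinds newMetadata with an updated configuration.
case class Metadata(configuration: Map[String, String] = Map.empty)

object VarVsValSketch {
  def main(args: Array[String]): Unit = {
    // `var` so the binding can be replaced once the enforced configuration is known.
    var newMetadata = Metadata(Map("delta.universalFormat.enabledFormats" -> "iceberg"))

    // Stand-in for something like enforceDependenciesInConfiguration returning a new config.
    val updatedConfig = newMetadata.configuration + ("delta.enableIcebergCompatV2" -> "true")

    // Folding the updated config back in requires reassignment; with `val newMetadata`
    // the line below would not compile.
    newMetadata = newMetadata.copy(configuration = updatedConfig)
    println(newMetadata)
  }
}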