diff --git a/spark/src/test/scala-spark-master/org/apache/spark/sql/delta/DeltaVariantSuite.scala b/spark/src/test/scala-spark-master/org/apache/spark/sql/delta/DeltaVariantSuite.scala
index 4036b7a552..daac96a28e 100644
--- a/spark/src/test/scala-spark-master/org/apache/spark/sql/delta/DeltaVariantSuite.scala
+++ b/spark/src/test/scala-spark-master/org/apache/spark/sql/delta/DeltaVariantSuite.scala
@@ -123,13 +123,13 @@ class DeltaVariantSuite
   test("VariantType may not be used as a partition column") {
     withTable("delta_test") {
       checkError(
-        exception = intercept[AnalysisException] {
+        intercept[AnalysisException] {
           sql(
             """CREATE TABLE delta_test(s STRING, v VARIANT)
               |USING delta
               |PARTITIONED BY (v)""".stripMargin)
         },
-        condition = "INVALID_PARTITION_COLUMN_DATA_TYPE",
+        "INVALID_PARTITION_COLUMN_DATA_TYPE",
         parameters = Map("type" -> "\"VARIANT\"")
       )
     }
@@ -516,7 +516,7 @@ class DeltaVariantSuite
       }
       checkError(
         insertException,
-        condition = "DELTA_NOT_NULL_CONSTRAINT_VIOLATED",
+        "DELTA_NOT_NULL_CONSTRAINT_VIOLATED",
         parameters = Map("columnName" -> "v")
       )
 
@@ -539,7 +539,7 @@ class DeltaVariantSuite
       }
      checkError(
         insertException,
-        condition = "DELTA_VIOLATE_CONSTRAINT_WITH_VALUES",
+        "DELTA_VIOLATE_CONSTRAINT_WITH_VALUES",
         parameters = Map(
           "constraintName" -> "variantgtezero",
           "expression" -> "(variant_get(v, '$', 'INT') >= 0)",
           "values" -> " - v : -1"
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaProtocolVersionSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaProtocolVersionSuite.scala
index 56488cf28f..84aebb3602 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaProtocolVersionSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaProtocolVersionSuite.scala
@@ -2592,8 +2592,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
         dropCommand.run(spark)
       }
       checkError(
-        exception = e3,
-        condition = "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST",
+        e3,
+        "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST",
         parameters = Map(
           "feature" -> feature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -2707,8 +2707,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
         command.run(spark)
       }
       checkError(
-        exception = e,
-        condition = "DELTA_FEATURE_DROP_NONREMOVABLE_FEATURE",
+        e,
+        "DELTA_FEATURE_DROP_NONREMOVABLE_FEATURE",
         parameters = Map("feature" -> TestWriterMetadataNoAutoUpdateFeature.name))
     }
   }
@@ -2730,8 +2730,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
         command.run(spark)
       }
       checkError(
-        exception = e,
-        condition = "DELTA_FEATURE_DROP_NONREMOVABLE_FEATURE",
+        e,
+        "DELTA_FEATURE_DROP_NONREMOVABLE_FEATURE",
         parameters = Map("feature" -> AppendOnlyTableFeature.name))
     }
   }
@@ -2756,8 +2756,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
         command.run(spark)
       }
       checkError(
-        exception = e,
-        condition = "DELTA_FEATURE_DROP_UNSUPPORTED_CLIENT_FEATURE",
+        e,
+        "DELTA_FEATURE_DROP_UNSUPPORTED_CLIENT_FEATURE",
         parameters = Map("feature" -> "NonSupportedFeature"))
     }
   }
@@ -2782,8 +2782,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
         command.run(spark)
       }
       checkError(
-        exception = e,
-        condition = "DELTA_FEATURE_DROP_FEATURE_NOT_PRESENT",
+        e,
+        "DELTA_FEATURE_DROP_FEATURE_NOT_PRESENT",
         parameters = Map("feature" -> TestRemovableWriterFeature.name))
     }
   }
@@ -2863,8 +2863,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
           TestRemovableWriterFeature.name).run(spark)
       }
       checkError(
-        exception = e1,
-        condition = "DELTA_FEATURE_DROP_DEPENDENT_FEATURE",
+        e1,
+        "DELTA_FEATURE_DROP_DEPENDENT_FEATURE",
         parameters = Map(
           "feature" -> TestRemovableWriterFeature.name,
           "dependentFeatures" -> TestRemovableWriterFeatureWithDependency.name))
@@ -2904,8 +2904,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
             |TRUNCATE HISTORY""".stripMargin)
       }
       checkError(
-        exception = e,
-        condition = "DELTA_FEATURE_DROP_HISTORY_TRUNCATION_NOT_ALLOWED",
+        e,
+        "DELTA_FEATURE_DROP_HISTORY_TRUNCATION_NOT_ALLOWED",
         parameters = Map.empty)
     }
   }
@@ -2932,8 +2932,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
           TestRemovableReaderWriterFeature.name).run(spark)
       }
       checkError(
-        exception = e1,
-        condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
+        e1,
+        "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
         parameters = Map(
           "feature" -> TestRemovableReaderWriterFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -2963,8 +2963,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
 
       }
       checkError(
-        exception = e2,
-        condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
+        e2,
+        "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
         parameters = Map(
           "feature" -> TestRemovableReaderWriterFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -2991,8 +2991,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
           TestRemovableReaderWriterFeature.name).run(spark)
       }
       checkError(
-        exception = e1,
-        condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
+        e1,
+        "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
         parameters = Map(
           "feature" -> TestRemovableReaderWriterFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -3011,8 +3011,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
           TestRemovableReaderWriterFeature.name).run(spark)
       }
       checkError(
-        exception = e2,
-        condition = "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST",
+        e2,
+        "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST",
         parameters = Map(
           "feature" -> TestRemovableReaderWriterFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -3063,8 +3063,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
           TestRemovableReaderWriterFeature.name).run(spark)
       }
       checkError(
-        exception = e1,
-        condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
+        e1,
+        "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
         parameters = Map(
           "feature" -> TestRemovableReaderWriterFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -3086,8 +3086,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
           TestRemovableReaderWriterFeature.name).run(spark)
       }
       checkError(
-        exception = e2,
-        condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
+        e2,
+        "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
         parameters = Map(
           "feature" -> TestRemovableReaderWriterFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -3114,8 +3114,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
           TestRemovableReaderWriterFeature.name).run(spark)
       }
       checkError(
-        exception = e1,
-        condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
+        e1,
+        "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
         parameters = Map(
           "feature" -> TestRemovableReaderWriterFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -3531,8 +3531,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
           TestRemovableWriterWithHistoryTruncationFeature.name).run(spark)
       }
       checkError(
-        exception = e1,
-        condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
+        e1,
+        "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
         parameters = Map(
           "feature" -> TestRemovableWriterWithHistoryTruncationFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -3611,8 +3611,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
         dropV2CheckpointsTableFeature(spark, targetLog)
       }
       checkError(
-        exception = e1,
-        condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
+        e1,
+        "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD",
         parameters = Map(
           "feature" -> V2CheckpointTableFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
@@ -3645,8 +3645,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest
         dropV2CheckpointsTableFeature(spark, targetLog)
       }
       checkError(
-        exception = e2,
-        condition = "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST",
+        e2,
+        "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST",
         parameters = Map(
           "feature" -> V2CheckpointTableFeature.name,
           "logRetentionPeriodKey" -> "delta.logRetentionDuration",
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsSuite.scala
index 9284b27a57..ae2643d205 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsSuite.scala
@@ -1605,8 +1605,8 @@ class CoordinatedCommitsSuite
           s"'${COORDINATED_COMMITS_COORDINATOR_CONF.key}')")
       }
       checkError(
-        exception = e,
-        condition = "DELTA_CANNOT_UNSET_COORDINATED_COMMITS_CONFS",
+        e,
+        "DELTA_CANNOT_UNSET_COORDINATED_COMMITS_CONFS",
         sqlState = "42616",
         parameters = Map[String, String]())
     }
@@ -1626,8 +1626,8 @@ class CoordinatedCommitsSuite
           s"('${IN_COMMIT_TIMESTAMPS_ENABLED.key}' = 'false')")
       }
       checkError(
-        exception = e,
-        condition = "DELTA_CANNOT_MODIFY_COORDINATED_COMMITS_DEPENDENCIES",
+        e,
+        "DELTA_CANNOT_MODIFY_COORDINATED_COMMITS_DEPENDENCIES",
         sqlState = "42616",
         parameters = Map("Command" -> "ALTER"))
     }
@@ -1643,8 +1643,8 @@ class CoordinatedCommitsSuite
           s"'${IN_COMMIT_TIMESTAMPS_ENABLED.key}' = 'false')")
       }
       checkError(
-        exception = e,
-        condition = "DELTA_CANNOT_SET_COORDINATED_COMMITS_DEPENDENCIES",
+        e,
+        "DELTA_CANNOT_SET_COORDINATED_COMMITS_DEPENDENCIES",
         sqlState = "42616",
         parameters = Map("Command" -> "ALTER"))
     }
@@ -1664,8 +1664,8 @@ class CoordinatedCommitsSuite
           s"('${IN_COMMIT_TIMESTAMPS_ENABLED.key}')")
       }
       checkError(
-        exception = e,
-        condition = "DELTA_CANNOT_MODIFY_COORDINATED_COMMITS_DEPENDENCIES",
+        e,
+        "DELTA_CANNOT_MODIFY_COORDINATED_COMMITS_DEPENDENCIES",
         sqlState = "42616",
         parameters = Map("Command" -> "ALTER"))
     }
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsUtilsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsUtilsSuite.scala
index a815ba9d9e..158fe6d5e6 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsUtilsSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsUtilsSuite.scala
@@ -85,8 +85,8 @@ class CoordinatedCommitsUtilsSuite extends QueryTest
             spark, propertyOverrides, tableExists, command)
         }
         checkError(
-          exception = e,
-          condition = errorOpt.get.getErrorClass,
+          e,
+          errorOpt.get.getErrorClass,
           sqlState = errorOpt.get.getSqlState,
           parameters = errorOpt.get.getMessageParameters.asScala.toMap)
       } else {
@@ -260,8 +260,8 @@ class CoordinatedCommitsUtilsSuite extends QueryTest
            existingConfs, propertyOverrides)
         }
         checkError(
-          exception = e,
-          condition = errorOpt.get.getErrorClass,
+          e,
+          errorOpt.get.getErrorClass,
           sqlState = errorOpt.get.getSqlState,
           parameters = errorOpt.get.getMessageParameters.asScala.toMap)
       } else {
@@ -328,8 +328,8 @@ class CoordinatedCommitsUtilsSuite extends QueryTest
            existingConfs, propKeysToUnset)
         }
         checkError(
-          exception = e,
-          condition = errorOpt.get.getErrorClass,
+          e,
+          errorOpt.get.getErrorClass,
           sqlState = errorOpt.get.getSqlState,
           parameters = errorOpt.get.getMessageParameters.asScala.toMap)
       } else {
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/schema/SchemaUtilsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/schema/SchemaUtilsSuite.scala
index 8c19cc79ff..89b9b0cc17 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/schema/SchemaUtilsSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/schema/SchemaUtilsSuite.scala
@@ -1948,8 +1948,8 @@ class SchemaUtilsSuite extends QueryTest
       )
     }
     checkError(
-      exception = exception,
-      condition = "DELTA_CANNOT_RESOLVE_COLUMN",
+      exception,
+      "DELTA_CANNOT_RESOLVE_COLUMN",
       sqlState = "42703",
       parameters = Map("columnName" -> "two", "schema" -> tableSchema.treeString)
     )
@@ -1974,8 +1974,8 @@ class SchemaUtilsSuite extends QueryTest
       )
     }
     checkError(
-      exception = exception,
-      condition = "DELTA_CANNOT_RESOLVE_COLUMN",
+      exception,
+      "DELTA_CANNOT_RESOLVE_COLUMN",
       sqlState = "42703",
       parameters = Map("columnName" -> "s.two", "schema" -> tableSchema.treeString)
     )
@@ -2348,8 +2348,8 @@ class SchemaUtilsSuite extends QueryTest
       mergeSchemas(longType, sourceType)
     }
     checkError(
-      exception = e.getCause.asInstanceOf[AnalysisException],
-      condition = "DELTA_MERGE_INCOMPATIBLE_DATATYPE",
+      e.getCause.asInstanceOf[AnalysisException],
+      "DELTA_MERGE_INCOMPATIBLE_DATATYPE",
      parameters = Map("currentDataType" -> "LongType",
        "updateDataType" -> sourceType.head.dataType.toString))
   }
@@ -2637,10 +2637,10 @@ class SchemaUtilsSuite extends QueryTest
     badCharacters.foreach { char =>
       Seq(s"a${char}b", s"${char}ab", s"ab${char}", char.toString).foreach { name =>
         checkError(
-          exception = intercept[AnalysisException] {
+          intercept[AnalysisException] {
            SchemaUtils.checkFieldNames(Seq(name))
          },
-          condition = "DELTA_INVALID_CHARACTERS_IN_COLUMN_NAME",
+          "DELTA_INVALID_CHARACTERS_IN_COLUMN_NAME",
           parameters = Map("columnName" -> s"$name")
         )
       }
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/skipping/clustering/ClusteredTableDDLSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/skipping/clustering/ClusteredTableDDLSuite.scala
index 2acf0205ab..a2fe9f5bd8 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/skipping/clustering/ClusteredTableDDLSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/skipping/clustering/ClusteredTableDDLSuite.scala
@@ -1169,7 +1169,7 @@ trait ClusteredTableDDLDataSourceV2SuiteBase
         }
         checkError(
           e,
-          condition = "DELTA_CREATE_TABLE_WITH_DIFFERENT_CLUSTERING",
+          "DELTA_CREATE_TABLE_WITH_DIFFERENT_CLUSTERING",
           parameters = Map(
             "path" -> dir.toURI.toString.stripSuffix("/"),
             "specifiedColumns" -> "",
@@ -1194,7 +1194,7 @@ trait ClusteredTableDDLDataSourceV2SuiteBase
         }
         checkError(
           e,
-          condition = "DELTA_CREATE_TABLE_WITH_DIFFERENT_CLUSTERING",
+          "DELTA_CREATE_TABLE_WITH_DIFFERENT_CLUSTERING",
           parameters = Map(
             "path" -> dir.toURI.toString.stripSuffix("/"),
             "specifiedColumns" -> "col2",
@@ -1235,7 +1235,7 @@ trait ClusteredTableDDLDataSourceV2SuiteBase
         }
         checkError(
           e,
-          condition = "DELTA_CREATE_TABLE_WITH_DIFFERENT_CLUSTERING",
+          "DELTA_CREATE_TABLE_WITH_DIFFERENT_CLUSTERING",
           parameters = Map(
             "path" -> dir.toURI.toString.stripSuffix("/"),
             "specifiedColumns" -> "col1",