From 8ee008fed03532c7076bcde5a2d9e3eec4ddab67 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Tue, 17 Sep 2024 19:17:02 +0200 Subject: [PATCH] Pass as positional parameters --- .../spark/sql/delta/DeltaVariantSuite.scala | 2 +- .../sql/parser/DeltaSqlParserSuite.scala | 12 ++-- .../delta/tables/DeltaTableBuilderSuite.scala | 5 +- .../sql/delta/DeltaAlterTableTests.scala | 56 +++++++++---------- .../spark/sql/delta/DeltaCDCSQLSuite.scala | 4 +- .../sql/delta/DeltaColumnMappingSuite.scala | 5 +- .../delta/DeltaDataFrameWriterV2Suite.scala | 4 +- .../sql/delta/DeltaDropColumnSuite.scala | 4 +- .../spark/sql/delta/DeltaErrorsSuite.scala | 32 +++++------ .../DeltaInsertIntoImplicitCastSuite.scala | 6 +- .../sql/delta/DeltaInsertIntoTableSuite.scala | 36 ++++++------ .../sql/delta/DeltaProtocolVersionSuite.scala | 8 +-- .../delta/DeltaSinkImplicitCastSuite.scala | 16 +++--- .../spark/sql/delta/DeltaSinkSuite.scala | 8 +-- .../apache/spark/sql/delta/DeltaSuite.scala | 4 +- .../sql/delta/DeltaTableCreationTests.scala | 4 +- .../sql/delta/GeneratedColumnSuite.scala | 8 +-- .../sql/delta/InCommitTimestampSuite.scala | 8 +-- .../sql/delta/SchemaValidationSuite.scala | 8 +-- .../spark/sql/delta/UpdateSQLSuite.scala | 10 ++-- .../DropColumnMappingFeatureSuite.scala | 9 ++- .../CoordinatedCommitsSuite.scala | 6 +- .../rowtracking/MaterializedColumnSuite.scala | 4 +- .../RowTrackingReadWriteSuite.scala | 8 +-- .../delta/schema/CheckConstraintsSuite.scala | 12 ++-- .../sql/delta/schema/SchemaUtilsSuite.scala | 8 +-- .../clustering/ClusteredTableDDLSuite.scala | 44 +++++++-------- .../TypeWideningAlterTableNestedSuite.scala | 12 ++-- .../TypeWideningAlterTableSuite.scala | 12 ++-- .../TypeWideningConstraintsSuite.scala | 20 +++---- ...ypeWideningFeatureCompatibilitySuite.scala | 8 +-- .../TypeWideningGeneratedColumnsSuite.scala | 20 +++---- .../TypeWideningTableFeatureSuite.scala | 20 +++---- .../typewidening/TypeWideningTestMixin.scala | 12 ++-- 34 files changed, 215 insertions(+), 220 deletions(-) diff --git a/spark/src/test/scala-spark-master/org/apache/spark/sql/delta/DeltaVariantSuite.scala b/spark/src/test/scala-spark-master/org/apache/spark/sql/delta/DeltaVariantSuite.scala index 614b9a09d4..4036b7a552 100644 --- a/spark/src/test/scala-spark-master/org/apache/spark/sql/delta/DeltaVariantSuite.scala +++ b/spark/src/test/scala-spark-master/org/apache/spark/sql/delta/DeltaVariantSuite.scala @@ -100,7 +100,7 @@ class DeltaVariantSuite // check previously thrown error message checkError( e, - condition = "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT", + "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT", parameters = Map( "unsupportedFeatures" -> VariantTypeTableFeature.name, "supportedFeatures" -> currentFeatures diff --git a/spark/src/test/scala/io/delta/sql/parser/DeltaSqlParserSuite.scala b/spark/src/test/scala/io/delta/sql/parser/DeltaSqlParserSuite.scala index f8fc23c768..4934a1d884 100644 --- a/spark/src/test/scala/io/delta/sql/parser/DeltaSqlParserSuite.scala +++ b/spark/src/test/scala/io/delta/sql/parser/DeltaSqlParserSuite.scala @@ -463,9 +463,9 @@ class DeltaSqlParserSuite extends SparkFunSuite with SQLHelper { val parser = new DeltaSqlParser(new SparkSqlParser()) val sql = clusterByStatement(clause, asSelect, "a int, b string", "CLUSTER BY (a) CLUSTER BY (b)") - checkError(exception = intercept[ParseException] { + checkError(intercept[ParseException] { parser.parsePlan(sql) - }, condition = "DUPLICATE_CLAUSES", parameters = Map("clauseName" -> "CLUSTER BY")) + }, "DUPLICATE_CLAUSES", parameters = 
Map("clauseName" -> "CLUSTER BY")) } test("CLUSTER BY set clustering column property is ignored - " + @@ -492,9 +492,9 @@ class DeltaSqlParserSuite extends SparkFunSuite with SQLHelper { "CLUSTER BY (a) PARTITIONED BY (b)") val errorMsg = "Clustering and partitioning cannot both be specified. " + "Please remove PARTITIONED BY if you want to create a Delta table with clustering" - checkError(exception = intercept[ParseException] { + checkError(intercept[ParseException] { parser.parsePlan(sql) - }, condition = "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg)) + }, "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg)) } test(s"CLUSTER BY with bucketing - $clause TABLE asSelect = $asSelect") { @@ -508,9 +508,9 @@ class DeltaSqlParserSuite extends SparkFunSuite with SQLHelper { val errorMsg = "Clustering and bucketing cannot both be specified. " + "Please remove CLUSTERED BY INTO BUCKETS if you " + "want to create a Delta table with clustering" - checkError(exception = intercept[ParseException] { + checkError(intercept[ParseException] { parser.parsePlan(sql) - }, condition = "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg)) + }, "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg)) } } } diff --git a/spark/src/test/scala/io/delta/tables/DeltaTableBuilderSuite.scala b/spark/src/test/scala/io/delta/tables/DeltaTableBuilderSuite.scala index 98ff740479..4cf9f43f55 100644 --- a/spark/src/test/scala/io/delta/tables/DeltaTableBuilderSuite.scala +++ b/spark/src/test/scala/io/delta/tables/DeltaTableBuilderSuite.scala @@ -492,10 +492,7 @@ class DeltaTableBuilderSuite .execute() } - checkError( - exception = e, - condition = "DELTA_CLUSTER_BY_WITH_PARTITIONED_BY" - ) + checkError(e, "DELTA_CLUSTER_BY_WITH_PARTITIONED_BY") } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaAlterTableTests.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaAlterTableTests.scala index 191c70ac76..fa966b3622 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaAlterTableTests.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaAlterTableTests.scala @@ -924,17 +924,17 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { .withColumn("a", map('v1, 'v2)) withDeltaTable(df) { tableName => checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(s"ALTER TABLE $tableName CHANGE COLUMN a.key COMMENT 'a comment'") }, - condition = "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY", + "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY", parameters = Map("fieldPath" -> "a.key") ) checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(s"ALTER TABLE $tableName CHANGE COLUMN a.value COMMENT 'a comment'") }, - condition = "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY", + "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY", parameters = Map("fieldPath" -> "a.value") ) } @@ -945,10 +945,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { .withColumn("a", array('v1)) withDeltaTable(df) { tableName => checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(s"ALTER TABLE $tableName CHANGE COLUMN a.element COMMENT 'a comment'") }, - condition = "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY", + "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY", parameters = Map("fieldPath" -> "a.element") ) } @@ -959,20 +959,20 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { .withColumn("a", map('v1, 'v2)) withDeltaTable(df) { 
tableName => checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"ALTER TABLE $tableName RENAME COLUMN a.key TO key2") }, - condition = "INVALID_FIELD_NAME", + "INVALID_FIELD_NAME", parameters = Map( "fieldName" -> "`a`.`key2`", "path" -> "`a`" ) ) checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"ALTER TABLE $tableName RENAME COLUMN a.value TO value2") }, - condition = "INVALID_FIELD_NAME", + "INVALID_FIELD_NAME", parameters = Map( "fieldName" -> "`a`.`value2`", "path" -> "`a`" @@ -986,10 +986,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { .withColumn("a", array('v1)) withDeltaTable(df) { tableName => checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"ALTER TABLE $tableName RENAME COLUMN a.element TO element2") }, - condition = "INVALID_FIELD_NAME", + "INVALID_FIELD_NAME", parameters = Map( "fieldName" -> "`a`.`element2`", "path" -> "`a`" @@ -1008,10 +1008,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { ddlTest("CHANGE COLUMN - incompatible") { withDeltaTable(Seq((1, "a"), (2, "b")).toDF("v1", "v2")) { tableName => checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(s"ALTER TABLE $tableName CHANGE COLUMN v1 v1 long") }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", parameters = Map( "fieldPath" -> "v1", "oldField" -> "INT", @@ -1026,10 +1026,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { .withColumn("struct", struct("v1", "v2")) withDeltaTable(df) { tableName => checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(s"ALTER TABLE $tableName CHANGE COLUMN struct.v1 v1 long") }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", parameters = Map( "fieldPath" -> "struct.v1", "oldField" -> "INT", @@ -1044,10 +1044,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { .withColumn("a", map('v1, 'v2)) withDeltaTable(df) { tableName => checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(s"ALTER TABLE $tableName CHANGE COLUMN a.key key long") }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", parameters = Map( "fieldPath" -> "a.key", "oldField" -> "INT NOT NULL", @@ -1062,10 +1062,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { .withColumn("a", map('v1, 'v2)) withDeltaTable(df) { tableName => checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(s"ALTER TABLE $tableName CHANGE COLUMN a.value value long") }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", parameters = Map( "fieldPath" -> "a.value", "oldField" -> "INT", @@ -1080,10 +1080,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { .withColumn("a", array('v1)) withDeltaTable(df) { tableName => checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(s"ALTER TABLE $tableName CHANGE COLUMN a.element element long") }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", parameters = Map( "fieldPath" -> "a.element", "oldField" -> "INT", @@ -1383,8 +1383,8 @@ trait 
DeltaAlterTableTests extends DeltaAlterTableTestBase { // Changing the nullability of map/array fields is not allowed. var statement = s"ALTER TABLE $tableName CHANGE COLUMN m.key DROP NOT NULL" checkError( - exception = intercept[AnalysisException] { sql(statement) }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + intercept[AnalysisException] { sql(statement) }, + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", parameters = Map( "fieldPath" -> "m.key", "oldField" -> "INT NOT NULL", @@ -1394,8 +1394,8 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { statement = s"ALTER TABLE $tableName CHANGE COLUMN m.value SET NOT NULL" checkError( - exception = intercept[AnalysisException] { sql(statement) }, - condition = "_LEGACY_ERROR_TEMP_2330", + intercept[AnalysisException] { sql(statement) }, + "_LEGACY_ERROR_TEMP_2330", parameters = Map( "fieldName" -> "m.value" ), @@ -1404,8 +1404,8 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase { statement = s"ALTER TABLE $tableName CHANGE COLUMN a.element SET NOT NULL" checkError( - exception = intercept[AnalysisException] { sql(statement) }, - condition = "_LEGACY_ERROR_TEMP_2330", + intercept[AnalysisException] { sql(statement) }, + "_LEGACY_ERROR_TEMP_2330", parameters = Map( "fieldName" -> "a.element" ), diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaCDCSQLSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaCDCSQLSuite.scala index 6894802ff2..fd1d6c820c 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaCDCSQLSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaCDCSQLSuite.scala @@ -289,10 +289,10 @@ class DeltaCDCSQLSuite extends DeltaCDCSuiteBase with DeltaColumnMappingTestUtil withTable(tbl) { spark.range(10).write.format("delta").saveAsTable(tbl) checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"SELECT * FROM table_changes('$tbl', 0, id)") }, - condition = "UNRESOLVED_COLUMN.WITHOUT_SUGGESTION", + "UNRESOLVED_COLUMN.WITHOUT_SUGGESTION", parameters = Map("objectName" -> "`id`"), queryContext = Array(ExpectedContext( fragment = "id", diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaColumnMappingSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaColumnMappingSuite.scala index 2705dd839a..7eb0e1d4b4 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaColumnMappingSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaColumnMappingSuite.scala @@ -1944,10 +1944,9 @@ class DeltaColumnMappingSuite extends QueryTest |TBLPROPERTIES('${DeltaConfigs.COLUMN_MAPPING_MODE.key}'='none') |""".stripMargin) } - val errorClass = "DELTA_INVALID_CHARACTERS_IN_COLUMN_NAMES" checkError( - exception = e, - condition = errorClass, + e, + "DELTA_INVALID_CHARACTERS_IN_COLUMN_NAMES", parameters = DeltaThrowableHelper .getParameterNames(errorClass, errorSubClass = null) .zip(invalidColumns).toMap diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaDataFrameWriterV2Suite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaDataFrameWriterV2Suite.scala index 55f51acd55..9c579f0d52 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaDataFrameWriterV2Suite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaDataFrameWriterV2Suite.scala @@ -677,8 +677,8 @@ class DeltaDataFrameWriterV2Suite def verifyNotImplicitCasting(f: => Unit): Unit = { val e = intercept[DeltaAnalysisException](f) checkError( - exception = 
e.getCause.asInstanceOf[DeltaAnalysisException], - condition = "DELTA_MERGE_INCOMPATIBLE_DATATYPE", + e.getCause.asInstanceOf[DeltaAnalysisException], + "DELTA_MERGE_INCOMPATIBLE_DATATYPE", parameters = Map("currentDataType" -> "LongType", "updateDataType" -> "IntegerType")) } verifyNotImplicitCasting { diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaDropColumnSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaDropColumnSuite.scala index 9f22dc851b..ece065f4ab 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaDropColumnSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaDropColumnSuite.scala @@ -450,10 +450,10 @@ class DeltaDropColumnSuite extends QueryTest field <- Seq("m.key", "m.value", "a.element") } checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"ALTER TABLE delta_test DROP COLUMN $field") }, - condition = "DELTA_UNSUPPORTED_DROP_NESTED_COLUMN_FROM_NON_STRUCT_TYPE", + "DELTA_UNSUPPORTED_DROP_NESTED_COLUMN_FROM_NON_STRUCT_TYPE", parameters = Map( "struct" -> "IntegerType" ) diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaErrorsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaErrorsSuite.scala index 322e951556..c94b4a884d 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaErrorsSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaErrorsSuite.scala @@ -477,12 +477,12 @@ trait DeltaErrorsSuiteBase Some(s"Delta table $table doesn't exist.")) } checkError( - exception = intercept[DeltaIllegalStateException] { + intercept[DeltaIllegalStateException] { throw DeltaErrors.differentDeltaTableReadByStreamingSource( newTableId = "027fb01c-94aa-4cab-87cb-5aab6aec6d17", oldTableId = "2edf2c02-bb63-44e9-a84c-517fad0db296") }, - condition = "DIFFERENT_DELTA_TABLE_READ_BY_STREAMING_SOURCE", + "DIFFERENT_DELTA_TABLE_READ_BY_STREAMING_SOURCE", parameters = Map( "oldTableId" -> "2edf2c02-bb63-44e9-a84c-517fad0db296", "newTableId" -> "027fb01c-94aa-4cab-87cb-5aab6aec6d17") @@ -961,12 +961,12 @@ trait DeltaErrorsSuiteBase SchemaMergingUtils.mergeSchemas(s1, s2) } checkError( - exception = e, - condition = "DELTA_FAILED_TO_MERGE_FIELDS", + e, + "DELTA_FAILED_TO_MERGE_FIELDS", parameters = Map("currentField" -> "c0", "updateField" -> "c0")) checkError( - exception = e.getCause.asInstanceOf[DeltaAnalysisException], - condition = "DELTA_MERGE_INCOMPATIBLE_DATATYPE", + e.getCause.asInstanceOf[DeltaAnalysisException], + "DELTA_MERGE_INCOMPATIBLE_DATATYPE", parameters = Map("currentDataType" -> "IntegerType", "updateDataType" -> "StringType")) } { @@ -997,13 +997,13 @@ trait DeltaErrorsSuiteBase } { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { throw DeltaErrors.alterTableChangeColumnException( fieldPath = "a.b.c", oldField = StructField("c", IntegerType), newField = StructField("c", LongType)) }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", parameters = Map( "fieldPath" -> "a.b.c", "oldField" -> "INT", @@ -1421,14 +1421,14 @@ trait DeltaErrorsSuiteBase } { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { throw DeltaErrors.constraintDataTypeMismatch( columnPath = Seq("a", "x"), columnType = ByteType, dataType = IntegerType, constraints = Map("ck1" -> "a > 0", "ck2" -> "hash(b) > 0")) }, - condition = "DELTA_CONSTRAINT_DATA_TYPE_MISMATCH", + 
"DELTA_CONSTRAINT_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "a.x", "columnType" -> "TINYINT", @@ -1438,7 +1438,7 @@ trait DeltaErrorsSuiteBase } { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { throw DeltaErrors.generatedColumnsDataTypeMismatch( columnPath = Seq("a", "x"), columnType = ByteType, @@ -1448,7 +1448,7 @@ trait DeltaErrorsSuiteBase "gen2" -> "3 + a . x" )) }, - condition = "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", + "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "a.x", "columnType" -> "TINYINT", @@ -1916,10 +1916,10 @@ trait DeltaErrorsSuiteBase } { checkError( - exception = intercept[DeltaIllegalStateException] { + intercept[DeltaIllegalStateException] { throw MaterializedRowId.missingMetadataException("table_name") }, - condition = "DELTA_MATERIALIZED_ROW_TRACKING_COLUMN_NAME_MISSING", + "DELTA_MATERIALIZED_ROW_TRACKING_COLUMN_NAME_MISSING", parameters = Map( "rowTrackingColumn" -> "Row ID", "tableName" -> "table_name" @@ -1928,10 +1928,10 @@ trait DeltaErrorsSuiteBase } { checkError( - exception = intercept[DeltaIllegalStateException] { + intercept[DeltaIllegalStateException] { throw MaterializedRowCommitVersion.missingMetadataException("table_name") }, - condition = "DELTA_MATERIALIZED_ROW_TRACKING_COLUMN_NAME_MISSING", + "DELTA_MATERIALIZED_ROW_TRACKING_COLUMN_NAME_MISSING", parameters = Map( "rowTrackingColumn" -> "Row Commit Version", "tableName" -> "table_name" diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoImplicitCastSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoImplicitCastSuite.scala index c3cc20a6af..e6e965e0b8 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoImplicitCastSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoImplicitCastSuite.scala @@ -71,7 +71,7 @@ class DeltaInsertIntoImplicitCastSuite extends DeltaInsertIntoTest { expectedResult = ExpectedResult.Failure(ex => { checkError( ex, - condition = "DELTA_FAILED_TO_MERGE_FIELDS", + "DELTA_FAILED_TO_MERGE_FIELDS", parameters = Map( "currentField" -> "a", "updateField" -> "a" @@ -129,7 +129,7 @@ class DeltaInsertIntoImplicitCastSuite extends DeltaInsertIntoTest { expectedResult = ExpectedResult.Failure(ex => { checkError( ex, - condition = "DELTA_FAILED_TO_MERGE_FIELDS", + "DELTA_FAILED_TO_MERGE_FIELDS", parameters = Map( "currentField" -> "a", "updateField" -> "a" @@ -187,7 +187,7 @@ class DeltaInsertIntoImplicitCastSuite extends DeltaInsertIntoTest { expectedResult = ExpectedResult.Failure(ex => { checkError( ex, - condition = "DELTA_FAILED_TO_MERGE_FIELDS", + "DELTA_FAILED_TO_MERGE_FIELDS", parameters = Map( "currentField" -> "m", "updateField" -> "m" diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoTableSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoTableSuite.scala index 9bc2df9fdf..18acbc09e0 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoTableSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaInsertIntoTableSuite.scala @@ -213,10 +213,10 @@ class DeltaInsertIntoSQLSuite withTable("t") { sql(s"CREATE TABLE t(i STRING, c string) USING $v2Format PARTITIONED BY (c)") checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql("INSERT OVERWRITE t PARTITION (c='1') (c) VALUES ('2')") }, - condition = "STATIC_PARTITION_COLUMN_IN_INSERT_COLUMN_LIST", + 
"STATIC_PARTITION_COLUMN_IN_INSERT_COLUMN_LIST", parameters = Map("staticName" -> "c")) } } @@ -596,22 +596,22 @@ class DeltaColumnDefaultsInsertSuite extends InsertIntoSQLOnlyTests with DeltaSQ // The table feature is not enabled via TBLPROPERTIES. withTable("createTableWithDefaultFeatureNotEnabled") { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(s"create table createTableWithDefaultFeatureNotEnabled(" + s"i boolean, s bigint, q int default 42) using $v2Format " + "partitioned by (i)") }, - condition = "WRONG_COLUMN_DEFAULTS_FOR_DELTA_FEATURE_NOT_ENABLED", + "WRONG_COLUMN_DEFAULTS_FOR_DELTA_FEATURE_NOT_ENABLED", parameters = Map("commandType" -> "CREATE TABLE") ) } withTable("alterTableSetDefaultFeatureNotEnabled") { sql(s"create table alterTableSetDefaultFeatureNotEnabled(a int) using $v2Format") checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("alter table alterTableSetDefaultFeatureNotEnabled alter column a set default 42") }, - condition = "WRONG_COLUMN_DEFAULTS_FOR_DELTA_FEATURE_NOT_ENABLED", + "WRONG_COLUMN_DEFAULTS_FOR_DELTA_FEATURE_NOT_ENABLED", parameters = Map("commandType" -> "ALTER TABLE") ) } @@ -620,19 +620,19 @@ class DeltaColumnDefaultsInsertSuite extends InsertIntoSQLOnlyTests with DeltaSQ sql(s"create table alterTableTest(i boolean, s bigint, q int default 42) using $v2Format " + s"partitioned by (i) $tblPropertiesAllowDefaults") checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("alter table alterTableTest add column z int default 42") }, - condition = "WRONG_COLUMN_DEFAULTS_FOR_DELTA_ALTER_TABLE_ADD_COLUMN_NOT_SUPPORTED" + "WRONG_COLUMN_DEFAULTS_FOR_DELTA_ALTER_TABLE_ADD_COLUMN_NOT_SUPPORTED" ) } // The default value fails to analyze. checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"create table t4 (s int default badvalue) using $v2Format " + s"$tblPropertiesAllowDefaults") }, - condition = INVALID_COLUMN_DEFAULT_VALUE_ERROR_MSG, + INVALID_COLUMN_DEFAULT_VALUE_ERROR_MSG, parameters = Map( "statement" -> "CREATE TABLE", "colName" -> "`s`", @@ -642,11 +642,11 @@ class DeltaColumnDefaultsInsertSuite extends InsertIntoSQLOnlyTests with DeltaSQ // The error message reports that we failed to execute the command because subquery // expressions are not allowed in DEFAULT values. checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"create table t4 (s int default (select min(x) from badtable)) using $v2Format " + tblPropertiesAllowDefaults) }, - condition = "INVALID_DEFAULT_VALUE.SUBQUERY_EXPRESSION", + "INVALID_DEFAULT_VALUE.SUBQUERY_EXPRESSION", parameters = Map( "statement" -> "CREATE TABLE", "colName" -> "`s`", @@ -656,22 +656,22 @@ class DeltaColumnDefaultsInsertSuite extends InsertIntoSQLOnlyTests with DeltaSQ // The error message reports that we failed to execute the command because subquery // expressions are not allowed in DEFAULT values. checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"create table t4 (s int default (select 42 as alias)) using $v2Format " + tblPropertiesAllowDefaults) }, - condition = "INVALID_DEFAULT_VALUE.SUBQUERY_EXPRESSION", + "INVALID_DEFAULT_VALUE.SUBQUERY_EXPRESSION", parameters = Map( "statement" -> "CREATE TABLE", "colName" -> "`s`", "defaultValue" -> "(select 42 as alias)")) // The default value parses but the type is not coercible. 
checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"create table t4 (s bigint default false) " + s"using $v2Format $tblPropertiesAllowDefaults") }, - condition = "INVALID_DEFAULT_VALUE.DATA_TYPE", + "INVALID_DEFAULT_VALUE.DATA_TYPE", parameters = Map( "statement" -> "CREATE TABLE", "colName" -> "`s`", @@ -702,11 +702,11 @@ class DeltaColumnDefaultsInsertSuite extends InsertIntoSQLOnlyTests with DeltaSQ // Column default values are disabled per configuration in general. withSQLConf(SQLConf.ENABLE_DEFAULT_COLUMNS.key -> "false") { checkError( - exception = intercept[ParseException] { + intercept[ParseException] { sql(s"create table t4 (s int default 41 + 1) using $v2Format " + tblPropertiesAllowDefaults) }, - condition = "UNSUPPORTED_DEFAULT_VALUE.WITH_SUGGESTION", + "UNSUPPORTED_DEFAULT_VALUE.WITH_SUGGESTION", parameters = Map.empty, context = ExpectedContext(fragment = "s int default 41 + 1", start = 17, stop = 36)) } diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaProtocolVersionSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaProtocolVersionSuite.scala index d8b53e5218..56488cf28f 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaProtocolVersionSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaProtocolVersionSuite.scala @@ -2531,8 +2531,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest feature.name).run(spark) } checkError( - exception = e1, - condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD", + e1, + "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD", parameters = Map( "feature" -> feature.name, "logRetentionPeriodKey" -> "delta.logRetentionDuration", @@ -2550,8 +2550,8 @@ trait DeltaProtocolVersionSuiteBase extends QueryTest feature.name).run(spark) } checkError( - exception = e2, - condition = "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST", + e2, + "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST", parameters = Map( "feature" -> feature.name, "logRetentionPeriodKey" -> "delta.logRetentionDuration", diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkImplicitCastSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkImplicitCastSuite.scala index a86a14d9cf..0d2008e52c 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkImplicitCastSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkImplicitCastSuite.scala @@ -141,8 +141,8 @@ class DeltaSinkImplicitCastSuite extends DeltaSinkImplicitCastSuiteBase { stream.write(Long.MaxValue)("CAST(value AS LONG)") } checkError( - exception = ex.getCause.asInstanceOf[SparkThrowable], - condition = "CANNOT_UP_CAST_DATATYPE", + ex.getCause.asInstanceOf[SparkThrowable], + "CANNOT_UP_CAST_DATATYPE", parameters = Map( "expression" -> "value", "sourceType" -> toSQLType("BIGINT"), @@ -173,8 +173,8 @@ class DeltaSinkImplicitCastSuite extends DeltaSinkImplicitCastSuiteBase { case e => fail(s"Unexpected exception: $e") } checkError( - exception = getSparkArithmeticException(ex), - condition = "CAST_OVERFLOW_IN_TABLE_INSERT", + getSparkArithmeticException(ex), + "CAST_OVERFLOW_IN_TABLE_INSERT", parameters = Map( "sourceType" -> "\"BIGINT\"", "targetType" -> "\"INT\"", @@ -276,8 +276,8 @@ class DeltaSinkImplicitCastSuite extends DeltaSinkImplicitCastSuiteBase { stream.write(-12)("array(value) AS s") } checkError( - exception = ex.getCause.asInstanceOf[SparkThrowable], - condition = "DELTA_FAILED_TO_MERGE_FIELDS", + ex.getCause.asInstanceOf[SparkThrowable], + 
"DELTA_FAILED_TO_MERGE_FIELDS", parameters = Map( "currentField" -> "s", "updateField" -> "s") @@ -473,8 +473,8 @@ class DeltaSinkImplicitCastSuite extends DeltaSinkImplicitCastSuiteBase { stream.write(23)("CAST(value AS LONG)") } checkError( - exception = ex.getCause.asInstanceOf[SparkThrowable], - condition = "DELTA_FAILED_TO_MERGE_FIELDS", + ex.getCause.asInstanceOf[SparkThrowable], + "DELTA_FAILED_TO_MERGE_FIELDS", parameters = Map( "currentField" -> "value", "updateField" -> "value") diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkSuite.scala index 9e8a4063bc..1578c88c12 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkSuite.scala @@ -407,8 +407,8 @@ class DeltaSinkSuite .save(outputDir.getCanonicalPath) } checkError( - exception = e, - condition = "DELTA_FAILED_TO_MERGE_FIELDS", + e, + "DELTA_FAILED_TO_MERGE_FIELDS", parameters = Map("currentField" -> "id", "updateField" -> "id")) } finally { query.stop() @@ -442,8 +442,8 @@ class DeltaSinkSuite } assert(wrapperException.cause.isInstanceOf[AnalysisException]) checkError( - exception = wrapperException.cause.asInstanceOf[AnalysisException], - condition = "DELTA_FAILED_TO_MERGE_FIELDS", + wrapperException.cause.asInstanceOf[AnalysisException], + "DELTA_FAILED_TO_MERGE_FIELDS", parameters = Map("currentField" -> "id", "updateField" -> "id")) } } diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSuite.scala index f1180fcd51..d87b2eebd3 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSuite.scala @@ -1305,8 +1305,8 @@ class DeltaSuite extends QueryTest .save(tempDir.toString) } checkError( - exception = e, - condition = "DELTA_FAILED_TO_MERGE_FIELDS", + e, + "DELTA_FAILED_TO_MERGE_FIELDS", parameters = Map("currentField" -> "value", "updateField" -> "value")) } } diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaTableCreationTests.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaTableCreationTests.scala index 42a57a6029..5b5db01d38 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaTableCreationTests.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaTableCreationTests.scala @@ -2391,8 +2391,8 @@ class DeltaTableCreationSuite s" LOCATION '${subdir.getCanonicalPath}'") } checkError( - exception = e, - condition = "DELTA_METADATA_ABSENT_EXISTING_CATALOG_TABLE", + e, + "DELTA_METADATA_ABSENT_EXISTING_CATALOG_TABLE", parameters = Map( "tableName" -> tableName, "tablePath" -> deltaLog.logPath.toString, diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/GeneratedColumnSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/GeneratedColumnSuite.scala index e14fe6b977..218b51b677 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/GeneratedColumnSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/GeneratedColumnSuite.scala @@ -719,12 +719,12 @@ trait GeneratedColumnSuiteBase assert(tableSchema == spark.table(table).schema) // Insert a LONG to `c1` should fail rather than changing the `c1` type to LONG. 
checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { Seq(32767.toLong).toDF("c1").write.format("delta").mode("append") .option("mergeSchema", "true") .saveAsTable(table) }, - condition = "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", + "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "c1", "columnType" -> "INT", @@ -754,14 +754,14 @@ trait GeneratedColumnSuiteBase // Insert an INT to `a` should fail rather than changing the `a` type to INT checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { Seq((32767, 32767)).toDF("a", "c1a") .selectExpr("a", "named_struct('a', c1a) as c1") .write.format("delta").mode("append") .option("mergeSchema", "true") .saveAsTable(table) }, - condition = "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", + "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "a", "columnType" -> "SMALLINT", diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/InCommitTimestampSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/InCommitTimestampSuite.scala index 3962e9d04d..14e80aeea0 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/InCommitTimestampSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/InCommitTimestampSuite.scala @@ -205,8 +205,8 @@ class InCommitTimestampSuite latestSnapshot.timestamp } checkError( - exception = e, - condition = "DELTA_MISSING_COMMIT_INFO", + e, + "DELTA_MISSING_COMMIT_INFO", parameters = Map( "featureName" -> InCommitTimestampTableFeature.name, "version" -> "1")) @@ -244,8 +244,8 @@ class InCommitTimestampSuite latestSnapshot.timestamp } checkError( - exception = e, - condition = "DELTA_MISSING_COMMIT_TIMESTAMP", + e, + "DELTA_MISSING_COMMIT_TIMESTAMP", parameters = Map("featureName" -> InCommitTimestampTableFeature.name, "version" -> "1")) } } diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/SchemaValidationSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/SchemaValidationSuite.scala index 83fd0eef82..70924195f3 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/SchemaValidationSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/SchemaValidationSuite.scala @@ -348,8 +348,8 @@ class SchemaValidationSuite } checkErrorMatchPVals( - exception = e, - condition = "DELTA_SCHEMA_CHANGE_SINCE_ANALYSIS", + e, + "DELTA_SCHEMA_CHANGE_SINCE_ANALYSIS", parameters = Map( "schemaDiff" -> ".*id.*", "legacyFlagMessage" -> "" @@ -383,8 +383,8 @@ class SchemaValidationSuite .execute() } checkErrorMatchPVals( - exception = e, - condition = "DELTA_SCHEMA_CHANGE_SINCE_ANALYSIS", + e, + "DELTA_SCHEMA_CHANGE_SINCE_ANALYSIS", parameters = Map( "schemaDiff" -> ".*col2.*", "legacyFlagMessage" -> "" diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/UpdateSQLSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/UpdateSQLSuite.scala index f9f1123444..fa046d00cb 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/UpdateSQLSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/UpdateSQLSuite.scala @@ -117,10 +117,10 @@ class UpdateSQLSuite extends UpdateSuiteBase SQLConf.STORE_ASSIGNMENT_POLICY.key -> StoreAssignmentPolicy.STRICT.toString, DeltaSQLConf.UPDATE_AND_MERGE_CASTING_FOLLOWS_ANSI_ENABLED_FLAG.key -> "false") { checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { executeUpdate(target = s"delta.`$tempPath`", set = "value = 'false'") }, - condition = "CANNOT_UP_CAST_DATATYPE", + 
"CANNOT_UP_CAST_DATATYPE", parameters = Map( "expression" -> "'false'", "sourceType" -> toSQLType("STRING"), @@ -139,11 +139,11 @@ class UpdateSQLSuite extends UpdateSuiteBase SQLConf.STORE_ASSIGNMENT_POLICY.key -> StoreAssignmentPolicy.STRICT.toString, DeltaSQLConf.UPDATE_AND_MERGE_CASTING_FOLLOWS_ANSI_ENABLED_FLAG.key -> "false") { checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { executeUpdate(target = s"delta.`$tempPath`", set = "value = '5'") }, - condition = "CANNOT_UP_CAST_DATATYPE", - parameters = Map( + "CANNOT_UP_CAST_DATATYPE", + parameters = Map( "expression" -> "'5'", "sourceType" -> toSQLType("STRING"), "targetType" -> toSQLType("INT"), diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/columnmapping/DropColumnMappingFeatureSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/columnmapping/DropColumnMappingFeatureSuite.scala index 4f61db4f55..be5abc7f37 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/columnmapping/DropColumnMappingFeatureSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/columnmapping/DropColumnMappingFeatureSuite.scala @@ -58,8 +58,7 @@ class DropColumnMappingFeatureSuite extends RemoveColumnMappingSuiteUtils { dropColumnMappingTableFeature() } checkError(e, - condition = DeltaErrors.dropTableFeatureFeatureNotSupportedByProtocol(".") - .getErrorClass, + DeltaErrors.dropTableFeatureFeatureNotSupportedByProtocol(".").getErrorClass, parameters = Map("feature" -> "columnMapping")) } @@ -76,7 +75,7 @@ class DropColumnMappingFeatureSuite extends RemoveColumnMappingSuiteUtils { dropColumnMappingTableFeature() } checkError(e, - condition = "DELTA_INVALID_COLUMN_NAMES_WHEN_REMOVING_COLUMN_MAPPING", + "DELTA_INVALID_COLUMN_NAMES_WHEN_REMOVING_COLUMN_MAPPING", parameters = Map("invalidColumnNames" -> "col1 with special chars ,;{}()\n\t=")) } @@ -125,7 +124,7 @@ class DropColumnMappingFeatureSuite extends RemoveColumnMappingSuiteUtils { } checkError( e, - condition = "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST", + "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST", parameters = Map( "feature" -> "columnMapping", "logRetentionPeriodKey" -> "delta.logRetentionDuration", @@ -168,7 +167,7 @@ class DropColumnMappingFeatureSuite extends RemoveColumnMappingSuiteUtils { } checkError( e, - condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD", + "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD", parameters = Map( "feature" -> "columnMapping", "logRetentionPeriodKey" -> "delta.logRetentionDuration", diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsSuite.scala index 96010c8424..9284b27a57 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/coordinatedcommits/CoordinatedCommitsSuite.scala @@ -1322,7 +1322,7 @@ class CoordinatedCommitsSuite tableMutationFn() } checkError(e, - condition = "DELTA_UNSUPPORTED_WRITES_WITHOUT_COORDINATOR", + "DELTA_UNSUPPORTED_WRITES_WITHOUT_COORDINATOR", sqlState = "0AKDC", parameters = Map("coordinatorName" -> "tracking-in-memory") ) @@ -1585,8 +1585,8 @@ class CoordinatedCommitsSuite s"'${COORDINATED_COMMITS_COORDINATOR_CONF.key}' = '${JsonUtils.toJson(Map())}')") } checkError( - exception = e, - condition = "DELTA_CANNOT_OVERRIDE_COORDINATED_COMMITS_CONFS", + e, + 
"DELTA_CANNOT_OVERRIDE_COORDINATED_COMMITS_CONFS", sqlState = "42616", parameters = Map("Command" -> "ALTER")) } diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/rowtracking/MaterializedColumnSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/rowtracking/MaterializedColumnSuite.scala index 4b7932b6e4..6b4802f2c0 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/rowtracking/MaterializedColumnSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/rowtracking/MaterializedColumnSuite.scala @@ -89,7 +89,7 @@ class MaterializedColumnSuite extends RowIdTestUtils sql(s"ALTER TABLE $testTableName " + s"RENAME COLUMN $testDataColumnName TO `$materializedColumnName`") } - checkError(error, condition = "DELTA_ADDING_COLUMN_WITH_INTERNAL_NAME_FAILED", + checkError(error, "DELTA_ADDING_COLUMN_WITH_INTERNAL_NAME_FAILED", parameters = Map("colName" -> materializedColumnName)) } } @@ -111,7 +111,7 @@ class MaterializedColumnSuite extends RowIdTestUtils val error = intercept[DeltaRuntimeException] { sql(s"CREATE OR REPLACE TABLE $targetName SHALLOW CLONE $sourceName") } - checkError(error, condition = "DELTA_ADDING_COLUMN_WITH_INTERNAL_NAME_FAILED", + checkError(error, "DELTA_ADDING_COLUMN_WITH_INTERNAL_NAME_FAILED", parameters = Map("colName" -> materializedColumnName)) } } diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/rowtracking/RowTrackingReadWriteSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/rowtracking/RowTrackingReadWriteSuite.scala index 941ce4781e..80fc755625 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/rowtracking/RowTrackingReadWriteSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/rowtracking/RowTrackingReadWriteSuite.scala @@ -220,7 +220,7 @@ class RowTrackingReadWriteSuite extends RowIdTestUtils val errorRowIds = intercept[AnalysisException](sql(insertStmt1 + " VALUES(1, 2)")) checkError( errorRowIds, - condition = "UNRESOLVED_COLUMN.WITH_SUGGESTION", + "UNRESOLVED_COLUMN.WITH_SUGGESTION", parameters = errorRowIds.messageParameters, queryContext = Array(ExpectedContext(insertStmt1, 0, insertStmt1.length - 1))) @@ -229,7 +229,7 @@ class RowTrackingReadWriteSuite extends RowIdTestUtils val errorRowCommitVersions = intercept[AnalysisException](sql(insertStmt2 + " VALUES(1, 2)")) checkError( errorRowCommitVersions, - condition = "UNRESOLVED_COLUMN.WITH_SUGGESTION", + "UNRESOLVED_COLUMN.WITH_SUGGESTION", parameters = errorRowCommitVersions.messageParameters, queryContext = Array(ExpectedContext(insertStmt2, 0, insertStmt2.length - 1))) } @@ -285,7 +285,7 @@ class RowTrackingReadWriteSuite extends RowIdTestUtils } checkError( error, - condition = "UNRESOLVED_COLUMN.WITH_SUGGESTION", + "UNRESOLVED_COLUMN.WITH_SUGGESTION", parameters = error.messageParameters) } @@ -303,7 +303,7 @@ class RowTrackingReadWriteSuite extends RowIdTestUtils } checkError( error, - condition = "UNRESOLVED_COLUMN.WITH_SUGGESTION", + "UNRESOLVED_COLUMN.WITH_SUGGESTION", parameters = error.messageParameters) } } diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/schema/CheckConstraintsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/schema/CheckConstraintsSuite.scala index 501a51252c..f86fe164e8 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/schema/CheckConstraintsSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/schema/CheckConstraintsSuite.scala @@ -80,7 +80,7 @@ class CheckConstraintsSuite extends QueryTest exception = intercept[AnalysisException] { sql(s"ALTER TABLE 
$table ADD CONSTRAINT integerVal CHECK (3)") }, - condition = "DELTA_NON_BOOLEAN_CHECK_CONSTRAINT", + "DELTA_NON_BOOLEAN_CHECK_CONSTRAINT", parameters = Map( "name" -> "integerVal", "expr" -> "3" @@ -92,10 +92,10 @@ class CheckConstraintsSuite extends QueryTest test("can't add constraint referencing non-existent columns") { withTestTable { table => checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"ALTER TABLE $table ADD CONSTRAINT c CHECK (does_not_exist)") }, - condition = "UNRESOLVED_COLUMN.WITH_SUGGESTION", + "UNRESOLVED_COLUMN.WITH_SUGGESTION", parameters = Map( "objectName" -> "`does_not_exist`", "proposal" -> "`text`, `num`" @@ -451,7 +451,7 @@ class CheckConstraintsSuite extends QueryTest } checkError( exception, - condition = "DELTA_EXCEED_CHAR_VARCHAR_LIMIT", + "DELTA_EXCEED_CHAR_VARCHAR_LIMIT", parameters = Map( "value" -> "a very long string", "expr" -> "((value IS NULL) OR (length(value) <= 12))" @@ -474,7 +474,7 @@ class CheckConstraintsSuite extends QueryTest } checkError( error1, - condition = "DELTA_CANNOT_DROP_CHECK_CONSTRAINT_FEATURE", + "DELTA_CANNOT_DROP_CHECK_CONSTRAINT_FEATURE", parameters = Map("constraints" -> "`c1`, `c2`") ) val deltaLog = DeltaLog.forTable(spark, TableIdentifier("table")) @@ -488,7 +488,7 @@ class CheckConstraintsSuite extends QueryTest } checkError( error2, - condition = "DELTA_CANNOT_DROP_CHECK_CONSTRAINT_FEATURE", + "DELTA_CANNOT_DROP_CHECK_CONSTRAINT_FEATURE", parameters = Map("constraints" -> "`c2`") ) val featureNames2 = diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/schema/SchemaUtilsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/schema/SchemaUtilsSuite.scala index cd75f9db28..8c19cc79ff 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/schema/SchemaUtilsSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/schema/SchemaUtilsSuite.scala @@ -87,8 +87,8 @@ class SchemaUtilsSuite extends QueryTest val err = getError(e) assert(err.isDefined, "exception with the error class not found") checkError( - exception = err.get, - condition = errorClass, + err.get, + errorClass, parameters = params, matchPVals = true) } @@ -1680,8 +1680,8 @@ class SchemaUtilsSuite extends QueryTest Seq("x", "Y"), new StructType()) } checkError( - exception = exception, - condition = "DELTA_CANNOT_RESOLVE_COLUMN", + exception, + "DELTA_CANNOT_RESOLVE_COLUMN", sqlState = "42703", parameters = Map("columnName" -> "x.Y.bb", "schema" -> "root\n") ) diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/skipping/clustering/ClusteredTableDDLSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/skipping/clustering/ClusteredTableDDLSuite.scala index c374295e9a..2acf0205ab 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/skipping/clustering/ClusteredTableDDLSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/skipping/clustering/ClusteredTableDDLSuite.scala @@ -267,8 +267,8 @@ trait ClusteredTableCreateOrReplaceDDLSuiteBase extends QueryTest assert(dataTypeOpt.nonEmpty, s"Can't find column $colName " + s"in schema ${tableSchema.treeString}") checkError( - exception = e, - condition = "DELTA_CLUSTERING_COLUMNS_DATATYPE_NOT_SUPPORTED", + e, + "DELTA_CLUSTERING_COLUMNS_DATATYPE_NOT_SUPPORTED", parameters = Map("columnsWithDataTypes" -> s"$colName : ${dataTypeOpt.get.sql}") ) } @@ -287,8 +287,8 @@ trait ClusteredTableCreateOrReplaceDDLSuiteBase extends QueryTest "CREATE", testTable, "a INT, b INT, c INT, d INT, e INT", "a, b, c, d, e") } checkError( 
- exception = e, - condition = "DELTA_CLUSTER_BY_INVALID_NUM_COLUMNS", + e, + "DELTA_CLUSTER_BY_INVALID_NUM_COLUMNS", parameters = Map("numColumnsLimit" -> "4", "actualNumColumns" -> "5") ) } @@ -305,8 +305,8 @@ trait ClusteredTableCreateOrReplaceDDLSuiteBase extends QueryTest "CREATE", testTable, sourceTable, "a, b, c, d, e", location = location) } checkError( - exception = e, - condition = "DELTA_CLUSTER_BY_INVALID_NUM_COLUMNS", + e, + "DELTA_CLUSTER_BY_INVALID_NUM_COLUMNS", parameters = Map("numColumnsLimit" -> "4", "actualNumColumns" -> "5") ) } @@ -354,8 +354,8 @@ trait ClusteredTableCreateOrReplaceDDLSuiteBase extends QueryTest indexedColumns, Some(tableSchema))) checkError( - exception = e, - condition = "DELTA_CLUSTERING_COLUMN_MISSING_STATS", + e, + "DELTA_CLUSTERING_COLUMN_MISSING_STATS", parameters = Map( "columns" -> "col1.col12, col2", "schema" -> """root @@ -411,8 +411,8 @@ trait ClusteredTableCreateOrReplaceDDLSuiteBase extends QueryTest None, location = Some(dir.getPath))) checkError( - exception = e, - condition = "DELTA_CLUSTERING_COLUMN_MISSING_STATS", + e, + "DELTA_CLUSTERING_COLUMN_MISSING_STATS", parameters = Map( "columns" -> "col1.col12, col2", "schema" -> """root @@ -456,8 +456,8 @@ trait ClusteredTableCreateOrReplaceDDLSuiteBase extends QueryTest indexedColumns, Some(nonEligibleTableSchema))) checkError( - exception = e, - condition = "DELTA_CLUSTERING_COLUMNS_DATATYPE_NOT_SUPPORTED", + e, + "DELTA_CLUSTERING_COLUMNS_DATATYPE_NOT_SUPPORTED", parameters = Map("columnsWithDataTypes" -> "col1.col11 : ARRAY") ) } @@ -553,8 +553,8 @@ trait ClusteredTableDDLWithColumnMapping sql(s"ALTER TABLE $testTable DROP COLUMNS (col1)") } checkError( - exception = e, - condition = "DELTA_UNSUPPORTED_DROP_CLUSTERING_COLUMN", + e, + "DELTA_UNSUPPORTED_DROP_CLUSTERING_COLUMN", parameters = Map("columnList" -> "col1") ) // Drop non-clustering columns are allowed. 
@@ -568,8 +568,8 @@ trait ClusteredTableDDLWithColumnMapping sql(s"ALTER TABLE $testTable DROP COLUMNS (col1, col2)") } checkError( - exception = e, - condition = "DELTA_UNSUPPORTED_DROP_CLUSTERING_COLUMN", + e, + "DELTA_UNSUPPORTED_DROP_CLUSTERING_COLUMN", parameters = Map("columnList" -> "col1,col2") ) } @@ -582,8 +582,8 @@ trait ClusteredTableDDLWithColumnMapping sql(s"ALTER TABLE $testTable DROP COLUMNS (col1, col3)") } checkError( - exception = e, - condition = "DELTA_UNSUPPORTED_DROP_CLUSTERING_COLUMN", + e, + "DELTA_UNSUPPORTED_DROP_CLUSTERING_COLUMN", parameters = Map("columnList" -> "col1") ) } @@ -659,7 +659,7 @@ trait ClusteredTableDDLSuiteBase } checkError( e, - condition = "DELTA_CLUSTER_BY_INVALID_NUM_COLUMNS", + "DELTA_CLUSTER_BY_INVALID_NUM_COLUMNS", parameters = Map( "numColumnsLimit" -> "4", "actualNumColumns" -> "5") @@ -782,8 +782,8 @@ trait ClusteredTableDDLSuiteBase sql(s"OPTIMIZE $testTable ZORDER BY (a)") } checkError( - exception = e2, - condition = "DELTA_CLUSTERING_WITH_ZORDER_BY", + e2, + "DELTA_CLUSTERING_WITH_ZORDER_BY", parameters = Map("zOrderBy" -> "a") ) } @@ -911,7 +911,7 @@ trait ClusteredTableDDLSuiteBase } checkError( e, - condition = "DELTA_CANNOT_MODIFY_TABLE_PROPERTY", + "DELTA_CANNOT_MODIFY_TABLE_PROPERTY", parameters = Map("prop" -> "clusteringColumns")) } } diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningAlterTableNestedSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningAlterTableNestedSuite.scala index b1c6e3ff3e..476bfecbde 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningAlterTableNestedSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningAlterTableNestedSuite.scala @@ -57,8 +57,8 @@ trait TypeWideningAlterTableNestedTests { // Running ALTER TABLE CHANGE COLUMN on non-leaf fields is invalid. 
var alterTableSql = s"ALTER TABLE delta.`$tempPath` CHANGE COLUMN s TYPE struct" checkError( - exception = intercept[AnalysisException] { sql(alterTableSql) }, - condition = "CANNOT_UPDATE_FIELD.STRUCT_TYPE", + intercept[AnalysisException] { sql(alterTableSql) }, + "CANNOT_UPDATE_FIELD.STRUCT_TYPE", parameters = Map( "table" -> s"`spark_catalog`.`delta`.`$tempPath`", "fieldName" -> "`s`" @@ -71,8 +71,8 @@ trait TypeWideningAlterTableNestedTests { alterTableSql = s"ALTER TABLE delta.`$tempPath` CHANGE COLUMN m TYPE map" checkError( - exception = intercept[AnalysisException] { sql(alterTableSql) }, - condition = "CANNOT_UPDATE_FIELD.MAP_TYPE", + intercept[AnalysisException] { sql(alterTableSql) }, + "CANNOT_UPDATE_FIELD.MAP_TYPE", parameters = Map( "table" -> s"`spark_catalog`.`delta`.`$tempPath`", "fieldName" -> "`m`" @@ -85,8 +85,8 @@ trait TypeWideningAlterTableNestedTests { alterTableSql = s"ALTER TABLE delta.`$tempPath` CHANGE COLUMN a TYPE array" checkError( - exception = intercept[AnalysisException] { sql(alterTableSql) }, - condition = "CANNOT_UPDATE_FIELD.ARRAY_TYPE", + intercept[AnalysisException] { sql(alterTableSql) }, + "CANNOT_UPDATE_FIELD.ARRAY_TYPE", parameters = Map( "table" -> s"`spark_catalog`.`delta`.`$tempPath`", "fieldName" -> "`a`" diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningAlterTableSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningAlterTableSuite.scala index e41daf95cb..f772c9ca2a 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningAlterTableSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningAlterTableSuite.scala @@ -99,10 +99,10 @@ trait TypeWideningAlterTableTests // are rejected in Delta when the ALTER TABLE command is executed. 
if (Cast.canUpCast(testCase.fromType, testCase.toType)) { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql(alterTableSql) }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", sqlState = None, parameters = Map( "fieldPath" -> "value", @@ -111,10 +111,10 @@ trait TypeWideningAlterTableTests ) } else { checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(alterTableSql) }, - condition = "NOT_SUPPORTED_CHANGE_COLUMN", + "NOT_SUPPORTED_CHANGE_COLUMN", sqlState = None, parameters = Map( "table" -> s"`spark_catalog`.`delta`.`$tempPath`", @@ -176,10 +176,10 @@ trait TypeWideningAlterTableTests .mkString(", ") checkError( - exception = intercept[DeltaTableFeatureException] { + intercept[DeltaTableFeatureException] { sql(s"ALTER TABLE delta.`$tempPath` CHANGE COLUMN a TYPE TIMESTAMP_NTZ") }, - condition = "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT", + "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT", parameters = Map( "unsupportedFeatures" -> "timestampNtz", "supportedFeatures" -> currentFeatures diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningConstraintsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningConstraintsSuite.scala index dc6f1525a3..17e75bbb2d 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningConstraintsSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningConstraintsSuite.scala @@ -57,10 +57,10 @@ trait TypeWideningConstraintsTests { self: QueryTest with TypeWideningTestMixin // Changing the type of a column that a CHECK constraint depends on is not allowed. checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("ALTER TABLE t CHANGE COLUMN a TYPE SMALLINT") }, - condition = "DELTA_CONSTRAINT_DEPENDENT_COLUMN_CHANGE", + "DELTA_CONSTRAINT_DEPENDENT_COLUMN_CHANGE", parameters = Map( "columnName" -> "a", "constraints" -> "delta.constraints.ck -> hash ( a ) > 0" @@ -81,10 +81,10 @@ trait TypeWideningConstraintsTests { self: QueryTest with TypeWideningTestMixin checkAnswer(sql("SELECT hash(a.x) FROM t"), Row(1765031574)) checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("ALTER TABLE t CHANGE COLUMN a.x TYPE SMALLINT") }, - condition = "DELTA_CONSTRAINT_DEPENDENT_COLUMN_CHANGE", + "DELTA_CONSTRAINT_DEPENDENT_COLUMN_CHANGE", parameters = Map( "columnName" -> "a.x", "constraints" -> "delta.constraints.ck -> hash ( a . 
x ) > 0" @@ -105,10 +105,10 @@ trait TypeWideningConstraintsTests { self: QueryTest with TypeWideningTestMixin withSQLConf(DeltaSQLConf.DELTA_SCHEMA_AUTO_MIGRATE.key -> "true") { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("INSERT INTO t VALUES (200)") }, - condition = "DELTA_CONSTRAINT_DATA_TYPE_MISMATCH", + "DELTA_CONSTRAINT_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "a", "columnType" -> "TINYINT", @@ -128,10 +128,10 @@ trait TypeWideningConstraintsTests { self: QueryTest with TypeWideningTestMixin withSQLConf(DeltaSQLConf.DELTA_SCHEMA_AUTO_MIGRATE.key -> "true") { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("INSERT INTO t (a) VALUES (named_struct('x', 200, 'y', CAST(5 AS byte)))") }, - condition = "DELTA_CONSTRAINT_DATA_TYPE_MISMATCH", + "DELTA_CONSTRAINT_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "a.x", "columnType" -> "TINYINT", @@ -157,7 +157,7 @@ trait TypeWideningConstraintsTests { self: QueryTest with TypeWideningTestMixin withSQLConf(DeltaSQLConf.DELTA_SCHEMA_AUTO_MIGRATE.key -> "true") { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql( s""" | INSERT INTO t (a) VALUES ( @@ -166,7 +166,7 @@ trait TypeWideningConstraintsTests { self: QueryTest with TypeWideningTestMixin |""".stripMargin ) }, - condition = "DELTA_CONSTRAINT_DATA_TYPE_MISMATCH", + "DELTA_CONSTRAINT_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "a.x.z", "columnType" -> "TINYINT", diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningFeatureCompatibilitySuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningFeatureCompatibilitySuite.scala index d764fff8cc..d524d2fcd6 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningFeatureCompatibilitySuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningFeatureCompatibilitySuite.scala @@ -53,10 +53,10 @@ trait TypeWideningCompatibilityTests { .drop(CDCReader.CDC_COMMIT_VERSION) checkErrorMatchPVals( - exception = intercept[DeltaUnsupportedOperationException] { + intercept[DeltaUnsupportedOperationException] { readCDF(start = 1, end = 1).collect() }, - condition = "DELTA_CHANGE_DATA_FEED_INCOMPATIBLE_DATA_SCHEMA", + "DELTA_CHANGE_DATA_FEED_INCOMPATIBLE_DATA_SCHEMA", parameters = Map( "start" -> "1", "end" -> "1", @@ -92,10 +92,10 @@ trait TypeWideningCompatibilityTests { checkAnswer(readCDF(start = 1, end = 1), Seq(Row(1, "insert"), Row(2, "insert"))) checkErrorMatchPVals( - exception = intercept[DeltaUnsupportedOperationException] { + intercept[DeltaUnsupportedOperationException] { readCDF(start = 1, end = 3) }, - condition = "DELTA_CHANGE_DATA_FEED_INCOMPATIBLE_SCHEMA_CHANGE", + "DELTA_CHANGE_DATA_FEED_INCOMPATIBLE_SCHEMA_CHANGE", parameters = Map( "start" -> "1", "end" -> "3", diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningGeneratedColumnsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningGeneratedColumnsSuite.scala index e4b7cd8e2f..7f8ebc2033 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningGeneratedColumnsSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningGeneratedColumnsSuite.scala @@ -48,10 +48,10 @@ trait TypeWideningGeneratedColumnTests extends GeneratedColumnTest { // Changing the type 
of a column that a generated column depends on is not allowed. checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("ALTER TABLE t CHANGE COLUMN a TYPE SMALLINT") }, - condition = "DELTA_GENERATED_COLUMNS_DEPENDENT_COLUMN_CHANGE", + "DELTA_GENERATED_COLUMNS_DEPENDENT_COLUMN_CHANGE", parameters = Map( "columnName" -> "a", "generatedColumns" -> "gen -> hash(a)" @@ -77,10 +77,10 @@ trait TypeWideningGeneratedColumnTests extends GeneratedColumnTest { checkAnswer(sql("SELECT hash(a.x) FROM t"), Row(1765031574)) checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("ALTER TABLE t CHANGE COLUMN a.x TYPE SMALLINT") }, - condition = "DELTA_GENERATED_COLUMNS_DEPENDENT_COLUMN_CHANGE", + "DELTA_GENERATED_COLUMNS_DEPENDENT_COLUMN_CHANGE", parameters = Map( "columnName" -> "a.x", "generatedColumns" -> "gen -> hash(a.x)" @@ -106,10 +106,10 @@ trait TypeWideningGeneratedColumnTests extends GeneratedColumnTest { withSQLConf(DeltaSQLConf.DELTA_SCHEMA_AUTO_MIGRATE.key -> "true") { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("INSERT INTO t (a) VALUES (200)") }, - condition = "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", + "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "a", "columnType" -> "TINYINT", @@ -134,10 +134,10 @@ trait TypeWideningGeneratedColumnTests extends GeneratedColumnTest { withSQLConf(DeltaSQLConf.DELTA_SCHEMA_AUTO_MIGRATE.key -> "true") { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql("INSERT INTO t (a) VALUES (named_struct('x', 200, 'y', CAST(5 AS byte)))") }, - condition = "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", + "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "a.x", "columnType" -> "TINYINT", @@ -169,7 +169,7 @@ trait TypeWideningGeneratedColumnTests extends GeneratedColumnTest { withSQLConf(DeltaSQLConf.DELTA_SCHEMA_AUTO_MIGRATE.key -> "true") { checkError( - exception = intercept[DeltaAnalysisException] { + intercept[DeltaAnalysisException] { sql( s""" | INSERT INTO t (a) VALUES ( @@ -178,7 +178,7 @@ trait TypeWideningGeneratedColumnTests extends GeneratedColumnTest { |""".stripMargin ) }, - condition = "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", + "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH", parameters = Map( "columnName" -> "a.x.z", "columnType" -> "TINYINT", diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningTableFeatureSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningTableFeatureSuite.scala index acdf755549..64805d2602 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningTableFeatureSuite.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningTableFeatureSuite.scala @@ -110,11 +110,11 @@ trait TypeWideningTableFeatureTests extends RowTrackingTestUtils with TypeWideni sql(s"CREATE TABLE delta.`$tempPath` (a int) USING DELTA " + s"TBLPROPERTIES ('${DeltaConfigs.ENABLE_TYPE_WIDENING.key}' = 'false')") checkError( - exception = intercept[SparkException] { + intercept[SparkException] { sql(s"ALTER TABLE delta.`$tempPath` " + s"SET TBLPROPERTIES ('${DeltaConfigs.ENABLE_TYPE_WIDENING.key}' = 'bla')") }, - condition = "_LEGACY_ERROR_TEMP_2045", + "_LEGACY_ERROR_TEMP_2045", parameters = Map( "message" -> "For input string: \"bla\"" ) @@ -128,10 +128,10 @@ trait 
TypeWideningTableFeatureTests extends RowTrackingTestUtils with TypeWideni s"TBLPROPERTIES ('${DeltaConfigs.ENABLE_TYPE_WIDENING.key}' = 'false')") checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"ALTER TABLE delta.`$tempPath` CHANGE COLUMN a TYPE SMALLINT") }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", parameters = Map( "fieldPath" -> "a", "oldField" -> "TINYINT", @@ -147,10 +147,10 @@ trait TypeWideningTableFeatureTests extends RowTrackingTestUtils with TypeWideni s"SET TBLPROPERTIES ('${DeltaConfigs.ENABLE_TYPE_WIDENING.key}' = 'false')") checkError( - exception = intercept[AnalysisException] { + intercept[AnalysisException] { sql(s"ALTER TABLE delta.`$tempPath` CHANGE COLUMN a TYPE INT") }, - condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", + "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP", parameters = Map( "fieldPath" -> "a", "oldField" -> "SMALLINT", @@ -192,12 +192,12 @@ trait TypeWideningTableFeatureTests extends RowTrackingTestUtils with TypeWideni val deltaLog = DeltaLog.forTable(spark, TableIdentifier(tableName, Some(databaseName))) checkError( - exception = intercept[DeltaTableFeatureException] { + intercept[DeltaTableFeatureException] { sql(s"ALTER TABLE $databaseName.$tableName " + s"DROP FEATURE '${TypeWideningPreviewTableFeature.name}'" ).collect() }, - condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD", + "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD", parameters = Map( "feature" -> TypeWideningPreviewTableFeature.name, "logRetentionPeriodKey" -> DeltaConfigs.LOG_RETENTION.key, @@ -442,10 +442,10 @@ trait TypeWideningTableFeatureTests extends RowTrackingTestUtils with TypeWideni } checkError( - exception = intercept[DeltaIllegalStateException] { + intercept[DeltaIllegalStateException] { readDeltaTable(tempPath).collect() }, - condition = "DELTA_UNSUPPORTED_TYPE_CHANGE_IN_SCHEMA", + "DELTA_UNSUPPORTED_TYPE_CHANGE_IN_SCHEMA", parameters = Map( "fieldName" -> "a.element", "fromType" -> "INT", diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningTestMixin.scala b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningTestMixin.scala index 304926ae4e..71ce407216 100644 --- a/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningTestMixin.scala +++ b/spark/src/test/scala/org/apache/spark/sql/delta/typewidening/TypeWideningTestMixin.scala @@ -150,8 +150,8 @@ trait TypeWideningDropFeatureTestMixin dropFeature.run(spark) case ExpectedOutcome.FAIL_CURRENT_VERSION_USES_FEATURE => checkError( - exception = intercept[DeltaTableFeatureException] { dropFeature.run(spark) }, - condition = "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD", + intercept[DeltaTableFeatureException] { dropFeature.run(spark) }, + "DELTA_FEATURE_DROP_WAIT_FOR_RETENTION_PERIOD", parameters = Map( "feature" -> feature.name, "logRetentionPeriodKey" -> DeltaConfigs.LOG_RETENTION.key, @@ -163,8 +163,8 @@ trait TypeWideningDropFeatureTestMixin ) case ExpectedOutcome.FAIL_HISTORICAL_VERSION_USES_FEATURE => checkError( - exception = intercept[DeltaTableFeatureException] { dropFeature.run(spark) }, - condition = "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST", + intercept[DeltaTableFeatureException] { dropFeature.run(spark) }, + "DELTA_FEATURE_DROP_HISTORICAL_VERSIONS_EXIST", parameters = Map( "feature" -> feature.name, "logRetentionPeriodKey" -> DeltaConfigs.LOG_RETENTION.key, @@ -176,8 +176,8 @@ trait 
TypeWideningDropFeatureTestMixin ) case ExpectedOutcome.FAIL_FEATURE_NOT_PRESENT => checkError( - exception = intercept[DeltaTableFeatureException] { dropFeature.run(spark) }, - condition = "DELTA_FEATURE_DROP_FEATURE_NOT_PRESENT", + intercept[DeltaTableFeatureException] { dropFeature.run(spark) }, + "DELTA_FEATURE_DROP_FEATURE_NOT_PRESENT", parameters = Map("feature" -> feature.name) ) }