Pass as positional parameters
MaxGekk committed Sep 17, 2024
1 parent b101996 commit 8ee008f
Showing 34 changed files with 215 additions and 220 deletions.
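The change is mechanical across all 34 files: checkError calls drop the "exception =" and "condition =" argument names and pass those two values positionally, while later arguments such as parameters and queryContext stay named. A minimal sketch of the pattern in Scala, using a hypothetical, simplified stand-in for the Spark testing helper (not its real signature) and placeholder parameter values, so the before/after call styles can be pasted into a Scala REPL side by side:

// Hypothetical, simplified stand-in for the real checkError test helper.
// The exception and the error condition are its first two parameters, so
// call sites may pass them positionally and keep only later arguments named.
def checkError(
    exception: Exception,
    condition: String,
    parameters: Map[String, String] = Map.empty): Unit = {
  assert(exception.getMessage.contains(condition),
    s"expected error condition $condition with parameters $parameters")
}

val e = new IllegalStateException("DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT")

// Before this commit: exception and condition passed as named arguments.
checkError(
  exception = e,
  condition = "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
  parameters = Map("unsupportedFeatures" -> "variantType"))

// After this commit: the same two values passed positionally; parameters stays named.
checkError(
  e,
  "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
  parameters = Map("unsupportedFeatures" -> "variantType"))

Both call styles resolve to the same method, so only the call-site syntax changes; test behavior is unchanged. The diff below applies exactly this rewrite across the test suites.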
@@ -100,7 +100,7 @@ class DeltaVariantSuite
// check previously thrown error message
checkError(
e,
condition = "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
"DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
parameters = Map(
"unsupportedFeatures" -> VariantTypeTableFeature.name,
"supportedFeatures" -> currentFeatures
@@ -463,9 +463,9 @@ class DeltaSqlParserSuite extends SparkFunSuite with SQLHelper {
val parser = new DeltaSqlParser(new SparkSqlParser())
val sql =
clusterByStatement(clause, asSelect, "a int, b string", "CLUSTER BY (a) CLUSTER BY (b)")
checkError(exception = intercept[ParseException] {
checkError(intercept[ParseException] {
parser.parsePlan(sql)
}, condition = "DUPLICATE_CLAUSES", parameters = Map("clauseName" -> "CLUSTER BY"))
}, "DUPLICATE_CLAUSES", parameters = Map("clauseName" -> "CLUSTER BY"))
}

test("CLUSTER BY set clustering column property is ignored - " +
@@ -492,9 +492,9 @@ class DeltaSqlParserSuite extends SparkFunSuite with SQLHelper {
"CLUSTER BY (a) PARTITIONED BY (b)")
val errorMsg = "Clustering and partitioning cannot both be specified. " +
"Please remove PARTITIONED BY if you want to create a Delta table with clustering"
checkError(exception = intercept[ParseException] {
checkError(intercept[ParseException] {
parser.parsePlan(sql)
}, condition = "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg))
}, "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg))
}

test(s"CLUSTER BY with bucketing - $clause TABLE asSelect = $asSelect") {
@@ -508,9 +508,9 @@ class DeltaSqlParserSuite extends SparkFunSuite with SQLHelper {
val errorMsg = "Clustering and bucketing cannot both be specified. " +
"Please remove CLUSTERED BY INTO BUCKETS if you " +
"want to create a Delta table with clustering"
checkError(exception = intercept[ParseException] {
checkError(intercept[ParseException] {
parser.parsePlan(sql)
}, condition = "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg))
}, "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg))
}
}
}
@@ -492,10 +492,7 @@ class DeltaTableBuilderSuite
.execute()
}

checkError(
exception = e,
condition = "DELTA_CLUSTER_BY_WITH_PARTITIONED_BY"
)
checkError(e, "DELTA_CLUSTER_BY_WITH_PARTITIONED_BY")
}
}
}
@@ -924,17 +924,17 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", map('v1, 'v2))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.key COMMENT 'a comment'")
},
condition = "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
"DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
parameters = Map("fieldPath" -> "a.key")
)
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.value COMMENT 'a comment'")
},
condition = "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
"DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
parameters = Map("fieldPath" -> "a.value")
)
}
@@ -945,10 +945,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", array('v1))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.element COMMENT 'a comment'")
},
condition = "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
"DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
parameters = Map("fieldPath" -> "a.element")
)
}
@@ -959,20 +959,20 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", map('v1, 'v2))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[AnalysisException] {
intercept[AnalysisException] {
sql(s"ALTER TABLE $tableName RENAME COLUMN a.key TO key2")
},
condition = "INVALID_FIELD_NAME",
"INVALID_FIELD_NAME",
parameters = Map(
"fieldName" -> "`a`.`key2`",
"path" -> "`a`"
)
)
checkError(
exception = intercept[AnalysisException] {
intercept[AnalysisException] {
sql(s"ALTER TABLE $tableName RENAME COLUMN a.value TO value2")
},
condition = "INVALID_FIELD_NAME",
"INVALID_FIELD_NAME",
parameters = Map(
"fieldName" -> "`a`.`value2`",
"path" -> "`a`"
@@ -986,10 +986,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", array('v1))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[AnalysisException] {
intercept[AnalysisException] {
sql(s"ALTER TABLE $tableName RENAME COLUMN a.element TO element2")
},
condition = "INVALID_FIELD_NAME",
"INVALID_FIELD_NAME",
parameters = Map(
"fieldName" -> "`a`.`element2`",
"path" -> "`a`"
@@ -1008,10 +1008,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
ddlTest("CHANGE COLUMN - incompatible") {
withDeltaTable(Seq((1, "a"), (2, "b")).toDF("v1", "v2")) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN v1 v1 long")
},
condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "v1",
"oldField" -> "INT",
@@ -1026,10 +1026,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("struct", struct("v1", "v2"))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN struct.v1 v1 long")
},
condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "struct.v1",
"oldField" -> "INT",
@@ -1044,10 +1044,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", map('v1, 'v2))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.key key long")
},
condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "a.key",
"oldField" -> "INT NOT NULL",
@@ -1062,10 +1062,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", map('v1, 'v2))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.value value long")
},
condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "a.value",
"oldField" -> "INT",
@@ -1080,10 +1080,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", array('v1))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.element element long")
},
condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "a.element",
"oldField" -> "INT",
@@ -1383,8 +1383,8 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
// Changing the nullability of map/array fields is not allowed.
var statement = s"ALTER TABLE $tableName CHANGE COLUMN m.key DROP NOT NULL"
checkError(
exception = intercept[AnalysisException] { sql(statement) },
condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
intercept[AnalysisException] { sql(statement) },
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "m.key",
"oldField" -> "INT NOT NULL",
@@ -1394,8 +1394,8 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {

statement = s"ALTER TABLE $tableName CHANGE COLUMN m.value SET NOT NULL"
checkError(
exception = intercept[AnalysisException] { sql(statement) },
condition = "_LEGACY_ERROR_TEMP_2330",
intercept[AnalysisException] { sql(statement) },
"_LEGACY_ERROR_TEMP_2330",
parameters = Map(
"fieldName" -> "m.value"
),
@@ -1404,8 +1404,8 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {

statement = s"ALTER TABLE $tableName CHANGE COLUMN a.element SET NOT NULL"
checkError(
exception = intercept[AnalysisException] { sql(statement) },
condition = "_LEGACY_ERROR_TEMP_2330",
intercept[AnalysisException] { sql(statement) },
"_LEGACY_ERROR_TEMP_2330",
parameters = Map(
"fieldName" -> "a.element"
),
@@ -289,10 +289,10 @@ class DeltaCDCSQLSuite extends DeltaCDCSuiteBase with DeltaColumnMappingTestUtil
withTable(tbl) {
spark.range(10).write.format("delta").saveAsTable(tbl)
checkError(
exception = intercept[AnalysisException] {
intercept[AnalysisException] {
sql(s"SELECT * FROM table_changes('$tbl', 0, id)")
},
condition = "UNRESOLVED_COLUMN.WITHOUT_SUGGESTION",
"UNRESOLVED_COLUMN.WITHOUT_SUGGESTION",
parameters = Map("objectName" -> "`id`"),
queryContext = Array(ExpectedContext(
fragment = "id",
@@ -1944,10 +1944,9 @@ class DeltaColumnMappingSuite extends QueryTest
|TBLPROPERTIES('${DeltaConfigs.COLUMN_MAPPING_MODE.key}'='none')
|""".stripMargin)
}
val errorClass = "DELTA_INVALID_CHARACTERS_IN_COLUMN_NAMES"
checkError(
exception = e,
condition = errorClass,
e,
"DELTA_INVALID_CHARACTERS_IN_COLUMN_NAMES",
parameters = DeltaThrowableHelper
.getParameterNames(errorClass, errorSubClass = null)
.zip(invalidColumns).toMap
@@ -677,8 +677,8 @@ class DeltaDataFrameWriterV2Suite
def verifyNotImplicitCasting(f: => Unit): Unit = {
val e = intercept[DeltaAnalysisException](f)
checkError(
exception = e.getCause.asInstanceOf[DeltaAnalysisException],
condition = "DELTA_MERGE_INCOMPATIBLE_DATATYPE",
e.getCause.asInstanceOf[DeltaAnalysisException],
"DELTA_MERGE_INCOMPATIBLE_DATATYPE",
parameters = Map("currentDataType" -> "LongType", "updateDataType" -> "IntegerType"))
}
verifyNotImplicitCasting {
@@ -450,10 +450,10 @@ class DeltaDropColumnSuite extends QueryTest
field <- Seq("m.key", "m.value", "a.element")
}
checkError(
exception = intercept[AnalysisException] {
intercept[AnalysisException] {
sql(s"ALTER TABLE delta_test DROP COLUMN $field")
},
condition = "DELTA_UNSUPPORTED_DROP_NESTED_COLUMN_FROM_NON_STRUCT_TYPE",
"DELTA_UNSUPPORTED_DROP_NESTED_COLUMN_FROM_NON_STRUCT_TYPE",
parameters = Map(
"struct" -> "IntegerType"
)
@@ -477,12 +477,12 @@ trait DeltaErrorsSuiteBase
Some(s"Delta table $table doesn't exist."))
}
checkError(
exception = intercept[DeltaIllegalStateException] {
intercept[DeltaIllegalStateException] {
throw DeltaErrors.differentDeltaTableReadByStreamingSource(
newTableId = "027fb01c-94aa-4cab-87cb-5aab6aec6d17",
oldTableId = "2edf2c02-bb63-44e9-a84c-517fad0db296")
},
condition = "DIFFERENT_DELTA_TABLE_READ_BY_STREAMING_SOURCE",
"DIFFERENT_DELTA_TABLE_READ_BY_STREAMING_SOURCE",
parameters = Map(
"oldTableId" -> "2edf2c02-bb63-44e9-a84c-517fad0db296",
"newTableId" -> "027fb01c-94aa-4cab-87cb-5aab6aec6d17")
@@ -961,12 +961,12 @@ trait DeltaErrorsSuiteBase
SchemaMergingUtils.mergeSchemas(s1, s2)
}
checkError(
exception = e,
condition = "DELTA_FAILED_TO_MERGE_FIELDS",
e,
"DELTA_FAILED_TO_MERGE_FIELDS",
parameters = Map("currentField" -> "c0", "updateField" -> "c0"))
checkError(
exception = e.getCause.asInstanceOf[DeltaAnalysisException],
condition = "DELTA_MERGE_INCOMPATIBLE_DATATYPE",
e.getCause.asInstanceOf[DeltaAnalysisException],
"DELTA_MERGE_INCOMPATIBLE_DATATYPE",
parameters = Map("currentDataType" -> "IntegerType", "updateDataType" -> "StringType"))
}
{
@@ -997,13 +997,13 @@ trait DeltaErrorsSuiteBase
}
{
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
throw DeltaErrors.alterTableChangeColumnException(
fieldPath = "a.b.c",
oldField = StructField("c", IntegerType),
newField = StructField("c", LongType))
},
condition = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "a.b.c",
"oldField" -> "INT",
@@ -1421,14 +1421,14 @@ trait DeltaErrorsSuiteBase
}
{
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
throw DeltaErrors.constraintDataTypeMismatch(
columnPath = Seq("a", "x"),
columnType = ByteType,
dataType = IntegerType,
constraints = Map("ck1" -> "a > 0", "ck2" -> "hash(b) > 0"))
},
condition = "DELTA_CONSTRAINT_DATA_TYPE_MISMATCH",
"DELTA_CONSTRAINT_DATA_TYPE_MISMATCH",
parameters = Map(
"columnName" -> "a.x",
"columnType" -> "TINYINT",
@@ -1438,7 +1438,7 @@ }
}
{
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
throw DeltaErrors.generatedColumnsDataTypeMismatch(
columnPath = Seq("a", "x"),
columnType = ByteType,
@@ -1448,7 +1448,7 @@
"gen2" -> "3 + a . x"
))
},
condition = "DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH",
"DELTA_GENERATED_COLUMNS_DATA_TYPE_MISMATCH",
parameters = Map(
"columnName" -> "a.x",
"columnType" -> "TINYINT",
@@ -1916,10 +1916,10 @@ trait DeltaErrorsSuiteBase
}
{
checkError(
exception = intercept[DeltaIllegalStateException] {
intercept[DeltaIllegalStateException] {
throw MaterializedRowId.missingMetadataException("table_name")
},
condition = "DELTA_MATERIALIZED_ROW_TRACKING_COLUMN_NAME_MISSING",
"DELTA_MATERIALIZED_ROW_TRACKING_COLUMN_NAME_MISSING",
parameters = Map(
"rowTrackingColumn" -> "Row ID",
"tableName" -> "table_name"
@@ -1928,10 +1928,10 @@
}
{
checkError(
exception = intercept[DeltaIllegalStateException] {
intercept[DeltaIllegalStateException] {
throw MaterializedRowCommitVersion.missingMetadataException("table_name")
},
condition = "DELTA_MATERIALIZED_ROW_TRACKING_COLUMN_NAME_MISSING",
"DELTA_MATERIALIZED_ROW_TRACKING_COLUMN_NAME_MISSING",
parameters = Map(
"rowTrackingColumn" -> "Row Commit Version",
"tableName" -> "table_name"