
Commit ccc4a95

fix: attempt to divide by zero error on decimal division (#172)
* fix: attempt to divide by zero error on decimal division
* More
* Add test for modulo
1 parent e83635a commit ccc4a95


2 files changed: +26 −2 lines changed


core/src/execution/datafusion/expressions/scalar_funcs.rs

Lines changed: 2 additions & 1 deletion

@@ -480,10 +480,11 @@ fn spark_decimal_div(
     let l_mul = ten.pow(l_exp);
     let r_mul = ten.pow(r_exp);
     let five = BigInt::from(5);
+    let zero = BigInt::from(0);
     let result: Decimal128Array = arrow::compute::kernels::arity::binary(left, right, |l, r| {
         let l = BigInt::from(l) * &l_mul;
         let r = BigInt::from(r) * &r_mul;
-        let div = &l / &r;
+        let div = if r.eq(&zero) { zero.clone() } else { &l / &r };
         let res = if div.is_negative() {
             div - &five
         } else {
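
For context: the added guard short-circuits the scaled BigInt division when the divisor is zero, so the closure passed to the Arrow binary kernel no longer panics with "attempt to divide by zero". Below is a minimal standalone sketch of the same pattern, not part of this commit: it assumes the num-bigint crate, omits the real function's rounding and scale handling, and checked_decimal_div is a hypothetical helper name rather than the function in scalar_funcs.rs.

use num_bigint::BigInt;

// Hypothetical helper (illustration only) mirroring the guard added in
// spark_decimal_div: when the scaled divisor is zero, return zero instead
// of letting the BigInt division panic.
fn checked_decimal_div(l: i128, r: i128, l_mul: &BigInt, r_mul: &BigInt) -> BigInt {
    let zero = BigInt::from(0);
    let l = BigInt::from(l) * l_mul;
    let r = BigInt::from(r) * r_mul;
    if r == zero { zero } else { &l / &r }
}

fn main() {
    let one = BigInt::from(1);
    // A zero divisor no longer panics; the guarded result is simply 0 here.
    assert_eq!(checked_decimal_div(12345, 0, &one, &one), BigInt::from(0));
    assert_eq!(checked_decimal_div(10, 2, &one, &one), BigInt::from(5));
}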

spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala

Lines changed: 24 additions & 1 deletion

@@ -24,7 +24,7 @@ import java.util
 import org.apache.hadoop.fs.Path
 import org.apache.spark.sql.{CometTestBase, DataFrame, Row}
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
-import org.apache.spark.sql.functions.expr
+import org.apache.spark.sql.functions.{expr, lit}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.SESSION_LOCAL_TIMEZONE
 import org.apache.spark.sql.types.{Decimal, DecimalType, StructType}

@@ -47,6 +47,29 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
     }
   }

+  test("decimals divide by zero") {
+    // TODO: enable Spark 3.2 & 3.3 tests after supporting decimal divide operation
+    assume(isSpark34Plus)
+
+    Seq(true, false).foreach { dictionary =>
+      withSQLConf(
+        SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key -> "false",
+        "parquet.enable.dictionary" -> dictionary.toString) {
+        withTempPath { dir =>
+          val data = makeDecimalRDD(10, DecimalType(18, 10), dictionary)
+          data.write.parquet(dir.getCanonicalPath)
+          readParquetFile(dir.getCanonicalPath) { df =>
+            {
+              val decimalLiteral = Decimal(0.00)
+              val cometDf = df.select($"dec" / decimalLiteral, $"dec" % decimalLiteral)
+              checkSparkAnswerAndOperator(cometDf)
+            }
+          }
+        }
+      }
+    }
+  }
+
   test("bitwise shift with different left/right types") {
     Seq(false, true).foreach { dictionary =>
       withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {