Skip to content

Commit c0ca4b4

Browse files
authored [author name missing from page extraction]
Minor: clean up error entries (#13521)
* Minor: clean up error entries * Minor: clean up error entries
1 parent 4e5e765 commit c0ca4b4

File tree

9 files changed

+30
-38
lines changed

9 files changed

+30
-38
lines changed

datafusion/core/src/dataframe/mod.rs

+13-14
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,8 @@ use arrow::datatypes::{DataType, Field};
5050
use arrow_schema::{Schema, SchemaRef};
5151
use datafusion_common::config::{CsvOptions, JsonOptions};
5252
use datafusion_common::{
53-
plan_err, Column, DFSchema, DataFusionError, ParamValues, SchemaError, UnnestOptions,
53+
exec_err, not_impl_err, plan_err, Column, DFSchema, DataFusionError, ParamValues,
54+
SchemaError, UnnestOptions,
5455
};
5556
use datafusion_expr::dml::InsertOp;
5657
use datafusion_expr::{case, is_null, lit, SortExpr};
@@ -869,16 +870,16 @@ impl DataFrame {
869870
for result in describe_record_batch.iter() {
870871
let array_ref = match result {
871872
Ok(df) => {
872-
let batchs = df.clone().collect().await;
873-
match batchs {
874-
Ok(batchs)
875-
if batchs.len() == 1
876-
&& batchs[0]
873+
let batches = df.clone().collect().await;
874+
match batches {
875+
Ok(batches)
876+
if batches.len() == 1
877+
&& batches[0]
877878
.column_by_name(field.name())
878879
.is_some() =>
879880
{
880881
let column =
881-
batchs[0].column_by_name(field.name()).unwrap();
882+
batches[0].column_by_name(field.name()).unwrap();
882883

883884
if column.data_type().is_null() {
884885
Arc::new(StringArray::from(vec!["null"]))
@@ -901,9 +902,7 @@ impl DataFrame {
901902
{
902903
Arc::new(StringArray::from(vec!["null"]))
903904
}
904-
Err(other_err) => {
905-
panic!("{other_err}")
906-
}
905+
Err(e) => return exec_err!("{}", e),
907906
};
908907
array_datas.push(array_ref);
909908
}
@@ -1564,10 +1563,10 @@ impl DataFrame {
15641563
writer_options: Option<CsvOptions>,
15651564
) -> Result<Vec<RecordBatch>, DataFusionError> {
15661565
if options.insert_op != InsertOp::Append {
1567-
return Err(DataFusionError::NotImplemented(format!(
1566+
return not_impl_err!(
15681567
"{} is not implemented for DataFrame::write_csv.",
15691568
options.insert_op
1570-
)));
1569+
);
15711570
}
15721571

15731572
let format = if let Some(csv_opts) = writer_options {
@@ -1625,10 +1624,10 @@ impl DataFrame {
16251624
writer_options: Option<JsonOptions>,
16261625
) -> Result<Vec<RecordBatch>, DataFusionError> {
16271626
if options.insert_op != InsertOp::Append {
1628-
return Err(DataFusionError::NotImplemented(format!(
1627+
return not_impl_err!(
16291628
"{} is not implemented for DataFrame::write_json.",
16301629
options.insert_op
1631-
)));
1630+
);
16321631
}
16331632

16341633
let format = if let Some(json_opts) = writer_options {

datafusion/core/src/dataframe/parquet.rs

+3-2
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ use super::{
2626
};
2727

2828
use datafusion_common::config::TableParquetOptions;
29+
use datafusion_common::not_impl_err;
2930
use datafusion_expr::dml::InsertOp;
3031

3132
impl DataFrame {
@@ -59,10 +60,10 @@ impl DataFrame {
5960
writer_options: Option<TableParquetOptions>,
6061
) -> Result<Vec<RecordBatch>, DataFusionError> {
6162
if options.insert_op != InsertOp::Append {
62-
return Err(DataFusionError::NotImplemented(format!(
63+
return not_impl_err!(
6364
"{} is not implemented for DataFrame::write_parquet.",
6465
options.insert_op
65-
)));
66+
);
6667
}
6768

6869
let format = if let Some(parquet_opts) = writer_options {

datafusion/core/src/datasource/file_format/avro.rs

+2-4
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@ use std::sync::Arc;
2525
use arrow::datatypes::Schema;
2626
use arrow::datatypes::SchemaRef;
2727
use async_trait::async_trait;
28+
use datafusion_common::internal_err;
2829
use datafusion_common::parsers::CompressionTypeVariant;
29-
use datafusion_common::DataFusionError;
3030
use datafusion_common::GetExt;
3131
use datafusion_common::DEFAULT_AVRO_EXTENSION;
3232
use datafusion_physical_expr::PhysicalExpr;
@@ -105,9 +105,7 @@ impl FileFormat for AvroFormat {
105105
let ext = self.get_ext();
106106
match file_compression_type.get_variant() {
107107
CompressionTypeVariant::UNCOMPRESSED => Ok(ext),
108-
_ => Err(DataFusionError::Internal(
109-
"Avro FileFormat does not support compression.".into(),
110-
)),
108+
_ => internal_err!("Avro FileFormat does not support compression."),
111109
}
112110
}
113111

datafusion/core/src/datasource/file_format/parquet.rs

+2-4
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ use datafusion_common::file_options::parquet_writer::ParquetWriterOptions;
4949
use datafusion_common::parsers::CompressionTypeVariant;
5050
use datafusion_common::stats::Precision;
5151
use datafusion_common::{
52-
internal_datafusion_err, not_impl_err, DataFusionError, GetExt,
52+
internal_datafusion_err, internal_err, not_impl_err, DataFusionError, GetExt,
5353
DEFAULT_PARQUET_EXTENSION,
5454
};
5555
use datafusion_common_runtime::SpawnedTask;
@@ -323,9 +323,7 @@ impl FileFormat for ParquetFormat {
323323
let ext = self.get_ext();
324324
match file_compression_type.get_variant() {
325325
CompressionTypeVariant::UNCOMPRESSED => Ok(ext),
326-
_ => Err(DataFusionError::Internal(
327-
"Parquet FileFormat does not support compression.".into(),
328-
)),
326+
_ => internal_err!("Parquet FileFormat does not support compression."),
329327
}
330328
}
331329

datafusion/core/src/datasource/file_format/write/demux.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ use datafusion_common::cast::{
3737
as_boolean_array, as_date32_array, as_date64_array, as_int32_array, as_int64_array,
3838
as_string_array, as_string_view_array,
3939
};
40-
use datafusion_common::{exec_datafusion_err, DataFusionError};
40+
use datafusion_common::{exec_datafusion_err, not_impl_err, DataFusionError};
4141
use datafusion_common_runtime::SpawnedTask;
4242
use datafusion_execution::TaskContext;
4343

@@ -438,10 +438,10 @@ fn compute_partition_keys_by_row<'a>(
438438
)
439439
}
440440
_ => {
441-
return Err(DataFusionError::NotImplemented(format!(
441+
return not_impl_err!(
442442
"it is not yet supported to write to hive partitions with datatype {}",
443443
dtype
444-
)))
444+
)
445445
}
446446
}
447447

datafusion/core/src/datasource/physical_plan/file_scan_config.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -1165,7 +1165,7 @@ mod tests {
11651165
})
11661166
.collect::<Vec<_>>()
11671167
})
1168-
.map_err(|e| e.to_string().leak() as &'static str);
1168+
.map_err(|e| e.strip_backtrace().leak() as &'static str);
11691169

11701170
assert_eq!(results_by_name, case.expected_result, "{}", case.name);
11711171
}

datafusion/core/src/datasource/physical_plan/statistics.rs

+2-4
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ use arrow::{
3434
};
3535
use arrow_array::RecordBatch;
3636
use arrow_schema::SchemaRef;
37-
use datafusion_common::{DataFusionError, Result};
37+
use datafusion_common::{plan_err, DataFusionError, Result};
3838
use datafusion_physical_expr::{expressions::Column, PhysicalSortExpr};
3939
use datafusion_physical_expr_common::sort_expr::LexOrdering;
4040

@@ -232,9 +232,7 @@ impl MinMaxStatistics {
232232

233233
// check that sort columns are non-nullable
234234
if field.is_nullable() {
235-
return Err(DataFusionError::Plan(
236-
"cannot sort by nullable column".to_string(),
237-
));
235+
return plan_err!("cannot sort by nullable column");
238236
}
239237

240238
Ok(SortColumn {

datafusion/sql/src/select.rs

+2-4
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ use crate::utils::{
2525
};
2626

2727
use datafusion_common::tree_node::{TreeNode, TreeNodeRecursion};
28-
use datafusion_common::{not_impl_err, plan_err, DataFusionError, Result};
28+
use datafusion_common::{not_impl_err, plan_err, Result};
2929
use datafusion_common::{RecursionUnnestOption, UnnestOptions};
3030
use datafusion_expr::expr::{Alias, PlannedReplaceSelectItem, WildcardOptions};
3131
use datafusion_expr::expr_rewriter::{
@@ -657,9 +657,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
657657
} = options;
658658

659659
if opt_rename.is_some() {
660-
Err(DataFusionError::NotImplemented(
661-
"wildcard * with RENAME not supported ".to_string(),
662-
))
660+
not_impl_err!("wildcard * with RENAME not supported ")
663661
} else {
664662
Ok(())
665663
}

docs/source/library-user-guide/catalogs.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -73,9 +73,9 @@ impl SchemaProvider for MemorySchemaProvider {
7373
table: Arc<dyn TableProvider>,
7474
) -> Result<Option<Arc<dyn TableProvider>>> {
7575
if self.table_exist(name.as_str()) {
76-
return Err(DataFusionError::Execution(format!(
76+
return exec_err!(
7777
"The table {name} already exists"
78-
)));
78+
);
7979
}
8080
Ok(self.tables.insert(name, table))
8181
}

0 commit comments

Comments (0)