
Commit a545f57

clippy

1 parent 221e10b

2 files changed, +13 -27 lines

native/core/src/execution/planner.rs

Lines changed: 6 additions & 14 deletions
@@ -69,11 +69,11 @@ use datafusion_comet_spark_expr::{create_comet_physical_fun, create_negate_expr}
 use crate::execution::operators::ExecutionError::GeneralError;
 use crate::execution::shuffle::CompressionCodec;
 use crate::execution::spark_plan::SparkPlan;
-use crate::parquet::parquet_support::{prepare_object_store, SparkParquetOptions};
+use crate::parquet::parquet_support::prepare_object_store;
 use datafusion::common::scalar::ScalarStructBuilder;
 use datafusion::common::{
     tree_node::{Transformed, TransformedResult, TreeNode, TreeNodeRecursion, TreeNodeRewriter},
-    ExprSchema, JoinType as DFJoinType, ScalarValue,
+    JoinType as DFJoinType, ScalarValue,
 };
 use datafusion::datasource::listing::PartitionedFile;
 use datafusion::logical_expr::type_coercion::other::get_coerce_type_for_case_expression;
@@ -86,10 +86,6 @@ use datafusion::physical_expr::window::WindowExpr;
 use datafusion::physical_expr::LexOrdering;
 
 use crate::parquet::parquet_exec::init_datasource_exec;
-use crate::parquet::schema_adapter::SparkSchemaAdapterFactory;
-use datafusion::datasource::object_store::ObjectStoreUrl;
-use datafusion::datasource::physical_plan::{FileGroup, FileScanConfigBuilder, ParquetSource};
-use datafusion::datasource::source::DataSourceExec;
 use datafusion::physical_plan::coalesce_batches::CoalesceBatchesExec;
 use datafusion::physical_plan::filter::FilterExec as DataFusionFilterExec;
 use datafusion_comet_proto::spark_operator::SparkFilePartition;
@@ -112,14 +108,12 @@ use datafusion_comet_spark_expr::{
     SparkCastOptions, StartsWith, Stddev, StringSpaceExpr, SubstringExpr, SumDecimal,
     TimestampTruncExpr, ToJson, UnboundColumn, Variance,
 };
-use futures::StreamExt;
 use itertools::Itertools;
 use jni::objects::GlobalRef;
 use num::{BigInt, ToPrimitive};
 use object_store::path::Path;
 use std::cmp::max;
 use std::{collections::HashMap, sync::Arc};
-use tokio::runtime::Runtime;
 use url::Url;
 
 // For clippy error on type_complexity.
@@ -3512,22 +3506,20 @@ mod tests {
     #[test]
     fn test_struct_field_2() {
         let session_ctx = SessionContext::new();
-        let task_ctx = session_ctx.task_ctx();
 
         let required_schema = Schema::new(Fields::from(vec![Field::new(
             "c0",
             DataType::List(
                 Field::new(
                     "element",
                     DataType::Struct(Fields::from(vec![
-                        Field::new("a", DataType::Int32, false).into(),
-                        Field::new("c", DataType::Utf8, false).into(),
+                        Field::new("a", DataType::Int32, false),
+                        Field::new("c", DataType::Utf8, false),
                     ] as Vec<Field>)),
                     false,
                 )
                 .into(),
-            )
-            .into(),
+            ),
             false,
         )]));
 
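Note: the struct children's `.into()` calls were identity `Field -> Field` conversions (clippy's `useless_conversion`), while the `.into()` that remains after the element `Field::new` genuinely converts `Field -> FieldRef`. A minimal self-contained sketch of the schema shape this test builds, for illustration only:

use arrow::datatypes::{DataType, Field, Fields, Schema};

fn main() {
    // One column c0: List<element: Struct<a: Int32, c: Utf8>>.
    let schema = Schema::new(Fields::from(vec![Field::new(
        "c0",
        DataType::List(
            Field::new(
                "element",
                DataType::Struct(Fields::from(vec![
                    // Plain `Field`s suffice; `Fields::from(Vec<Field>)`
                    // wraps each one in an Arc itself.
                    Field::new("a", DataType::Int32, false),
                    Field::new("c", DataType::Utf8, false),
                ] as Vec<Field>)),
                false,
            )
            // Field -> FieldRef (Arc<Field>): the one conversion that
            // does real work, so this `.into()` stays.
            .into(),
        ),
        false,
    )]));
    println!("{schema:#?}");
}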
@@ -3553,7 +3545,7 @@
         let rt = Runtime::new().unwrap();
         let result: Vec<_> = rt.block_on(stream.collect());
 
-        let actual = result.get(0).unwrap().as_ref().unwrap();
+        let actual = result.first().unwrap().as_ref().unwrap();
 
         let expected = [
             "+----------------+",

native/core/src/parquet/parquet_support.rs

Lines changed: 7 additions & 13 deletions
@@ -18,7 +18,6 @@
 use crate::execution::operators::ExecutionError;
 use arrow::array::ListArray;
 use arrow::compute::can_cast_types;
-use arrow::datatypes::Field;
 use arrow::{
     array::{
         cast::AsArray, new_null_array, types::Int32Type, types::TimestampMicrosecondType, Array,
@@ -192,21 +191,20 @@ fn cast_list_to_list(
     parquet_options: &SparkParquetOptions,
 ) -> DataFusionResult<ArrayRef> {
     match (from_type, to_type) {
-        (DataType::List(from_inner_type), DataType::List(to_inner_type)) => {
+        (DataType::List(_), DataType::List(to_inner_type)) => {
             //dbg!(from_type);
             //dbg!(to_type);
             //dbg!(from_inner_type);
             //dbg!(to_inner_type);
 
             let cast_field = cast_array(
-                array.values().clone(),
+                Arc::clone(array.values()),
                 to_inner_type.data_type(),
                 parquet_options,
-            )
-            .unwrap();
+            )?;
 
             Ok(Arc::new(ListArray::new(
-                to_inner_type.clone(),
+                Arc::clone(to_inner_type),
                 array.offsets().clone(),
                 cast_field,
                 array.nulls().cloned(),
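Two idioms land in this hunk: `Arc::clone(x)` in place of `x.clone()` (clippy's `clone_on_ref_ptr`, which this codebase appears to enable to make the cheap reference-count bump explicit), and `?` in place of `.unwrap()` so a failed element cast returns an `Err` instead of panicking. A minimal sketch under those assumptions, with `inner_cast` as a hypothetical stand-in for `cast_array`:

use std::sync::Arc;

// Hypothetical stand-in for the real element-level cast.
fn inner_cast(values: Arc<[i32]>) -> Result<Arc<[i32]>, String> {
    Ok(values)
}

fn cast_list(values: &Arc<[i32]>) -> Result<Arc<[i32]>, String> {
    // clippy::clone_on_ref_ptr: Arc::clone(values) makes explicit that only
    // the pointer is cloned, not the underlying buffer.
    let cast = inner_cast(Arc::clone(values))?; // `?` propagates the error
    Ok(cast)
}

fn main() {
    let v: Arc<[i32]> = Arc::from(vec![1, 2, 3]);
    assert!(cast_list(&v).is_ok());
}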
@@ -318,9 +316,7 @@ pub(crate) fn prepare_object_store(
 #[cfg(test)]
 mod tests {
     use crate::parquet::parquet_support::prepare_object_store;
-    use arrow::array::{
-        Array, ArrayBuilder, Int32Builder, ListBuilder, StringBuilder, StructBuilder,
-    };
+    use arrow::array::{Array, Int32Builder, ListBuilder, StringBuilder, StructBuilder};
     use arrow::compute::{can_cast_types, cast_with_options, CastOptions};
     use arrow::datatypes::{DataType, Field, Fields};
     use datafusion::execution::object_store::ObjectStoreUrl;
@@ -404,8 +400,6 @@ mod tests {
             Arc::new(Field::new("c", DataType::Utf8, true)),
         ]);
 
-        let struct_field = Field::new_list_field(DataType::Struct(fields.clone()), true);
-        let list_field = Field::new("list", DataType::List(Arc::new(struct_field)), true);
         let a_col_builder = Int32Builder::with_capacity(1);
         let b_col_builder = StringBuilder::new();
         let c_col_builder = StringBuilder::new();
@@ -447,8 +441,8 @@ mod tests {
             Field::new(
                 "element",
                 DataType::Struct(Fields::from(vec![
-                    Field::new("a", DataType::Int32, false).into(),
-                    Field::new("c", DataType::Utf8, false).into(),
+                    Field::new("a", DataType::Int32, false),
+                    Field::new("c", DataType::Utf8, false),
                 ] as Vec<Field>)),
                 false,
             )
