@@ -69,11 +69,11 @@ use datafusion_comet_spark_expr::{create_comet_physical_fun, create_negate_expr}
 use crate::execution::operators::ExecutionError::GeneralError;
 use crate::execution::shuffle::CompressionCodec;
 use crate::execution::spark_plan::SparkPlan;
-use crate::parquet::parquet_support::{prepare_object_store, SparkParquetOptions};
+use crate::parquet::parquet_support::prepare_object_store;
 use datafusion::common::scalar::ScalarStructBuilder;
 use datafusion::common::{
     tree_node::{Transformed, TransformedResult, TreeNode, TreeNodeRecursion, TreeNodeRewriter},
-    ExprSchema, JoinType as DFJoinType, ScalarValue,
+    JoinType as DFJoinType, ScalarValue,
 };
 use datafusion::datasource::listing::PartitionedFile;
 use datafusion::logical_expr::type_coercion::other::get_coerce_type_for_case_expression;
@@ -86,10 +86,6 @@ use datafusion::physical_expr::window::WindowExpr;
 use datafusion::physical_expr::LexOrdering;
 
 use crate::parquet::parquet_exec::init_datasource_exec;
-use crate::parquet::schema_adapter::SparkSchemaAdapterFactory;
-use datafusion::datasource::object_store::ObjectStoreUrl;
-use datafusion::datasource::physical_plan::{FileGroup, FileScanConfigBuilder, ParquetSource};
-use datafusion::datasource::source::DataSourceExec;
 use datafusion::physical_plan::coalesce_batches::CoalesceBatchesExec;
 use datafusion::physical_plan::filter::FilterExec as DataFusionFilterExec;
 use datafusion_comet_proto::spark_operator::SparkFilePartition;
@@ -112,14 +108,12 @@ use datafusion_comet_spark_expr::{
     SparkCastOptions, StartsWith, Stddev, StringSpaceExpr, SubstringExpr, SumDecimal,
     TimestampTruncExpr, ToJson, UnboundColumn, Variance,
 };
-use futures::StreamExt;
 use itertools::Itertools;
 use jni::objects::GlobalRef;
 use num::{BigInt, ToPrimitive};
 use object_store::path::Path;
 use std::cmp::max;
 use std::{collections::HashMap, sync::Arc};
-use tokio::runtime::Runtime;
 use url::Url;
 
 // For clippy error on type_complexity.
@@ -3512,22 +3506,20 @@ mod tests {
     #[test]
     fn test_struct_field_2() {
         let session_ctx = SessionContext::new();
-        let task_ctx = session_ctx.task_ctx();
 
         let required_schema = Schema::new(Fields::from(vec![Field::new(
             "c0",
             DataType::List(
                 Field::new(
                     "element",
                     DataType::Struct(Fields::from(vec![
-                        Field::new("a", DataType::Int32, false).into(),
-                        Field::new("c", DataType::Utf8, false).into(),
+                        Field::new("a", DataType::Int32, false),
+                        Field::new("c", DataType::Utf8, false),
                     ] as Vec<Field>)),
                     false,
                 )
                 .into(),
-            )
-            .into(),
+            ),
             false,
         )]));
 
@@ -3553,7 +3545,7 @@ mod tests {
         let rt = Runtime::new().unwrap();
         let result: Vec<_> = rt.block_on(stream.collect());
 
-        let actual = result.get(0).unwrap().as_ref().unwrap();
+        let actual = result.first().unwrap().as_ref().unwrap();
 
         let expected = [
             "+----------------+",