Skip to content

Commit 5e374f8

Browse files
yukkitroseboy-liu
authored and committed
Upgrade datafusion version to 27.0.0
1 parent 6a0e5f4 commit 5e374f8

File tree

72 files changed

+1598
-964
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

72 files changed

+1598
-964
lines changed

Cargo.lock

Lines changed: 224 additions & 108 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -55,11 +55,11 @@ crossbeam-channel = "0.5"
5555
ctrlc = "3"
5656
dashmap = "5.2"
5757
derive_builder = "0.11"
58-
arrow = { version = "36.0.0", features = ["prettyprint"] }
59-
arrow-schema = { version = "36.0.0", features = ["serde"] }
60-
arrow-flight = { version = "36.0.0" }
61-
datafusion-proto = { git = "https://github.com/cnosdb/arrow-datafusion.git", branch = "22.0.0" }
62-
datafusion = { git = "https://github.com/cnosdb/arrow-datafusion.git", branch = "22.0.0" }
58+
arrow = { version = "42.0.0", features = ["prettyprint"] }
59+
arrow-schema = { version = "42.0.0", features = ["serde"] }
60+
arrow-flight = { version = "42.0.0" }
61+
datafusion-proto = { git = "https://github.com/cnosdb/arrow-datafusion.git", branch = "27.0.0" }
62+
datafusion = { git = "https://github.com/cnosdb/arrow-datafusion.git", branch = "27.0.0" }
6363
diff = "0.1.13"
6464
dirs = "4.0.0"
6565
env_logger = "0.9"
@@ -77,7 +77,7 @@ nom = "7.1.1"
7777
num-traits = "0.2.14"
7878
num_cpus = "1.13.0"
7979
num_enum = "0.5.7"
80-
object_store = { version = "0.5.2", features = ["aws", "gcp", "azure"] }
80+
object_store = { version = "0.6.1", features = ["aws", "gcp", "azure"] }
8181
once_cell = "1.12.0"
8282
openraft = { git = "https://github.com/datafuselabs/openraft", rev = "914fcb4dad32a2f187b808298048e9e8b912977f", features = ["serde"] }
8383
openssl = { version = "0.10.48", features = ["vendored"] }
@@ -117,8 +117,8 @@ tokio = { version = "1.21" }
117117
tokio-stream = "0.1"
118118
tokio-util = { version = "0.7.0" }
119119
toml = "0.5.9"
120-
tonic = "0.8"
121-
tonic-build = "0.8"
120+
tonic = "0.9"
121+
tonic-build = "0.9"
122122
tower = "0.4.13"
123123
tracing = "0.1.35"
124124
tracing-appender = "0.2.2"

client/src/print_format.rs

Lines changed: 15 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,9 @@ macro_rules! batches_to_json {
3333
let mut bytes = vec![];
3434
{
3535
let mut writer = $WRITER::new(&mut bytes);
36-
writer.write_batches($batches)?;
36+
for batch in $batches {
37+
writer.write(batch)?;
38+
}
3739
writer.finish()?;
3840
}
3941
String::from_utf8(bytes).map_err(|e| DataFusionError::Execution(e.to_string()))?
@@ -63,7 +65,9 @@ impl PrintFormat {
6365
Self::Csv => println!("{}", print_batches_with_sep(batches, b',')?),
6466
Self::Tsv => println!("{}", print_batches_with_sep(batches, b'\t')?),
6567
Self::Table => pretty::print_batches(batches)?,
66-
Self::Json => println!("{}", batches_to_json!(ArrayWriter, batches)),
68+
Self::Json => {
69+
println!("{}", batches_to_json!(ArrayWriter, batches))
70+
}
6771
Self::NdJson => {
6872
println!("{}", batches_to_json!(LineDelimitedWriter, batches))
6973
}
@@ -88,7 +92,6 @@ mod tests {
8892

8993
use datafusion::arrow::array::Int32Array;
9094
use datafusion::arrow::datatypes::{DataType, Field, Schema};
91-
use datafusion::from_slice::FromSlice;
9295

9396
use super::*;
9497

@@ -106,15 +109,15 @@ mod tests {
106109
let batch = RecordBatch::try_new(
107110
schema,
108111
vec![
109-
Arc::new(Int32Array::from_slice([1, 2, 3])),
110-
Arc::new(Int32Array::from_slice([4, 5, 6])),
111-
Arc::new(Int32Array::from_slice([7, 8, 9])),
112+
Arc::new(Int32Array::from(vec![1, 2, 3])),
113+
Arc::new(Int32Array::from(vec![4, 5, 6])),
114+
Arc::new(Int32Array::from(vec![7, 8, 9])),
112115
],
113116
)
114117
.unwrap();
115118

116-
let batches = vec![batch];
117-
let r = print_batches_with_sep(&batches, b',').unwrap();
119+
let batches = &[batch];
120+
let r = print_batches_with_sep(batches, b',').unwrap();
118121
assert_eq!("a,b,c\n1,4,7\n2,5,8\n3,6,9\n", r);
119122
}
120123

@@ -136,14 +139,14 @@ mod tests {
136139
let batch = RecordBatch::try_new(
137140
schema,
138141
vec![
139-
Arc::new(Int32Array::from_slice([1, 2, 3])),
140-
Arc::new(Int32Array::from_slice([4, 5, 6])),
141-
Arc::new(Int32Array::from_slice([7, 8, 9])),
142+
Arc::new(Int32Array::from(vec![1, 2, 3])),
143+
Arc::new(Int32Array::from(vec![4, 5, 6])),
144+
Arc::new(Int32Array::from(vec![7, 8, 9])),
142145
],
143146
)
144147
.unwrap();
145148

146-
let batches = vec![batch];
149+
let batches = vec![&batch];
147150
let r = batches_to_json!(ArrayWriter, &batches);
148151
assert_eq!(
149152
"[{\"a\":1,\"b\":4,\"c\":7},{\"a\":2,\"b\":5,\"c\":8},{\"a\":3,\"b\":6,\"c\":9}]",

common/models/src/arrow_array.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use arrow_schema::{Field, Schema};
1+
use arrow_schema::Schema;
22
use datafusion::arrow::array::{
33
ArrayBuilder, BooleanBuilder, Float64Builder, Int64Builder, StringBuilder,
44
TimestampMicrosecondBuilder, TimestampMillisecondBuilder, TimestampNanosecondBuilder,
@@ -51,7 +51,7 @@ pub fn build_arrow_array_builders(
5151
schema
5252
.fields()
5353
.iter()
54-
.map(|f: &Field| build_arrow_array_builder(f.data_type(), batch_size))
54+
.map(|f| build_arrow_array_builder(f.data_type(), batch_size))
5555
.collect::<Result<Vec<_>, ArrowError>>()
5656
}
5757

common/models/src/schema.rs

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ use datafusion::datasource::file_format::json::JsonFormat;
2626
use datafusion::datasource::file_format::parquet::ParquetFormat;
2727
use datafusion::datasource::file_format::FileFormat;
2828
use datafusion::datasource::listing::ListingOptions;
29-
use datafusion::error::Result as DataFusionResult;
29+
use datafusion::error::{DataFusionError, Result as DataFusionResult};
3030
use datafusion::prelude::Column;
3131
use datafusion::scalar::ScalarValue;
3232
use derive_builder::Builder;
@@ -127,6 +127,11 @@ impl ExternalTableSchema {
127127
FileType::JSON => {
128128
Arc::new(JsonFormat::default().with_file_compression_type(file_compression_type))
129129
}
130+
FileType::ARROW => {
131+
return Err(DataFusionError::NotImplemented(
132+
"Arrow external table.".to_string(),
133+
))
134+
}
130135
};
131136

132137
let options = ListingOptions::new(file_format)

0 commit comments

Comments (0)