Commit 0c58969

Move to the 2024 Rust edition (#2490)

Given the number of unstable features we use, why not enjoy the new [2024 edition](https://blog.rust-lang.org/2025/02/20/Rust-1.85.0.html)?
1 parent dcc482b commit 0c58969
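
The change has two parts: the manifest bump shown in Cargo.toml below, and a large mechanical reformat of use declarations and over-long macro calls produced by rustfmt's 2024 style edition. A minimal sketch of the manifest side, with the usual migration commands included as an assumption (they are not recorded in this commit):

# Cargo.toml (sketch of the edition bump)
edition = "2024"        # the 2024 edition stabilized in Rust 1.85.0
rust-version = "1.86"   # minimum supported toolchain bumped alongside it

# assumed migration workflow, not part of this commit:
#   cargo fix --edition   # apply the mechanical edition fixes
#   cargo fmt             # reformat under the 2024 style edition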

469 files changed, +1170 -1107 lines


Cargo.toml (+2 -2)

@@ -44,8 +44,8 @@ include = [
     "flatbuffers/**/*.fbs",
     "README.md",
 ]
-edition = "2021"
-rust-version = "1.84"
+edition = "2024"
+rust-version = "1.86"
 readme = "README.md"
 categories = ["database-implementations", "data-structures", "compression"]
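
Nearly all of the remaining files change only their import lists: under the 2024 style edition, which cargo fmt selects by default once the package edition is 2024, the items inside a use group are sorted so that SCREAMING_CASE and CamelCase names come before snake_case ones. A representative before/after pair, taken from the next file in the diff:

// 2021 style edition
use bench_vortex::display::{print_measurements_json, render_table, DisplayFormat, RatioMode};

// 2024 style edition: uppercase names sort before lowercase ones
use bench_vortex::display::{DisplayFormat, RatioMode, print_measurements_json, render_table};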

bench-vortex/src/bin/clickbench.rs (+5 -5)

@@ -2,12 +2,12 @@ use std::fs::{self, File};
 use std::path::PathBuf;
 use std::time::{Duration, Instant};
 
-use bench_vortex::clickbench::{self, clickbench_queries, HITS_SCHEMA};
-use bench_vortex::display::{print_measurements_json, render_table, DisplayFormat, RatioMode};
+use bench_vortex::clickbench::{self, HITS_SCHEMA, clickbench_queries};
+use bench_vortex::display::{DisplayFormat, RatioMode, print_measurements_json, render_table};
 use bench_vortex::measurements::QueryMeasurement;
 use bench_vortex::{
-    default_env_filter, execute_physical_plan, feature_flagged_allocator, get_session_with_cache,
-    idempotent, physical_plan, Format, IdempotentPath as _,
+    Format, IdempotentPath as _, default_env_filter, execute_physical_plan,
+    feature_flagged_allocator, get_session_with_cache, idempotent, physical_plan,
 };
 use clap::Parser;
 use datafusion_physical_plan::display::DisplayableExecutionPlan;
@@ -18,7 +18,7 @@ use rayon::iter::{IntoParallelIterator, ParallelIterator as _};
 use tokio::runtime::Builder;
 use tracing::info_span;
 use tracing_futures::Instrument;
-use vortex::error::{vortex_panic, VortexExpect};
+use vortex::error::{VortexExpect, vortex_panic};
 
 feature_flagged_allocator!();

bench-vortex/src/bin/compress.rs (+4 -4)

@@ -1,13 +1,13 @@
-use bench_vortex::compress::bench::{benchmark_compress, CompressMeasurements};
+use bench_vortex::compress::bench::{CompressMeasurements, benchmark_compress};
+use bench_vortex::datasets::BenchmarkDataset;
 use bench_vortex::datasets::public_bi_data::PBIDataset::{
     AirlineSentiment, Arade, Bimbo, CMSprovider, Euro2016, Food, HashTags,
 };
 use bench_vortex::datasets::struct_list_of_ints::StructListOfInts;
 use bench_vortex::datasets::taxi_data::TaxiData;
 use bench_vortex::datasets::tpch_l_comment::{TPCHLCommentCanonical, TPCHLCommentChunked};
-use bench_vortex::datasets::BenchmarkDataset;
-use bench_vortex::display::{print_measurements_json, render_table, DisplayFormat, RatioMode};
-use bench_vortex::{default_env_filter, feature_flagged_allocator, setup_logger, Format};
+use bench_vortex::display::{DisplayFormat, RatioMode, print_measurements_json, render_table};
+use bench_vortex::{Format, default_env_filter, feature_flagged_allocator, setup_logger};
 use clap::Parser;
 use indicatif::ProgressBar;
 use regex::Regex;

bench-vortex/src/bin/notimplemented.rs (+1 -1)

@@ -10,7 +10,7 @@ use vortex::arrays::{
     VarBinViewArray,
 };
 use vortex::buffer::buffer;
-use vortex::datetime_dtype::{TemporalMetadata, TimeUnit, TIME_ID};
+use vortex::datetime_dtype::{TIME_ID, TemporalMetadata, TimeUnit};
 use vortex::dtype::{DType, ExtDType, Nullability, PType};
 use vortex::encodings::alp::{ALPArray, Exponents, RDEncoder};
 use vortex::encodings::bytebool::ByteBoolArray;

bench-vortex/src/bin/random_access.rs (+3 -3)

@@ -2,14 +2,14 @@ use std::process::ExitCode;
 
 use bench_vortex::bench_run::run_with_setup;
 use bench_vortex::datasets::taxi_data::{taxi_data_parquet, taxi_data_vortex};
-use bench_vortex::display::{print_measurements_json, render_table, DisplayFormat, RatioMode};
+use bench_vortex::display::{DisplayFormat, RatioMode, print_measurements_json, render_table};
 use bench_vortex::measurements::TimingMeasurement;
 use bench_vortex::random_access::take::{take_parquet, take_vortex_tokio};
-use bench_vortex::{default_env_filter, feature_flagged_allocator, setup_logger, Format};
+use bench_vortex::{Format, default_env_filter, feature_flagged_allocator, setup_logger};
 use clap::Parser;
 use indicatif::ProgressBar;
 use tokio::runtime::{Builder, Runtime};
-use vortex::buffer::{buffer, Buffer};
+use vortex::buffer::{Buffer, buffer};
 
 feature_flagged_allocator!();

bench-vortex/src/bin/tpch.rs (+11 -9)

@@ -1,16 +1,16 @@
 use std::process::ExitCode;
 use std::time::{Duration, Instant};
 
-use bench_vortex::display::{print_measurements_json, render_table, DisplayFormat, RatioMode};
+use bench_vortex::display::{DisplayFormat, RatioMode, print_measurements_json, render_table};
 use bench_vortex::measurements::QueryMeasurement;
 use bench_vortex::metrics::MetricsSetExt;
 use bench_vortex::tpch::dbgen::{DBGen, DBGenOptions};
-use bench_vortex::tpch::duckdb::{generate_tpch, DuckdbTpchOptions};
+use bench_vortex::tpch::duckdb::{DuckdbTpchOptions, generate_tpch};
 use bench_vortex::tpch::{
-    load_datasets, run_tpch_query, tpch_queries, EXPECTED_ROW_COUNTS_SF1, EXPECTED_ROW_COUNTS_SF10,
-    TPC_H_ROW_COUNT_ARRAY_LENGTH,
+    EXPECTED_ROW_COUNTS_SF1, EXPECTED_ROW_COUNTS_SF10, TPC_H_ROW_COUNT_ARRAY_LENGTH, load_datasets,
+    run_tpch_query, tpch_queries,
 };
-use bench_vortex::{default_env_filter, feature_flagged_allocator, setup_logger, Format};
+use bench_vortex::{Format, default_env_filter, feature_flagged_allocator, setup_logger};
 use clap::{Parser, ValueEnum};
 use datafusion_physical_plan::metrics::{Label, MetricsSet};
 use indicatif::ProgressBar;
@@ -113,13 +113,15 @@ fn main() -> ExitCode {
     //
     // The folder must already be populated with data!
     if !tpch_benchmark_remote_data_dir.ends_with("/") {
-        warn!("Supply a --use-remote-data-dir argument which ends in a slash e.g. s3://vortex-bench-dev/parquet/");
+        warn!(
+            "Supply a --use-remote-data-dir argument which ends in a slash e.g. s3://vortex-bench-dev/parquet/"
+        );
     }
     info!(
         concat!(
-        "Assuming data already exists at this remote (e.g. S3, GCS) URL: {}.\n",
-        "If it does not, you should kill this command, locally generate the files (by running without\n",
-        "--use-remote-data-dir) and upload data/tpch/1/ to some remote location.",
+            "Assuming data already exists at this remote (e.g. S3, GCS) URL: {}.\n",
+            "If it does not, you should kill this command, locally generate the files (by running without\n",
+            "--use-remote-data-dir) and upload data/tpch/1/ to some remote location.",
         ),
         tpch_benchmark_remote_data_dir,
     );
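
For crates that want this formatting without (or ahead of) bumping the edition itself, rustfmt also exposes the style edition as a standalone setting; a hypothetical rustfmt.toml, not part of this commit:

# rustfmt.toml (hypothetical)
style_edition = "2024"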

bench-vortex/src/clickbench.rs (+6 -6)

@@ -7,18 +7,18 @@ use datafusion::datasource::listing::{
     ListingOptions, ListingTable, ListingTableConfig, ListingTableUrl,
 };
 use datafusion::prelude::{ParquetReadOptions, SessionContext};
-use futures::{stream, StreamExt, TryStreamExt};
-use tokio::fs::{create_dir_all, OpenOptions};
+use futures::{StreamExt, TryStreamExt, stream};
+use tokio::fs::{OpenOptions, create_dir_all};
 use tracing::info;
+use vortex::TryIntoArray;
 use vortex::arrow::FromArrowType;
 use vortex::dtype::DType;
-use vortex::error::{vortex_err, VortexError};
-use vortex::file::{VortexWriteOptions, VORTEX_FILE_EXTENSION};
+use vortex::error::{VortexError, vortex_err};
+use vortex::file::{VORTEX_FILE_EXTENSION, VortexWriteOptions};
 use vortex::stream::ArrayStreamAdapter;
-use vortex::TryIntoArray;
 use vortex_datafusion::persistent::VortexFormat;
 
-use crate::{idempotent_async, CTX};
+use crate::{CTX, idempotent_async};
 
 pub static HITS_SCHEMA: LazyLock<Schema> = LazyLock::new(|| {
     use DataType::*;

bench-vortex/src/compress/bench.rs (+1 -1)

@@ -10,12 +10,12 @@ use vortex::arrays::ChunkedArray;
 use vortex::nbytes::NBytes;
 use vortex::{ArrayExt, ArrayRef};
 
+use crate::Format;
 use crate::bench_run::run;
 use crate::compress::chunked_to_vec_record_batch;
 use crate::compress::parquet::{parquet_compress_write, parquet_decompress_read};
 use crate::compress::vortex::{vortex_compress_write, vortex_decompress_read};
 use crate::measurements::{CustomUnitMeasurement, ThroughputMeasurement};
-use crate::Format;
 
 #[derive(Default)]
 pub struct CompressMeasurements {

bench-vortex/src/compress/parquet.rs (+1 -1)

@@ -4,8 +4,8 @@ use std::sync::Arc;
 use arrow_array::RecordBatch;
 use arrow_schema::Schema;
 use bytes::Bytes;
-use parquet::arrow::arrow_reader::ParquetRecordBatchReaderBuilder;
 use parquet::arrow::ArrowWriter;
+use parquet::arrow::arrow_reader::ParquetRecordBatchReaderBuilder;
 use parquet::basic::Compression;
 use parquet::file::properties::WriterProperties;

bench-vortex/src/compress/vortex.rs (+1 -1)

@@ -3,11 +3,11 @@ use std::io::Cursor;
 use arrow_array::ArrayRef;
 use bytes::Bytes;
 use futures::TryStreamExt;
+use vortex::Array;
 use vortex::arrow::IntoArrowArray;
 use vortex::error::VortexResult;
 use vortex::file::{VortexOpenOptions, VortexWriteOptions};
 use vortex::stream::ArrayStreamArrayExt;
-use vortex::Array;
 
 #[inline(never)]
 pub async fn vortex_compress_write(array: &dyn Array, buf: &mut Vec<u8>) -> VortexResult<u64> {

bench-vortex/src/datasets/public_bi_data.rs (+5 -5)

@@ -8,23 +8,23 @@ use std::sync::LazyLock;
 
 use async_trait::async_trait;
 use enum_iterator::Sequence;
-use futures::{stream, StreamExt, TryStreamExt};
-use humansize::{format_size, DECIMAL};
+use futures::{StreamExt, TryStreamExt, stream};
+use humansize::{DECIMAL, format_size};
 use log::{debug, info};
 use reqwest::Url;
 use tokio::fs::File;
 use vortex::aliases::hash_map::HashMap;
 use vortex::arrays::ChunkedArray;
-use vortex::error::{vortex_err, VortexExpect, VortexResult};
+use vortex::error::{VortexExpect, VortexResult, vortex_err};
 use vortex::file::{VortexOpenOptions, VortexWriteOptions};
 use vortex::io::TokioFile;
 use vortex::{Array, ArrayRef};
 
+use crate::datasets::BenchmarkDataset;
 use crate::datasets::data_downloads::{decompress_bz2, download_data};
 use crate::datasets::public_bi_data::PBIDataset::*;
-use crate::datasets::BenchmarkDataset;
 use crate::parquet_reader::parquet_to_vortex;
-use crate::{idempotent, idempotent_async, IdempotentPath};
+use crate::{IdempotentPath, idempotent, idempotent_async};
 
 // NB: we do not expect this to change, otherwise we'd crawl the site and populate it at runtime
 // We will eventually switch over to self-hosting this data, at which time this map will need

bench-vortex/src/datasets/taxi_data.rs (+3 -3)

@@ -2,15 +2,15 @@ use std::path::PathBuf;
 
 use async_trait::async_trait;
 use tokio::fs::File;
+use vortex::ArrayRef;
 use vortex::error::VortexError;
 use vortex::file::{VortexOpenOptions, VortexWriteOptions};
 use vortex::io::TokioFile;
-use vortex::ArrayRef;
 
-use crate::datasets::data_downloads::download_data;
 use crate::datasets::BenchmarkDataset;
+use crate::datasets::data_downloads::download_data;
 use crate::parquet_reader::parquet_to_vortex;
-use crate::{idempotent_async, IdempotentPath};
+use crate::{IdempotentPath, idempotent_async};
 
 pub struct TaxiData;

bench-vortex/src/display.rs (+1 -1)

@@ -4,8 +4,8 @@ use tabled::settings::themes::Colorization;
 use tabled::settings::{Color, Style};
 use vortex::aliases::hash_map::HashMap;
 
-use crate::measurements::{MeasurementValue, TableValue, ToJson, ToTable};
 use crate::Format;
+use crate::measurements::{MeasurementValue, TableValue, ToJson, ToTable};
 
 #[derive(ValueEnum, Default, Clone, Debug)]
 pub enum DisplayFormat {

bench-vortex/src/lib.rs (+1 -1)

@@ -18,7 +18,7 @@ use datafusion::execution::object_store::DefaultObjectStoreRegistry;
 use datafusion::execution::runtime_env::RuntimeEnvBuilder;
 use datafusion::prelude::{SessionConfig, SessionContext};
 use datafusion_physical_plan::metrics::MetricsSet;
-use datafusion_physical_plan::{collect, ExecutionPlan};
+use datafusion_physical_plan::{ExecutionPlan, collect};
 use rand::{Rng, SeedableRng as _};
 use tracing::level_filters::LevelFilter;
 use tracing_subscriber::EnvFilter;

bench-vortex/src/metrics.rs (+1 -1)

@@ -1,7 +1,7 @@
 use std::sync::Arc;
 
-use datafusion_physical_plan::metrics::{Label, MetricValue, MetricsSet};
 use datafusion_physical_plan::Metric;
+use datafusion_physical_plan::metrics::{Label, MetricValue, MetricsSet};
 use vortex::aliases::hash_map::HashMap;
 
 pub trait MetricsSetExt {

bench-vortex/src/parquet_reader.rs (+1 -1)

@@ -3,12 +3,12 @@ use std::path::PathBuf;
 
 use arrow_array::RecordBatchReader;
 use parquet::arrow::arrow_reader::ParquetRecordBatchReaderBuilder;
+use vortex::TryIntoArray;
 use vortex::arrow::FromArrowType;
 use vortex::dtype::DType;
 use vortex::error::{VortexError, VortexResult};
 use vortex::iter::{ArrayIteratorAdapter, ArrayIteratorExt};
 use vortex::stream::ArrayStream;
-use vortex::TryIntoArray;
 
 pub async fn parquet_to_vortex(parquet_path: PathBuf) -> VortexResult<impl ArrayStream> {
     let reader = ParquetRecordBatchReaderBuilder::try_new(File::open(parquet_path)?)?.build()?;

bench-vortex/src/random_access/take.rs (+1 -1)

@@ -7,9 +7,9 @@ use arrow_select::concat::concat_batches;
 use arrow_select::take::take_record_batch;
 use futures::stream;
 use itertools::Itertools;
+use parquet::arrow::ParquetRecordBatchStreamBuilder;
 use parquet::arrow::arrow_reader::ArrowReaderOptions;
 use parquet::arrow::async_reader::AsyncFileReader;
-use parquet::arrow::ParquetRecordBatchStreamBuilder;
 use parquet::file::metadata::RowGroupMetaData;
 use stream::StreamExt;
 use vortex::aliases::hash_map::HashMap;

bench-vortex/src/tpch/dbgen.rs (+3 -1)

@@ -181,7 +181,9 @@ fn get_or_cache_toolchain(
     let download_dir = dbgen_dir(cache_dir, version, platform);
     std::fs::create_dir_all(&download_dir)?;
 
-    let url = format!("https://github.com/spiraldb/tpch-dbgen/releases/download/{version}/dbgen-{platform}-{version}.tar");
+    let url = format!(
+        "https://github.com/spiraldb/tpch-dbgen/releases/download/{version}/dbgen-{platform}-{version}.tar"
+    );
 
     let mut zip_file = reqwest::blocking::get(url)?;
     let zip_path = download_dir.join(

bench-vortex/src/tpch/mod.rs (+5 -5)

@@ -6,30 +6,30 @@ use std::sync::Arc;
 use arrow_array::StructArray as ArrowStructArray;
 use arrow_schema::Schema;
 use datafusion::dataframe::DataFrameWriteOptions;
+use datafusion::datasource::MemTable;
 use datafusion::datasource::listing::{
     ListingOptions, ListingTable, ListingTableConfig, ListingTableUrl,
 };
-use datafusion::datasource::MemTable;
 use datafusion::prelude::{CsvReadOptions, ParquetReadOptions, SessionContext};
 use futures::StreamExt;
 use named_locks::with_lock;
+use object_store::ObjectStore;
 use object_store::aws::AmazonS3Builder;
 use object_store::gcp::GoogleCloudStorageBuilder;
 use object_store::local::LocalFileSystem;
 use object_store::path::Path as ObjectStorePath;
-use object_store::ObjectStore;
 use tokio::fs::OpenOptions;
 use url::Url;
 use vortex::arrays::ChunkedArray;
 use vortex::arrow::{FromArrowArray, FromArrowType};
 use vortex::dtype::DType;
 use vortex::error::VortexExpect as _;
-use vortex::file::{VortexWriteOptions, VORTEX_FILE_EXTENSION};
+use vortex::file::{VORTEX_FILE_EXTENSION, VortexWriteOptions};
 use vortex::{Array, ArrayRef, TryIntoArray};
-use vortex_datafusion::persistent::VortexFormat;
 use vortex_datafusion::SessionContextExt;
+use vortex_datafusion::persistent::VortexFormat;
 
-use crate::{get_session_with_cache, Format, CTX};
+use crate::{CTX, Format, get_session_with_cache};
 
 pub mod dbgen;
 pub mod duckdb;

encodings/alp/benches/alp_compress.rs (+2 -2)

@@ -3,10 +3,10 @@
 use divan::Bencher;
 use rand::rngs::StdRng;
 use rand::{Rng, SeedableRng as _};
-use vortex_alp::{alp_encode, ALPFloat, ALPRDFloat, RDEncoder};
+use vortex_alp::{ALPFloat, ALPRDFloat, RDEncoder, alp_encode};
+use vortex_array::Array;
 use vortex_array::arrays::PrimitiveArray;
 use vortex_array::validity::Validity;
-use vortex_array::Array;
 use vortex_buffer::buffer;
 use vortex_dtype::NativePType;

encodings/alp/src/alp/array.rs (+6 -5)

@@ -7,15 +7,16 @@ use vortex_array::stats::StatsSet;
 use vortex_array::variants::PrimitiveArrayTrait;
 use vortex_array::vtable::{StatisticsVTable, VTableRef};
 use vortex_array::{
-    encoding_ids, Array, ArrayCanonicalImpl, ArrayExt, ArrayImpl, ArrayRef, ArrayStatisticsImpl,
+    Array, ArrayCanonicalImpl, ArrayExt, ArrayImpl, ArrayRef, ArrayStatisticsImpl,
     ArrayValidityImpl, ArrayVariantsImpl, Canonical, Encoding, EncodingId, SerdeMetadata,
+    encoding_ids,
 };
 use vortex_dtype::{DType, PType};
-use vortex_error::{vortex_bail, VortexResult};
+use vortex_error::{VortexResult, vortex_bail};
 use vortex_mask::Mask;
 
 use crate::alp::serde::ALPMetadata;
-use crate::alp::{alp_encode, decompress, Exponents};
+use crate::alp::{Exponents, alp_encode, decompress};
 
 #[derive(Clone, Debug)]
 pub struct ALPArray {
@@ -142,13 +143,13 @@ impl StatisticsVTable<&ALPArray> for ALPEncoding {}
 
 #[cfg(test)]
 mod tests {
+    use vortex_array::SerdeMetadata;
     use vortex_array::patches::PatchesMetadata;
     use vortex_array::test_harness::check_metadata;
-    use vortex_array::SerdeMetadata;
     use vortex_dtype::PType;
 
-    use crate::alp::serde::ALPMetadata;
     use crate::Exponents;
+    use crate::alp::serde::ALPMetadata;
 
     #[cfg_attr(miri, ignore)]
     #[test]
