Skip to content

Commit

Permalink
clippy + cleanup
Browse files Browse the repository at this point in the history
Use the new `#[expect]` lint attribute with clippy, which raises an error if the lint expectation is no longer fulfilled (i.e. the suppressed lint no longer fires)
cleanup
  • Loading branch information
PSeitz committed Sep 6, 2024
1 parent 85395d9 commit 2ce0729
Show file tree
Hide file tree
Showing 9 changed files with 15 additions and 21 deletions.
4 changes: 2 additions & 2 deletions src/aggregation/agg_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -585,7 +585,7 @@ fn test_aggregation_invalid_requests() -> crate::Result<()> {
}
}));

assert_eq!(agg_req_1.is_err(), true);
assert!(agg_req_1.is_err());
assert_eq!(agg_req_1.unwrap_err().to_string(), "missing field `field`");

let agg_req_1: Result<Aggregations, serde_json::Error> = serde_json::from_value(json!({
Expand All @@ -596,7 +596,7 @@ fn test_aggregation_invalid_requests() -> crate::Result<()> {
}
}));

assert_eq!(agg_req_1.is_err(), true);
assert!(agg_req_1.is_err());
// TODO: This should list valid values
assert!(agg_req_1
.unwrap_err()
Expand Down
4 changes: 2 additions & 2 deletions src/fastfield/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -839,12 +839,12 @@ mod tests {
.bool("field_bool")
.unwrap()
.first_or_default_col(false);
assert_eq!(col.get_val(0), false);
assert!(!col.get_val(0));
let col = fastfield_readers
.bool("field_bool")
.unwrap()
.first_or_default_col(true);
assert_eq!(col.get_val(0), true);
assert!(col.get_val(0));
}

fn get_index(docs: &[crate::TantivyDocument], schema: &Schema) -> crate::Result<RamDirectory> {
Expand Down
2 changes: 0 additions & 2 deletions src/functional_test.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
#![allow(deprecated)] // Remove with index sorting

use std::collections::HashSet;

use rand::{thread_rng, Rng};
Expand Down
2 changes: 1 addition & 1 deletion src/index/segment_reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -478,7 +478,7 @@ pub fn merge_field_meta_data(
.into_iter()
.kmerge_by(|left, right| left < right)
// TODO: Remove allocation
.group_by(|el| (el.field_name.to_string(), el.typ))
.chunk_by(|el| (el.field_name.to_string(), el.typ))
{
let mut merged: FieldMetadata = group.next().unwrap();
for el in group {
Expand Down
10 changes: 5 additions & 5 deletions src/indexer/doc_opstamp_mapping.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,10 +53,10 @@ mod tests {
#[test]
fn test_doc_to_opstamp_mapping_with_map() {
let doc_to_opstamp_mapping = DocToOpstampMapping::WithMap(&[5u64, 1u64, 0u64, 4u64, 3u64]);
assert_eq!(doc_to_opstamp_mapping.is_deleted(0u32, 2u64), false);
assert_eq!(doc_to_opstamp_mapping.is_deleted(1u32, 2u64), true);
assert_eq!(doc_to_opstamp_mapping.is_deleted(2u32, 2u64), true);
assert_eq!(doc_to_opstamp_mapping.is_deleted(3u32, 2u64), false);
assert_eq!(doc_to_opstamp_mapping.is_deleted(4u32, 2u64), false);
assert!(!doc_to_opstamp_mapping.is_deleted(0u32, 2u64));
assert!(doc_to_opstamp_mapping.is_deleted(1u32, 2u64));
assert!(doc_to_opstamp_mapping.is_deleted(2u32, 2u64));
assert!(!doc_to_opstamp_mapping.is_deleted(3u32, 2u64));
assert!(!doc_to_opstamp_mapping.is_deleted(4u32, 2u64));
}
}
2 changes: 1 addition & 1 deletion src/indexer/log_merge_policy.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ impl MergePolicy for LogMergePolicy {

let mut current_max_log_size = f64::MAX;
let mut levels = vec![];
for (_, merge_group) in &size_sorted_segments.into_iter().group_by(|segment| {
for (_, merge_group) in &size_sorted_segments.into_iter().chunk_by(|segment| {
let segment_log_size = f64::from(self.clip_min_size(segment.num_docs())).log2();
if segment_log_size < (current_max_log_size - self.level_log_size) {
// update current_max_log_size to create a new group
Expand Down
2 changes: 1 addition & 1 deletion src/indexer/segment_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ impl SegmentWriter {
let vals_grouped_by_field = doc
.iter_fields_and_values()
.sorted_by_key(|(field, _)| *field)
.group_by(|(field, _)| *field);
.chunk_by(|(field, _)| *field);

for (field, field_values) in &vals_grouped_by_field {
let values = field_values.map(|el| el.1);
Expand Down
8 changes: 2 additions & 6 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,11 @@
#![cfg_attr(all(feature = "unstable", test), feature(test))]
#![doc(test(attr(allow(unused_variables), deny(warnings))))]
#![warn(missing_docs)]
#![allow(
#![expect(
clippy::len_without_is_empty,
clippy::derive_partial_eq_without_eq,
clippy::module_inception,
clippy::needless_range_loop,
clippy::bool_assert_comparison
clippy::needless_range_loop
)]

//! # `tantivy`
Expand Down Expand Up @@ -178,7 +177,6 @@ pub use crate::future_result::FutureResult;
pub type Result<T> = std::result::Result<T, TantivyError>;

mod core;
#[allow(deprecated)] // Remove with index sorting
pub mod indexer;

#[allow(unused_doc_comments)]
Expand All @@ -190,7 +188,6 @@ pub mod collector;
pub mod directory;
pub mod fastfield;
pub mod fieldnorm;
#[allow(deprecated)] // Remove with index sorting
pub mod index;
pub mod positions;
pub mod postings;
Expand Down Expand Up @@ -223,7 +220,6 @@ pub use self::docset::{DocSet, COLLECT_BLOCK_BUFFER_LEN, TERMINATED};
pub use crate::core::json_utils;
pub use crate::core::{Executor, Searcher, SearcherGeneration};
pub use crate::directory::Directory;
#[allow(deprecated)] // Remove with index sorting
pub use crate::index::{
Index, IndexBuilder, IndexMeta, IndexSettings, InvertedIndexReader, Order, Segment,
SegmentMeta, SegmentReader,
Expand Down
2 changes: 1 addition & 1 deletion src/tokenizer/regex_tokenizer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ mod tests {
#[test]
fn test_regexp_tokenizer_error_on_invalid_regex() {
let tokenizer = RegexTokenizer::new(r"\@(");
assert_eq!(tokenizer.is_err(), true);
assert!(tokenizer.is_err());
assert_eq!(
tokenizer.err().unwrap().to_string(),
"An invalid argument was passed: '\\@('"
Expand Down

0 comments on commit 2ce0729

Please sign in to comment.