diff --git a/build.rs b/build.rs
index 8cdb4a7465142..4e5d67178a734 100644
--- a/build.rs
+++ b/build.rs
@@ -20,7 +20,7 @@ impl TrackedEnv {
     pub fn emit_rerun_stanzas(&self) {
         for env_var in &self.tracked {
-            println!("cargo:rerun-if-env-changed={}", env_var);
+            println!("cargo:rerun-if-env-changed={env_var}");
         }
     }
 }
@@ -33,9 +33,9 @@ enum ConstantValue {
 impl ConstantValue {
     pub fn as_parts(&self) -> (&'static str, String) {
         match &self {
-            ConstantValue::Required(value) => ("&str", format!("\"{}\"", value)),
+            ConstantValue::Required(value) => ("&str", format!("\"{value}\"")),
             ConstantValue::Optional(value) => match value {
-                Some(value) => ("Option<&str>", format!("Some(\"{}\")", value)),
+                Some(value) => ("Option<&str>", format!("Some(\"{value}\")")),
                 None => ("Option<&str>", "None".to_string()),
             },
         }
@@ -79,10 +79,8 @@ impl BuildConstants {
         for (name, desc, value) in self.values {
             let (const_type, const_val) = value.as_parts();
-            let full = format!(
-                "#[doc=r#\"{}\"#]\npub const {}: {} = {};\n",
-                desc, name, const_type, const_val
-            );
+            let full =
+                format!("#[doc=r#\"{desc}\"#]\npub const {name}: {const_type} = {const_val};\n");
             output_file.write_all(full.as_ref())?;
         }
diff --git a/lib/codecs/src/decoding/format/gelf.rs b/lib/codecs/src/decoding/format/gelf.rs
index de372ffce28d6..582a0d2b04fc4 100644
--- a/lib/codecs/src/decoding/format/gelf.rs
+++ b/lib/codecs/src/decoding/format/gelf.rs
@@ -84,11 +84,9 @@ impl GelfDeserializer {
         // GELF spec defines the version as 1.1 which has not changed since 2013
         if parsed.version != GELF_VERSION {
-            return Err(format!(
-                "{} does not match GELF spec version ({})",
-                VERSION, GELF_VERSION
-            )
-            .into());
+            return Err(
+                format!("{VERSION} does not match GELF spec version ({GELF_VERSION})").into(),
+            );
         }
 
         log.insert(VERSION, parsed.version.to_string());
@@ -140,16 +138,15 @@ impl GelfDeserializer {
             // per GELF spec, Additional field names must be prefixed with an underscore
             if !key.starts_with('_') {
                 return Err(format!(
-                    "'{}' field is invalid. \
-                    Additional field names must be prefixed with an underscore.",
-                    key
+                    "'{key}' field is invalid. \
+                    Additional field names must be prefixed with an underscore."
                 )
                 .into());
             }
 
             // per GELF spec, Additional field names must be characters dashes or dots
             if !VALID_FIELD_REGEX.is_match(key) {
-                return Err(format!("'{}' field contains invalid characters. Field names may \
-                    contain only letters, numbers, underscores, dashes and dots.", key).into());
+                return Err(format!("'{key}' field contains invalid characters. Field names may \
+                    contain only letters, numbers, underscores, dashes and dots.").into());
             }
 
             // per GELF spec, Additional field values must be either strings or numbers
@@ -165,8 +162,8 @@ impl GelfDeserializer {
                         serde_json::Value::Array(_) => "array",
                         serde_json::Value::Object(_) => "object",
                     };
-                    return Err(format!("The value type for field {} is an invalid type ({}). Additional field values \
-                        should be either strings or numbers.", key, type_).into());
+                    return Err(format!("The value type for field {key} is an invalid type ({type_}). Additional field values \
+                        should be either strings or numbers.").into());
                 }
             }
         }
diff --git a/lib/codecs/src/decoding/format/json.rs b/lib/codecs/src/decoding/format/json.rs
index fe0d2ef57dab7..7b261cb27451f 100644
--- a/lib/codecs/src/decoding/format/json.rs
+++ b/lib/codecs/src/decoding/format/json.rs
@@ -79,7 +79,7 @@ impl Deserializer for JsonDeserializer {
         }
 
         let json: serde_json::Value = serde_json::from_slice(&bytes)
-            .map_err(|error| format!("Error parsing JSON: {:?}", error))?;
+            .map_err(|error| format!("Error parsing JSON: {error:?}"))?;
 
         // If the root is an Array, split it into multiple events
         let mut events = match json {
diff --git a/lib/codecs/src/decoding/format/native_json.rs b/lib/codecs/src/decoding/format/native_json.rs
index 5e1a351d9e787..b8f5e241aa738 100644
--- a/lib/codecs/src/decoding/format/native_json.rs
+++ b/lib/codecs/src/decoding/format/native_json.rs
@@ -57,7 +57,7 @@ impl Deserializer for NativeJsonDeserializer {
         }
 
         let json: serde_json::Value = serde_json::from_slice(&bytes)
-            .map_err(|error| format!("Error parsing JSON: {:?}", error))?;
+            .map_err(|error| format!("Error parsing JSON: {error:?}"))?;
 
         let events = match json {
             serde_json::Value::Array(values) => values
diff --git a/lib/codecs/src/decoding/framing/octet_counting.rs b/lib/codecs/src/decoding/framing/octet_counting.rs
index 60a6408a50c7d..b8058341d9894 100644
--- a/lib/codecs/src/decoding/framing/octet_counting.rs
+++ b/lib/codecs/src/decoding/framing/octet_counting.rs
@@ -395,7 +395,7 @@ mod tests {
         buffer.put(&b"defghijklmnopqrstuvwxyzand here we are"[..]);
 
         let result = decoder.decode(&mut buffer);
-        println!("{:?}", result);
+        println!("{result:?}");
         assert!(result.is_err());
         assert_eq!(b"and here we are"[..], buffer);
     }
diff --git a/lib/codecs/src/decoding/mod.rs b/lib/codecs/src/decoding/mod.rs
index b7b41cef3b8c9..4c2f0983619b3 100644
--- a/lib/codecs/src/decoding/mod.rs
+++ b/lib/codecs/src/decoding/mod.rs
@@ -44,8 +44,8 @@ pub enum Error {
 impl std::fmt::Display for Error {
     fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Self::FramingError(error) => write!(formatter, "FramingError({})", error),
-            Self::ParsingError(error) => write!(formatter, "ParsingError({})", error),
+            Self::FramingError(error) => write!(formatter, "FramingError({error})"),
+            Self::ParsingError(error) => write!(formatter, "ParsingError({error})"),
         }
     }
 }
diff --git a/lib/codecs/src/encoding/format/avro.rs b/lib/codecs/src/encoding/format/avro.rs
index 24334aabf13e9..3719f1307a9aa 100644
--- a/lib/codecs/src/encoding/format/avro.rs
+++ b/lib/codecs/src/encoding/format/avro.rs
@@ -23,7 +23,7 @@ impl AvroSerializerConfig {
     /// Build the `AvroSerializer` from this configuration.
     pub fn build(&self) -> Result {
         let schema = apache_avro::Schema::parse_str(&self.avro.schema)
-            .map_err(|error| format!("Failed building Avro serializer: {}", error))?;
+            .map_err(|error| format!("Failed building Avro serializer: {error}"))?;
         Ok(AvroSerializer { schema })
     }
diff --git a/lib/codecs/src/encoding/mod.rs b/lib/codecs/src/encoding/mod.rs
index d742166d51479..a3338c8bc3065 100644
--- a/lib/codecs/src/encoding/mod.rs
+++ b/lib/codecs/src/encoding/mod.rs
@@ -37,8 +37,8 @@ pub enum Error {
 impl std::fmt::Display for Error {
     fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Self::FramingError(error) => write!(formatter, "FramingError({})", error),
-            Self::SerializingError(error) => write!(formatter, "SerializingError({})", error),
+            Self::FramingError(error) => write!(formatter, "FramingError({error})"),
+            Self::SerializingError(error) => write!(formatter, "SerializingError({error})"),
         }
     }
 }
diff --git a/lib/codecs/tests/native.rs b/lib/codecs/tests/native.rs
index d4f9a139d35a4..6292838d6d57e 100644
--- a/lib/codecs/tests/native.rs
+++ b/lib/codecs/tests/native.rs
@@ -236,7 +236,7 @@ fn rebuild_fixtures(proto: &str, deserializer: &dyn Deserializer, serializer: &m
         .into_iter()
         .collect();
     let mut out = File::create(&new_path).unwrap_or_else(|error| {
-        panic!("Could not create rebuilt file {:?}: {:?}", new_path, error)
+        panic!("Could not create rebuilt file {new_path:?}: {error:?}")
     });
     out.write_all(&buf).expect("Could not write rebuilt data");
     out.flush().expect("Could not write rebuilt data");
diff --git a/lib/datadog/grok/src/filters/array.rs b/lib/datadog/grok/src/filters/array.rs
index bb9b164f589ed..7b4f977b1f734 100644
--- a/lib/datadog/grok/src/filters/array.rs
+++ b/lib/datadog/grok/src/filters/array.rs
@@ -95,12 +95,12 @@ pub fn parse<'a>(
     delimiter: Option<&'a str>,
 ) -> Result<Vec<Value>, String> {
     let result = parse_array(brackets, delimiter)(input)
-        .map_err(|_| format!("could not parse '{}' as array", input))
+        .map_err(|_| format!("could not parse '{input}' as array"))
         .and_then(|(rest, result)| {
             rest.trim()
                 .is_empty()
                 .then_some(result)
-                .ok_or_else(|| format!("could not parse '{}' as array", input))
+                .ok_or_else(|| format!("could not parse '{input}' as array"))
         })?;
 
     Ok(result)
diff --git a/lib/datadog/grok/src/filters/keyvalue.rs b/lib/datadog/grok/src/filters/keyvalue.rs
index 300a6efdfdc3e..c2dd885c4a3ec 100644
--- a/lib/datadog/grok/src/filters/keyvalue.rs
+++ b/lib/datadog/grok/src/filters/keyvalue.rs
@@ -175,7 +175,7 @@ fn parse<'a>(
         quotes,
         value_re,
     )
-    .map_err(|_| format!("could not parse '{}' as 'keyvalue'", input))?;
+    .map_err(|_| format!("could not parse '{input}' as 'keyvalue'"))?;
 
     if rest.trim().is_empty() {
         Ok(result)
diff --git a/lib/datadog/grok/src/grok.rs b/lib/datadog/grok/src/grok.rs
index dacd5fe526f9e..eceb0ac0fcc12 100644
--- a/lib/datadog/grok/src/grok.rs
+++ b/lib/datadog/grok/src/grok.rs
@@ -180,13 +180,13 @@ impl Grok {
 
         if let Some(definition) = m.at(DEFINITION_INDEX) {
             self.insert_definition(raw_pattern, definition);
-            name = format!("{}={}", name, definition);
+            name = format!("{name}={definition}");
         }
 
         // Since a pattern with a given name can show up more than once, we need to
         // loop through the number of matches found and apply the transformations
         // on each of them.
-        for _ in 0..named_regex.matches(&format!("%{{{}}}", name)).count() {
+        for _ in 0..named_regex.matches(&format!("%{{{name}}}")).count() {
             // Check if we have a definition for the raw pattern key and fail quickly
             // if not.
             let pattern_definition = match self.definitions.get(raw_pattern) {
@@ -200,7 +200,7 @@ impl Grok {
             // engine understands and uses a named group.
 
             let replacement = if with_alias_only && m.at(ALIAS_INDEX).is_none() {
-                format!("(?:{})", pattern_definition)
+                format!("(?:{pattern_definition})")
             } else {
                 // If an alias is specified by the user use that one to
                 // match the name conversion, otherwise just use
@@ -210,16 +210,16 @@
                     Some(a) => String::from(a),
                     None => name.clone(),
                 },
-                    format!("name{}", index),
+                    format!("name{index}"),
                 );
-                format!("(?<name{}>{})", index, pattern_definition)
+                format!("(?<name{index}>{pattern_definition})")
             };
 
             // Finally, look for the original %{...} style pattern and
             // replace it with our replacement (only the first occurrence
             // since we are iterating one by one).
-            named_regex = named_regex.replacen(&format!("%{{{}}}", name), &replacement, 1);
+            named_regex = named_regex.replacen(&format!("%{{{name}}}"), &replacement, 1);
 
             index += 1;
         }
diff --git a/lib/datadog/grok/src/matchers/date.rs b/lib/datadog/grok/src/matchers/date.rs
index bb97f3f86a038..8ccb67835fb03 100644
--- a/lib/datadog/grok/src/matchers/date.rs
+++ b/lib/datadog/grok/src/matchers/date.rs
@@ -77,7 +77,7 @@ pub fn convert_time_format(format: &str) -> std::result::Result<String, String>
                     time_format.push_str("%:z");
                 }
             }
-            _ => return Err(format!("invalid date format '{}'", format)),
+            _ => return Err(format!("invalid date format '{format}'")),
         }
     } else if c == '\''
     // quoted literal
@@ -125,9 +125,9 @@ fn parse_offset(tz: &str) -> Result<FixedOffset, String> {
     }
     let offset_format = if tz.contains(':') { "%:z" } else { "%z" };
     // apparently the easiest way to parse tz offset is parsing the complete datetime
-    let date_str = format!("2020-04-12 22:10:57 {}", tz);
+    let date_str = format!("2020-04-12 22:10:57 {tz}");
     let datetime =
-        DateTime::parse_from_str(&date_str, &format!("%Y-%m-%d %H:%M:%S {}", offset_format))
+        DateTime::parse_from_str(&date_str, &format!("%Y-%m-%d %H:%M:%S {offset_format}"))
             .map_err(|e| e.to_string())?;
     Ok(datetime.timezone())
 }
@@ -203,7 +203,7 @@ pub fn time_format_to_regex(
             }
             with_tz = true;
         }
-        _ => return Err(format!("invalid date format '{}'", format)),
+        _ => return Err(format!("invalid date format '{format}'")),
     }
 } else if c == '\'' {
     // quoted literal
diff --git a/lib/datadog/grok/src/parse_grok_pattern.rs b/lib/datadog/grok/src/parse_grok_pattern.rs
index c6f8387675cdb..bc6af7ede767f 100644
--- a/lib/datadog/grok/src/parse_grok_pattern.rs
+++ b/lib/datadog/grok/src/parse_grok_pattern.rs
@@ -16,7 +16,7 @@ pub fn parse_grok_pattern(input: &str) -> Result<GrokPattern, String> {
         .parse(input, lexer)
         .map_err(|e| match e {
             ParseError::User { error } => error.to_string(),
-            _ => format!("invalid grok pattern: {}", input),
+            _ => format!("invalid grok pattern: {input}"),
         })
 }
 
@@ -41,7 +41,7 @@ mod tests {
     fn parse_grok_filter() {
         let input = r#"%{date:e-http.status.abc[".\""]:integer("a. df",.123,1.23e-32, true, null, 123e-5)}"#;
         let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
-            panic!("Problem parsing grok: {:?}", error);
+            panic!("Problem parsing grok: {error:?}");
         });
         assert_eq!(parsed.match_fn.name, "date");
         let destination = parsed.destination.unwrap();
@@ -72,7 +72,7 @@ mod tests {
     fn empty_field() {
         let input = r#"%{data:}"#;
         let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
-            panic!("Problem parsing grok: {:?}", error);
+            panic!("Problem parsing grok: {error:?}");
         });
         assert_eq!(parsed.destination, None);
     }
@@ -81,7 +81,7 @@ mod tests {
     fn escaped_quotes() {
         let input = r#"%{data:field:filter("escaped \"quotes\"")}"#;
         let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
-            panic!("Problem parsing grok: {:?}", error);
+            panic!("Problem parsing grok: {error:?}");
         });
         assert_eq!(
             parsed.destination,
@@ -99,7 +99,7 @@ mod tests {
     fn empty_field_with_filter() {
         let input = r#"%{data::json}"#;
         let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
-            panic!("Problem parsing grok: {:?}", error);
+            panic!("Problem parsing grok: {error:?}");
        });
         assert_eq!(
             parsed.destination,
@@ -126,7 +126,7 @@ mod tests {
     fn escaped_new_line() {
         let input = r#"%{data::array("\\n")}"#;
         let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
-            panic!("Problem parsing grok: {:?}", error);
+            panic!("Problem parsing grok: {error:?}");
         });
         assert_eq!(
             parsed.destination,
diff --git a/lib/datadog/grok/src/parse_grok_rules.rs b/lib/datadog/grok/src/parse_grok_rules.rs
index 04a1a4d021bed..a2f22d22bd4ae 100644
--- a/lib/datadog/grok/src/parse_grok_rules.rs
+++ b/lib/datadog/grok/src/parse_grok_rules.rs
@@ -304,7 +304,7 @@ fn resolve_grok_pattern(
                 resolves_match_function(grok_alias.clone(), pattern, context)?;
 
             if let Some(grok_alias) = &grok_alias {
-                context.append_regex(&format!(":{}", grok_alias));
+                context.append_regex(&format!(":{grok_alias}"));
             }
             context.append_regex("}");
         }
diff --git a/lib/datadog/search-syntax/src/node.rs b/lib/datadog/search-syntax/src/node.rs
index bca993ac62f99..82698777a03ab 100644
--- a/lib/datadog/search-syntax/src/node.rs
+++ b/lib/datadog/search-syntax/src/node.rs
@@ -42,9 +42,9 @@ pub enum ComparisonValue {
 impl std::fmt::Display for ComparisonValue {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Self::String(s) => write!(f, "{}", s),
-            Self::Integer(num) => write!(f, "{}", num),
-            Self::Float(num) => write!(f, "{}", num),
+            Self::String(s) => write!(f, "{s}"),
+            Self::Integer(num) => write!(f, "{num}"),
+            Self::Float(num) => write!(f, "{num}"),
             Self::Unbounded => write!(f, "*"),
         }
     }
@@ -181,8 +181,8 @@ impl QueryNode {
         match self {
             QueryNode::MatchAllDocs => String::from("*:*"),
             QueryNode::MatchNoDocs => String::from("-*:*"),
-            QueryNode::AttributeExists { attr } => format!("_exists_:{}", attr),
-            QueryNode::AttributeMissing { attr } => format!("_missing_:{}", attr),
+            QueryNode::AttributeExists { attr } => format!("_exists_:{attr}"),
+            QueryNode::AttributeMissing { attr } => format!("_missing_:{attr}"),
             QueryNode::AttributeRange {
                 attr,
                 lower,
@@ -339,7 +339,7 @@ impl QueryNode {
         if attr == DEFAULT_FIELD {
             String::new()
         } else {
-            format!("{}:", attr)
+            format!("{attr}:")
         }
     }
 }
diff --git a/lib/datadog/search-syntax/src/parser.rs b/lib/datadog/search-syntax/src/parser.rs
index 272c7db83701a..1b40c81d9ae42 100644
--- a/lib/datadog/search-syntax/src/parser.rs
+++ b/lib/datadog/search-syntax/src/parser.rs
@@ -36,7 +36,7 @@ mod tests {
     fn parses_whitespace() {
[" ", " ", "\t"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::MatchAllDocs), "Failed to parse MatchAllDocs query out of empty input" @@ -48,14 +48,12 @@ mod tests { fn parses_unquoted_default_field_query() { let cases = ["foo"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeTerm { ref attr, ref value } if attr == DEFAULT_FIELD && value == "foo"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -64,14 +62,12 @@ mod tests { fn parses_quoted_default_field_query() { let cases = ["\"foo bar\""]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::QuotedAttribute { ref attr, ref phrase } if attr == DEFAULT_FIELD && phrase == "foo bar"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -80,14 +76,12 @@ mod tests { fn parses_attribute_term_query() { let cases = ["foo:bar", "foo:(bar)", "foo:b\\ar", "foo:(b\\ar)"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeTerm { ref attr, ref value } if attr == "foo" && value == "bar"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -96,14 +90,12 @@ mod tests { fn parses_numeric_attribute_term_query() { let cases = ["foo:10"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeTerm { ref attr, ref value } if attr == "foo" && value == "10"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -112,14 +104,12 @@ mod tests { fn parses_attribute_term_query_with_escapes() { let cases = ["foo:bar\\:baz", "fo\\o:bar\\:baz"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeTerm { ref attr, ref value } if attr == "foo" && value == "bar:baz"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -128,14 +118,12 @@ mod tests { fn parses_attribute_comparison_query_with_escapes() { let cases = ["foo:<4.12345E-4", "foo:<4.12345E\\-4"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeComparison { ref attr, value: 
ComparisonValue::Float(ref compvalue), comparator: Comparison::Lt } if attr == "foo" && (*compvalue - 4.12345E-4).abs() < 0.001), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -144,14 +132,12 @@ mod tests { fn parses_and_normalizes_multiterm_query() { let cases = ["foo bar", "foo bar"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeTerm { ref attr, ref value } if attr == DEFAULT_FIELD && value == "foo bar"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -160,7 +146,7 @@ mod tests { fn parses_multiple_multiterm_query() { let cases = ["foo bar baz AND qux quux quuz"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); if let QueryNode::Boolean { oper: BooleanType::And, ref nodes, @@ -171,12 +157,10 @@ mod tests { && matches!(nodes[1], QueryNode::AttributeTerm { ref attr, ref value } if attr == "_default_" && value == "baz") && matches!(nodes[2], QueryNode::AttributeTerm { ref attr, ref value } if attr == "_default_" && value == "qux") && matches!(nodes[3], QueryNode::AttributeTerm { ref attr, ref value } if attr == "_default_" && value == "quux quuz"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } else { - panic!("Unable to properly parse '{:?}' - got {:?}", query, res) + panic!("Unable to properly parse '{query:?}' - got {res:?}") } } } @@ -185,7 +169,7 @@ mod tests { fn parses_negated_attribute_term_query() { let cases = ["-foo:bar", "- foo:bar", "NOT foo:bar"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); if let QueryNode::NegatedNode { ref node } = res { if let QueryNode::AttributeTerm { ref attr, @@ -197,7 +181,7 @@ mod tests { } } } - panic!("Unable to properly parse '{:?}' - got {:?}", query, res) + panic!("Unable to properly parse '{query:?}' - got {res:?}") } } @@ -205,14 +189,12 @@ mod tests { fn parses_quoted_attribute_term_query() { let cases = ["foo:\"bar baz\""]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::QuotedAttribute { ref attr, ref phrase } if attr == "foo" && phrase == "bar baz"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -221,14 +203,12 @@ mod tests { fn parses_attribute_prefix_query() { let cases = ["foo:ba*"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributePrefix { ref attr, ref prefix } if attr == "foo" && prefix == "ba"), // We strip the trailing * from the prefix for escaping - "Unable to properly parse '{:?}' - got {:?}", - query, - res + 
"Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -237,14 +217,12 @@ mod tests { fn parses_attribute_wildcard_query() { let cases = ["foo:b*r"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeWildcard { ref attr, ref wildcard } if attr == "foo" && wildcard == "b*r"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -253,14 +231,12 @@ mod tests { fn parses_attribute_wildcard_query_with_trailing_question_mark() { let cases = ["foo:ba?"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeWildcard { ref attr, ref wildcard } if attr == "foo" && wildcard == "ba?"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -269,14 +245,12 @@ mod tests { fn parses_attribute_wildcard_query_with_leading_wildcard() { let cases = ["foo:*ar"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeWildcard { ref attr, ref wildcard } if attr == "foo" && wildcard == "*ar"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -285,7 +259,7 @@ mod tests { fn parses_non_numeric_attribute_comparison_query() { let cases = ["foo:>=bar"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeComparison { @@ -293,9 +267,7 @@ mod tests { value: ComparisonValue::String(ref cval), comparator: Comparison::Gte } if attr == "foo" && cval == "bar"), - "Unable to properly parse '{:?}' - got {:?}'", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}'" ); } } @@ -304,7 +276,7 @@ mod tests { fn parses_numeric_attribute_range_query() { let cases = ["foo:[10 TO 20]"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeRange { @@ -314,9 +286,7 @@ mod tests { upper: ComparisonValue::Integer(ref ustr), upper_inclusive: true } if attr == "foo" && *lstr == 10 && *ustr == 20), - "Unable to properly parse '{:?}' - got {:?}'", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}'" ); } } @@ -325,7 +295,7 @@ mod tests { fn parses_non_numeric_attribute_range_query() { let cases = ["foo:{bar TO baz}"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeRange { @@ -335,9 +305,7 @@ mod tests { upper: ComparisonValue::String(ref ustr), upper_inclusive: false } if attr == "foo" && lstr == "bar" && ustr == 
"baz"), - "Unable to properly parse '{:?}' - got {:?}'", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}'" ); } } @@ -346,7 +314,7 @@ mod tests { fn parses_attribute_range_query_with_open_endpoints() { let cases = ["foo:[* TO *]"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeRange { @@ -356,9 +324,7 @@ mod tests { upper: ComparisonValue::Unbounded, upper_inclusive: true } if attr == "foo"), - "Unable to properly parse '{:?}' - got {:?}'", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}'" ); } } @@ -367,7 +333,7 @@ mod tests { fn parses_attribute_range_query_with_fake_wildcards() { let cases = ["foo:[ba* TO b*z]"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeRange { @@ -377,9 +343,7 @@ mod tests { upper: ComparisonValue::String(ref ustr), upper_inclusive: true } if attr == "foo" && lstr == "ba*" && ustr == "b*z"), - "Unable to properly parse '{:?}' - got {:?}'", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}'" ); } } @@ -388,14 +352,12 @@ mod tests { fn parses_attribute_exists_query() { let cases = ["_exists_:foo", "_exists_:\"foo\""]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeExists { ref attr } if attr == "foo"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -404,14 +366,12 @@ mod tests { fn parses_attribute_exists_query_with_escapes() { let cases = ["_exists_:foo\\ bar"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeExists { ref attr } if attr == "foo bar"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -420,14 +380,12 @@ mod tests { fn parses_star_as_wildcard_not_exists() { let cases = ["foo:*"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeWildcard { ref attr, ref wildcard } if attr == "foo" && wildcard == "*"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -436,14 +394,12 @@ mod tests { fn parses_attribute_missing_query() { let cases = ["_missing_:foo", "_missing_:\"foo\""]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeMissing { ref attr } if attr == "foo"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" 
); } } @@ -452,14 +408,12 @@ mod tests { fn parses_attribute_missing_query_with_escapes() { let cases = ["_missing_:foo\\ bar"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeMissing { ref attr } if attr == "foo bar"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -468,12 +422,10 @@ mod tests { fn parses_match_all_docs_query() { let cases = ["*:*", "*", "_default_:*", "foo:(*:*)"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::MatchAllDocs), - "Failed to parse '{:?}' as MatchAllDocs, got {:?}", - query, - res + "Failed to parse '{query:?}' as MatchAllDocs, got {res:?}" ); } } @@ -482,14 +434,12 @@ mod tests { fn parses_all_as_wildcard() { let cases = ["_all:*"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::AttributeWildcard { ref attr, ref wildcard } if attr == "_all" && wildcard == "*"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } } @@ -504,12 +454,10 @@ mod tests { "foo:(NOT *:*)", ]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); assert!( matches!(res, QueryNode::MatchNoDocs), - "Failed to parse '{:?}' as MatchNoDocs, got {:?}", - query, - res + "Failed to parse '{query:?}' as MatchNoDocs, got {res:?}" ); } } @@ -518,7 +466,7 @@ mod tests { fn parses_boolean_nodes_with_implicit_operators() { let cases = ["foo:bar baz:qux quux:quuz"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); if let QueryNode::Boolean { oper: BooleanType::And, ref nodes, @@ -528,12 +476,10 @@ mod tests { matches!(nodes[0], QueryNode::AttributeTerm { ref attr, ref value } if attr == "foo" && value == "bar") && matches!(nodes[1], QueryNode::AttributeTerm { ref attr, ref value } if attr == "baz" && value == "qux") && matches!(nodes[2], QueryNode::AttributeTerm { ref attr, ref value } if attr == "quux" && value == "quuz"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } else { - panic!("Unable to properly parse '{:?}' - got {:?}", query, res) + panic!("Unable to properly parse '{query:?}' - got {res:?}") } } } @@ -547,7 +493,7 @@ mod tests { "-foo:bar baz:qux -quux:quuz", ]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); if let QueryNode::Boolean { oper: BooleanType::And, ref nodes, @@ -557,12 +503,10 @@ mod tests { matches!(nodes[0], QueryNode::NegatedNode { ref node } if matches!(**node, QueryNode::AttributeTerm {ref attr, ref value } if 
attr == "foo" && value == "bar")) && matches!(nodes[1], QueryNode::AttributeTerm { ref attr, ref value } if attr == "baz" && value == "qux") && matches!(nodes[2], QueryNode::NegatedNode { ref node } if matches!(**node, QueryNode::AttributeTerm {ref attr, ref value } if attr == "quux" && value == "quuz")), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } else { - panic!("Unable to properly parse '{:?}' - got {:?}", query, res) + panic!("Unable to properly parse '{query:?}' - got {res:?}") } } } @@ -571,7 +515,7 @@ mod tests { fn parses_boolean_nodes_with_explicit_operators() { let cases = ["foo:bar OR baz:qux AND quux:quuz"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); if let QueryNode::Boolean { oper: BooleanType::And, ref nodes, @@ -580,12 +524,10 @@ mod tests { assert!( matches!(nodes[0], QueryNode::AttributeTerm { ref attr, ref value } if attr == "baz" && value == "qux") && matches!(nodes[1], QueryNode::AttributeTerm { ref attr, ref value } if attr == "quux" && value == "quuz"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } else { - panic!("Unable to properly parse '{:?}' - got {:?}", query, res) + panic!("Unable to properly parse '{query:?}' - got {res:?}") } } } @@ -594,7 +536,7 @@ mod tests { fn parses_boolean_nodes_with_implicit_and_explicit_operators() { let cases = ["foo:bar OR baz:qux quux:quuz"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); if let QueryNode::Boolean { oper: BooleanType::And, ref nodes, @@ -602,12 +544,10 @@ mod tests { { assert!( matches!(nodes[0], QueryNode::AttributeTerm { ref attr, ref value } if attr == "quux" && value == "quuz"), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } else { - panic!("Unable to properly parse '{:?}' - got {:?}", query, res) + panic!("Unable to properly parse '{query:?}' - got {res:?}") } } } @@ -616,7 +556,7 @@ mod tests { fn parses_nested_boolean_query_node() { let cases = ["foo:bar (baz:qux quux:quuz)"]; for query in cases.iter() { - let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {:?}", query)); + let res = parse(query).unwrap_or_else(|_| panic!("Unable to parse query {query:?}")); if let QueryNode::Boolean { oper: BooleanType::And, ref nodes, @@ -628,12 +568,10 @@ mod tests { matches!(nodes[0], QueryNode::AttributeTerm { ref attr, ref value } if attr == "baz" && value == "qux") && matches!(nodes[1], QueryNode::AttributeTerm { ref attr, ref value } if attr == "quux" && value == "quuz") ), - "Unable to properly parse '{:?}' - got {:?}", - query, - res + "Unable to properly parse '{query:?}' - got {res:?}" ); } else { - panic!("Unable to properly parse '{:?}' - got {:?}", query, res) + panic!("Unable to properly parse '{query:?}' - got {res:?}") } } } diff --git a/lib/dnsmsg-parser/src/dns_message_parser.rs b/lib/dnsmsg-parser/src/dns_message_parser.rs index 36275d8af00c2..dbdafb4dd1244 100644 --- a/lib/dnsmsg-parser/src/dns_message_parser.rs +++ b/lib/dnsmsg-parser/src/dns_message_parser.rs @@ -205,8 +205,7 @@ impl DnsMessageParser { } for _i in 0..8 { if current_byte 
& 0b1000_0000 == 0b1000_0000 { - write!(port_string, "{} ", current_bit) - .expect("can always write to String"); + write!(port_string, "{current_bit} ").expect("can always write to String"); } current_byte <<= 1; current_bit += 1; @@ -239,10 +238,7 @@ impl DnsMessageParser { parse_ipv6_address(&mut dec)? }; let domain_name = parse_domain_name(&mut decoder)?; - Ok(( - Some(format!("{} {} {}", prefix, ipv6_address, domain_name)), - None, - )) + Ok((Some(format!("{prefix} {ipv6_address} {domain_name}")), None)) } fn parse_loc_rdata( @@ -289,8 +285,7 @@ impl DnsMessageParser { Ok(( Some(format!( - "{} {} {:.2}m {}m {}m {}m", - latitude, longitude, altitude, size, horizontal_precision, vertical_precision + "{latitude} {longitude} {altitude:.2}m {size}m {horizontal_precision}m {vertical_precision}m" )), None, )) @@ -326,12 +321,8 @@ impl DnsMessageParser { let mut dec = BinDecoder::new(&address_vec); parse_ipv6_address(&mut dec)? }; - write!( - apl_rdata, - "{}{}:{}/{}", - negation, address_family, address, prefix - ) - .expect("can always write to String"); + write!(apl_rdata, "{negation}{address_family}:{address}/{prefix}") + .expect("can always write to String"); apl_rdata.push(' '); } Ok((Some(apl_rdata.trim_end().to_string()), None)) @@ -381,21 +372,21 @@ impl DnsMessageParser { let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let rmailbx = parse_domain_name(&mut decoder)?; let emailbx = parse_domain_name(&mut decoder)?; - Ok((Some(format!("{} {}", rmailbx, emailbx)), None)) + Ok((Some(format!("{rmailbx} {emailbx}")), None)) } dns_message::RTYPE_RP => { let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let mbox = parse_domain_name(&mut decoder)?; let txt = parse_domain_name(&mut decoder)?; - Ok((Some(format!("{} {}", mbox, txt)), None)) + Ok((Some(format!("{mbox} {txt}")), None)) } dns_message::RTYPE_AFSDB => { let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let subtype = parse_u16(&mut decoder)?; let hostname = parse_domain_name(&mut decoder)?; - Ok((Some(format!("{} {}", subtype, hostname)), None)) + Ok((Some(format!("{subtype} {hostname}")), None)) } dns_message::RTYPE_X25 => { @@ -438,7 +429,7 @@ impl DnsMessageParser { let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let preference = parse_u16(&mut decoder)?; let intermediate_host = parse_domain_name(&mut decoder)?; - Ok((Some(format!("{} {}", preference, intermediate_host)), None)) + Ok((Some(format!("{preference} {intermediate_host}")), None)) } dns_message::RTYPE_NSAP => { @@ -446,7 +437,7 @@ impl DnsMessageParser { let mut decoder = BinDecoder::new(raw_rdata); let rdata_len = raw_rdata.len() as u16; let nsap_rdata = HEXUPPER.encode(&parse_vec_with_u16_len(&mut decoder, rdata_len)?); - Ok((Some(format!("0x{}", nsap_rdata)), None)) + Ok((Some(format!("0x{nsap_rdata}")), None)) } dns_message::RTYPE_PX => { @@ -454,7 +445,7 @@ impl DnsMessageParser { let preference = parse_u16(&mut decoder)?; let map822 = parse_domain_name(&mut decoder)?; let mapx400 = parse_domain_name(&mut decoder)?; - Ok((Some(format!("{} {} {}", preference, map822, mapx400)), None)) + Ok((Some(format!("{preference} {map822} {mapx400}")), None)) } dns_message::RTYPE_LOC => self.parse_loc_rdata(rdata.anything()), @@ -463,7 +454,7 @@ impl DnsMessageParser { let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let preference = parse_u16(&mut decoder)?; let exchanger = parse_domain_name(&mut decoder)?; - Ok((Some(format!("{} {}", 
preference, exchanger)), None)) + Ok((Some(format!("{preference} {exchanger}")), None)) } dns_message::RTYPE_CERT => { @@ -475,7 +466,7 @@ impl DnsMessageParser { let crl_len = raw_rdata.len() as u16 - 5; let crl = BASE64.encode(&parse_vec_with_u16_len(&mut decoder, crl_len)?); Ok(( - Some(format!("{} {} {} {}", cert_type, key_tag, algorithm, crl)), + Some(format!("{cert_type} {key_tag} {algorithm} {crl}")), None, )) } @@ -491,10 +482,7 @@ impl DnsMessageParser { let data_len = raw_rdata.len() as u16 - 3; let data = BASE64.encode(&parse_vec_with_u16_len(&mut decoder, data_len)?); - Ok(( - Some(format!("{} {} {} {}", meaning, coding, subcoding, data)), - None, - )) + Ok((Some(format!("{meaning} {coding} {subcoding} {data}")), None)) } dns_message::RTYPE_APL => self.parse_apl_rdata(rdata.anything()), @@ -747,11 +735,11 @@ fn format_rdata(rdata: &RData) -> DnsParserResult<(Option, Option Ok((None, Some(rdata.anything().to_vec()))), _ => Err(DnsMessageParserError::SimpleError { - cause: format!("Unsupported rdata {:?}", rdata), + cause: format!("Unsupported rdata {rdata:?}"), }), }, _ => Err(DnsMessageParserError::SimpleError { - cause: format!("Unsupported rdata {:?}", rdata), + cause: format!("Unsupported rdata {rdata:?}"), }), } } @@ -856,7 +844,7 @@ fn parse_edns_opt_dnssec_algorithms( let algorithm_names: Vec = algorithms.iter().map(|alg| alg.to_string()).collect(); EdnsOptionEntry { opt_code: Into::::into(opt_code), - opt_name: format!("{:?}", opt_code), + opt_name: format!("{opt_code:?}"), opt_data: algorithm_names.join(" "), } } @@ -864,7 +852,7 @@ fn parse_edns_opt_dnssec_algorithms( fn parse_edns_opt(opt_code: EdnsCode, opt_data: &[u8]) -> EdnsOptionEntry { EdnsOptionEntry { opt_code: Into::::into(opt_code), - opt_name: format!("{:?}", opt_code), + opt_name: format!("{opt_code:?}"), opt_data: BASE64.encode(opt_data), } } @@ -873,17 +861,14 @@ fn parse_loc_rdata_size(data: u8) -> DnsParserResult { let base = (data & 0xF0) >> 4; if base > 9 { return Err(DnsMessageParserError::SimpleError { - cause: format!("The base shouldnt be greater than 9. Base: {}", base), + cause: format!("The base shouldnt be greater than 9. Base: {base}"), }); } let exponent = data & 0x0F; if exponent > 9 { return Err(DnsMessageParserError::SimpleError { - cause: format!( - "The exponent shouldnt be greater than 9. Exponent: {}", - exponent - ), + cause: format!("The exponent shouldnt be greater than 9. 
Exponent: {exponent}"), }); } @@ -1099,7 +1084,7 @@ fn parse_unknown_record_type(rtype: u16) -> Option { fn format_bytes_as_hex_string(bytes: &[u8]) -> String { bytes .iter() - .map(|e| format!("{:02X}", e)) + .map(|e| format!("{e:02X}")) .collect::>() .join(".") } @@ -1179,7 +1164,7 @@ mod tests { DnsMessageParserError::SimpleError { cause: e } => { panic!("Expected TrustDnsError, got {}.", &e) } - _ => panic!("{}.", err), + _ => panic!("{err}."), } } diff --git a/lib/enrichment/src/tables.rs b/lib/enrichment/src/tables.rs index 2bfef6969bc88..4c8ba4ccce916 100644 --- a/lib/enrichment/src/tables.rs +++ b/lib/enrichment/src/tables.rs @@ -145,7 +145,7 @@ impl TableRegistry { match *locked { None => Err("finish_load has been called".to_string()), Some(ref mut tables) => match tables.get_mut(table) { - None => Err(format!("table '{}' not loaded", table)), + None => Err(format!("table '{table}' not loaded")), Some(table) => table.add_index(case, fields), }, } @@ -209,7 +209,7 @@ impl TableSearch { let tables = self.0.load(); if let Some(ref tables) = **tables { match tables.get(table) { - None => Err(format!("table {} not loaded", table)), + None => Err(format!("table {table} not loaded")), Some(table) => table.find_table_row(case, condition, select, index), } } else { @@ -231,7 +231,7 @@ impl TableSearch { let tables = self.0.load(); if let Some(ref tables) = **tables { match tables.get(table) { - None => Err(format!("table {} not loaded", table)), + None => Err(format!("table {table} not loaded")), Some(table) => table.find_table_rows(case, condition, select, index), } } else { @@ -264,9 +264,9 @@ fn fmt_enrichment_table( tables.truncate(std::cmp::max(tables.len(), 0)); tables.push(')'); - write!(f, "{} {}", name, tables) + write!(f, "{name} {tables}") } - None => write!(f, "{} loading", name), + None => write!(f, "{name} loading"), } } diff --git a/lib/file-source/src/checkpointer.rs b/lib/file-source/src/checkpointer.rs index a7eaea307aacf..3f569a55b7423 100644 --- a/lib/file-source/src/checkpointer.rs +++ b/lib/file-source/src/checkpointer.rs @@ -229,10 +229,10 @@ impl Checkpointer { use FileFingerprint::*; let path = match fng { - BytesChecksum(c) => format!("g{:x}.{}", c, pos), - FirstLinesChecksum(c) => format!("h{:x}.{}", c, pos), - DevInode(dev, ino) => format!("i{:x}.{:x}.{}", dev, ino, pos), - Unknown(x) => format!("{:x}.{}", x, pos), + BytesChecksum(c) => format!("g{c:x}.{pos}"), + FirstLinesChecksum(c) => format!("h{c:x}.{pos}"), + DevInode(dev, ino) => format!("i{dev:x}.{ino:x}.{pos}"), + Unknown(x) => format!("{x:x}.{pos}"), }; self.directory.join(path) } diff --git a/lib/file-source/src/file_watcher/mod.rs b/lib/file-source/src/file_watcher/mod.rs index f39df392748cf..4f71e053bdec3 100644 --- a/lib/file-source/src/file_watcher/mod.rs +++ b/lib/file-source/src/file_watcher/mod.rs @@ -119,8 +119,8 @@ impl FileWatcher { (Box::new(reader), pos) } (false, false, ReadFrom::Beginning) => { - let pos = reader.seek(io::SeekFrom::Start(0)).unwrap(); - (Box::new(reader), pos) + reader.rewind().unwrap(); + (Box::new(reader), 0) } (false, false, ReadFrom::End) => { let pos = reader.seek(io::SeekFrom::End(0)).unwrap(); diff --git a/lib/file-source/src/file_watcher/tests/experiment.rs b/lib/file-source/src/file_watcher/tests/experiment.rs index bf435c513e1d9..b9494c278d349 100644 --- a/lib/file-source/src/file_watcher/tests/experiment.rs +++ b/lib/file-source/src/file_watcher/tests/experiment.rs @@ -81,7 +81,7 @@ fn experiment(actions: Vec) { } FileWatcherAction::RotateFile => { let mut 
                 let mut new_path = path.clone();
-                new_path.set_extension(format!("log.{}", rotation_count));
+                new_path.set_extension(format!("log.{rotation_count}"));
                 rotation_count += 1;
                 fs::rename(&path, &new_path).expect("could not rename");
                 fp = fs::File::create(&path).expect("could not create");
diff --git a/lib/file-source/src/file_watcher/tests/experiment_no_truncations.rs b/lib/file-source/src/file_watcher/tests/experiment_no_truncations.rs
index b1a184984f087..fec58e1167eaf 100644
--- a/lib/file-source/src/file_watcher/tests/experiment_no_truncations.rs
+++ b/lib/file-source/src/file_watcher/tests/experiment_no_truncations.rs
@@ -48,7 +48,7 @@ fn experiment_no_truncations(actions: Vec<FileWatcherAction>) {
             }
             FileWatcherAction::RotateFile => {
                 let mut new_path = path.clone();
-                new_path.set_extension(format!("log.{}", rotation_count));
+                new_path.set_extension(format!("log.{rotation_count}"));
                 rotation_count += 1;
                 fs::rename(&path, &new_path).expect("could not rename");
                 fp = fs::File::create(&path).expect("could not create");
diff --git a/lib/k8s-e2e-tests/src/lib.rs b/lib/k8s-e2e-tests/src/lib.rs
index 22bcaaaee12e7..afa437c07fae1 100644
--- a/lib/k8s-e2e-tests/src/lib.rs
+++ b/lib/k8s-e2e-tests/src/lib.rs
@@ -32,17 +32,17 @@ pub fn get_namespace() -> String {
         .map(|num| (num as char).to_ascii_lowercase())
         .collect();
 
-    format!("vector-{}", id)
+    format!("vector-{id}")
 }
 
 pub fn get_namespace_appended(namespace: &str, suffix: &str) -> String {
-    format!("{}-{}", namespace, suffix)
+    format!("{namespace}-{suffix}")
 }
 
 /// Gets a name we can use for roles to prevent them conflicting with other tests.
 /// Uses the provided namespace as the root.
 pub fn get_override_name(namespace: &str, suffix: &str) -> String {
-    format!("{}-{}", namespace, suffix)
+    format!("{namespace}-{suffix}")
 }
 
 /// Is the MULTINODE environment variable set?
@@ -54,7 +54,7 @@ pub fn is_multinode() -> bool {
 /// to be run against the same cluster without the role names clashing.
 pub fn config_override_name(name: &str, cleanup: bool) -> String {
     let vectordir = if is_multinode() {
-        format!("{}-vector", name)
+        format!("{name}-vector")
     } else {
         "vector".to_string()
     };
@@ -241,9 +241,7 @@ pub async fn smoke_check_first_line(log_reader: &mut Reader) {
     let expected_pat = "INFO vector::app:";
     assert!(
         first_line.contains(expected_pat),
-        "Expected a line ending with {:?} but got {:?}; vector might be malfunctioning",
-        expected_pat,
-        first_line
+        "Expected a line ending with {expected_pat:?} but got {first_line:?}; vector might be malfunctioning"
     );
 }
diff --git a/lib/k8s-e2e-tests/src/metrics.rs b/lib/k8s-e2e-tests/src/metrics.rs
index d3fd61548efd9..a815acac35de3 100644
--- a/lib/k8s-e2e-tests/src/metrics.rs
+++ b/lib/k8s-e2e-tests/src/metrics.rs
@@ -64,7 +64,7 @@ pub async fn get_component_sent_events_total(url: &str) -> Result
 Result<(), Box> {
     let metrics = load(url).await?;
     if !extract_vector_started(&metrics) {
-        return Err(format!("`vector_started`-ish metric was not found:\n{}", metrics).into());
+        return Err(format!("`vector_started`-ish metric was not found:\n{metrics}").into());
     }
     Ok(())
 }
@@ -125,7 +125,7 @@ pub async fn assert_metrics_present(
         required_metrics.remove(metric_name);
     }
     if !required_metrics.is_empty() {
-        return Err(format!("Some host metrics were not found:\n{:?}", required_metrics).into());
+        return Err(format!("Some host metrics were not found:\n{required_metrics:?}").into());
     }
     Ok(())
 }
@@ -217,7 +217,7 @@ mod tests {
         for (input, expected_value) in cases {
             let input = input.join("\n");
             let actual_value = extract_vector_started(&input);
-            assert_eq!(expected_value, actual_value, "input: {}", input);
+            assert_eq!(expected_value, actual_value, "input: {input}");
         }
     }
 }
diff --git a/lib/k8s-test-framework/src/port_forward.rs b/lib/k8s-test-framework/src/port_forward.rs
index b2acefa71f79b..457bce8169990 100644
--- a/lib/k8s-test-framework/src/port_forward.rs
+++ b/lib/k8s-test-framework/src/port_forward.rs
@@ -33,7 +33,7 @@ pub fn port_forward(
     command.arg("port-forward");
     command.arg("-n").arg(namespace);
     command.arg(resource);
-    command.arg(format!("{}:{}", local_port, resource_port));
+    command.arg(format!("{local_port}:{resource_port}"));
 
     command.kill_on_drop(true);
diff --git a/lib/k8s-test-framework/src/reader.rs b/lib/k8s-test-framework/src/reader.rs
index 0b83d5885afcd..e6af49066a8b2 100644
--- a/lib/k8s-test-framework/src/reader.rs
+++ b/lib/k8s-test-framework/src/reader.rs
@@ -98,7 +98,7 @@ mod tests {
         let mut expected_num = 0;
         while let Some(line) = reader.read_line().await {
             // Assert we're getting expected lines.
-            assert_eq!(line, format!("Line {}\n", expected_num));
+            assert_eq!(line, format!("Line {expected_num}\n"));
             // On line 100 issue a `kill` to stop the infinite stream.
             if expected_num == 100 {
diff --git a/lib/k8s-test-framework/src/util.rs b/lib/k8s-test-framework/src/util.rs
index a0499ba6fc875..997c5898d9027 100644
--- a/lib/k8s-test-framework/src/util.rs
+++ b/lib/k8s-test-framework/src/util.rs
@@ -6,7 +6,7 @@ pub async fn run_command(mut command: tokio::process::Command) -> Result<()> {
     info!("Running command `{:?}`", command);
     let exit_status = command.spawn()?.wait().await?;
     if !exit_status.success() {
-        return Err(format!("exec failed: {:?}", command).into());
+        return Err(format!("exec failed: {command:?}").into());
     }
     Ok(())
 }
@@ -15,7 +15,7 @@ pub fn run_command_blocking(mut command: std::process::Command) -> Result<()> {
     info!("Running command blocking `{:?}`", command);
     let exit_status = command.spawn()?.wait()?;
     if !exit_status.success() {
-        return Err(format!("exec failed: {:?}", command).into());
+        return Err(format!("exec failed: {command:?}").into());
     }
     Ok(())
 }
@@ -24,7 +24,7 @@ pub async fn run_command_output(mut command: tokio::process::Command) -> Result<String> {
     info!("Fetching command `{:?}`", command);
     let output = command.spawn()?.wait_with_output().await?;
     if !output.status.success() {
-        return Err(format!("exec failed: {:?}", command).into());
+        return Err(format!("exec failed: {command:?}").into());
     }
 
     let output = String::from_utf8(output.stdout)?;
diff --git a/lib/k8s-test-framework/src/wait_for_resource.rs b/lib/k8s-test-framework/src/wait_for_resource.rs
index d3cba0fd56d45..7beb0d7721efc 100644
--- a/lib/k8s-test-framework/src/wait_for_resource.rs
+++ b/lib/k8s-test-framework/src/wait_for_resource.rs
@@ -86,7 +86,7 @@ where
     command.arg("--for");
     match wait_for {
         WaitFor::Delete => command.arg("delete"),
-        WaitFor::Condition(cond) => command.arg(format!("condition={}", cond)),
+        WaitFor::Condition(cond) => command.arg(format!("condition={cond}")),
     };
 
     command.args(extra);
diff --git a/lib/loki-logproto/src/lib.rs b/lib/loki-logproto/src/lib.rs
index 44ab526d8f8ea..8801ca494e64a 100644
--- a/lib/loki-logproto/src/lib.rs
+++ b/lib/loki-logproto/src/lib.rs
@@ -68,7 +68,7 @@ pub mod util {
         let mut labels: Vec<String> = labels
             .iter()
             .filter(|(k, _)| !RESERVED_LABELS.contains(&k.as_str()))
-            .map(|(k, v)| format!("{}=\"{}\"", k, v))
+            .map(|(k, v)| format!("{k}=\"{v}\""))
             .collect();
         labels.sort();
         format!("{{{}}}", labels.join(", "))
diff --git a/lib/lookup/src/lookup_buf/mod.rs b/lib/lookup/src/lookup_buf/mod.rs
index 13cef58cd3eec..4eb005d30f684 100644
--- a/lib/lookup/src/lookup_buf/mod.rs
+++ b/lib/lookup/src/lookup_buf/mod.rs
@@ -119,12 +119,12 @@ impl Display for LookupBuf {
                 .map(|next| next.is_field() || next.is_coalesce())
                 .unwrap_or(false);
             match (segment, maybe_next) {
-                (SegmentBuf::Field(_), true) => write!(f, r#"{}."#, segment)?,
-                (SegmentBuf::Field(_), false) => write!(f, "{}", segment)?,
-                (SegmentBuf::Index(_), true) => write!(f, r#"[{}]."#, segment)?,
-                (SegmentBuf::Index(_), false) => write!(f, "[{}]", segment)?,
-                (SegmentBuf::Coalesce(_), true) => write!(f, r#"{}."#, segment)?,
-                (SegmentBuf::Coalesce(_), false) => write!(f, "{}", segment)?,
+                (SegmentBuf::Field(_), true) => write!(f, r#"{segment}."#)?,
+                (SegmentBuf::Field(_), false) => write!(f, "{segment}")?,
+                (SegmentBuf::Index(_), true) => write!(f, r#"[{segment}]."#)?,
+                (SegmentBuf::Index(_), false) => write!(f, "[{segment}]")?,
+                (SegmentBuf::Coalesce(_), true) => write!(f, r#"{segment}."#)?,
+                (SegmentBuf::Coalesce(_), false) => write!(f, "{segment}")?,
             }
         }
         Ok(())
diff --git a/lib/lookup/src/lookup_buf/segmentbuf.rs b/lib/lookup/src/lookup_buf/segmentbuf.rs
index fc803fb36b3ab..e53928408de00 100644
--- a/lib/lookup/src/lookup_buf/segmentbuf.rs
+++ b/lib/lookup/src/lookup_buf/segmentbuf.rs
@@ -157,8 +157,8 @@ impl<'a> LookSegment<'a> for SegmentBuf {
 impl Display for SegmentBuf {
     fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
         match self {
-            SegmentBuf::Index(i) => write!(formatter, "{}", i),
-            SegmentBuf::Field(field) => write!(formatter, "{}", field),
+            SegmentBuf::Index(i) => write!(formatter, "{i}"),
+            SegmentBuf::Field(field) => write!(formatter, "{field}"),
             SegmentBuf::Coalesce(v) => write!(
                 formatter,
                 "({})",
diff --git a/lib/lookup/src/lookup_buf/test.rs b/lib/lookup/src/lookup_buf/test.rs
index 0ad278d9f1ea0..671d68dc1f3a9 100644
--- a/lib/lookup/src/lookup_buf/test.rs
+++ b/lib/lookup/src/lookup_buf/test.rs
@@ -169,8 +169,8 @@ fn iter() {
     for (index, expected) in SUFFICIENTLY_DECOMPOSED.iter().enumerate() {
         let parsed = iter
             .next()
-            .unwrap_or_else(|| panic!("Expected at index {}: {:?}, got None.", index, expected));
-        assert_eq!(expected, parsed, "Failed at {}", index);
+            .unwrap_or_else(|| panic!("Expected at index {index}: {expected:?}, got None."));
+        assert_eq!(expected, parsed, "Failed at {index}");
     }
 }
 
@@ -181,8 +181,8 @@ fn into_iter() {
     for (index, expected) in SUFFICIENTLY_DECOMPOSED.iter().cloned().enumerate() {
         let parsed = iter
             .next()
-            .unwrap_or_else(|| panic!("Expected at index {}: {:?}, got None.", index, expected));
-        assert_eq!(expected, parsed, "Failed at {}", index);
+            .unwrap_or_else(|| panic!("Expected at index {index}: {expected:?}, got None."));
+        assert_eq!(expected, parsed, "Failed at {index}");
     }
 }
diff --git a/lib/lookup/src/lookup_v2/compat.rs b/lib/lookup/src/lookup_v2/compat.rs
index bb18e7009fbc0..af7133172c038 100644
--- a/lib/lookup/src/lookup_v2/compat.rs
+++ b/lib/lookup/src/lookup_v2/compat.rs
@@ -112,7 +112,7 @@ mod test {
         for test in tests {
             let lookup_buf = LookupBuf::from_str(test).unwrap();
             if !ValuePath::eq(&test, &lookup_buf) {
-                panic!("Equality failed. Path={:?}", test);
+                panic!("Equality failed. Path={test:?}");
             }
         }
     }
diff --git a/lib/lookup/src/lookup_v2/owned.rs b/lib/lookup/src/lookup_v2/owned.rs
index 00785aa2be72c..241eaab6a5e64 100644
--- a/lib/lookup/src/lookup_v2/owned.rs
+++ b/lib/lookup/src/lookup_v2/owned.rs
@@ -234,7 +234,7 @@ impl From<OwnedValuePath> for String {
                 OwnedSegment::Field(field) => {
                     serialize_field(field.as_ref(), (i != 0).then_some("."))
                 }
-                OwnedSegment::Index(index) => format!("[{}]", index),
+                OwnedSegment::Index(index) => format!("[{index}]"),
                 OwnedSegment::Coalesce(fields) => {
                     let mut output = String::new();
                     let (last, fields) = fields.split_last().expect("coalesce must not be empty");
diff --git a/lib/lookup/src/lookup_view/mod.rs b/lib/lookup/src/lookup_view/mod.rs
index a8b714e53f2eb..f1066314edd2f 100644
--- a/lib/lookup/src/lookup_view/mod.rs
+++ b/lib/lookup/src/lookup_view/mod.rs
@@ -100,12 +100,12 @@ impl<'a> Display for Lookup<'a> {
                 .unwrap_or(false);
 
             match (segment, maybe_next) {
-                (Segment::Field(_), true) => write!(f, r#"{}."#, segment)?,
-                (Segment::Field(_), false) => write!(f, "{}", segment)?,
-                (Segment::Index(_), true) => write!(f, r#"[{}]."#, segment)?,
-                (Segment::Index(_), false) => write!(f, "[{}]", segment)?,
-                (Segment::Coalesce(_), true) => write!(f, r#"{}."#, segment)?,
-                (Segment::Coalesce(_), false) => write!(f, "{}", segment)?,
+                (Segment::Field(_), true) => write!(f, r#"{segment}."#)?,
+                (Segment::Field(_), false) => write!(f, "{segment}")?,
+                (Segment::Index(_), true) => write!(f, r#"[{segment}]."#)?,
+                (Segment::Index(_), false) => write!(f, "[{segment}]")?,
+                (Segment::Coalesce(_), true) => write!(f, r#"{segment}."#)?,
+                (Segment::Coalesce(_), false) => write!(f, "{segment}")?,
             }
         }
         Ok(())
diff --git a/lib/lookup/src/lookup_view/segment.rs b/lib/lookup/src/lookup_view/segment.rs
index ec1caae05ad9f..afe223e73f791 100644
--- a/lib/lookup/src/lookup_view/segment.rs
+++ b/lib/lookup/src/lookup_view/segment.rs
@@ -119,12 +119,12 @@ impl<'a> LookSegment<'a> for Segment<'a> {
 impl<'a> Display for Segment<'a> {
     fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
         match self {
-            Segment::Index(i) => write!(formatter, "{}", i),
+            Segment::Index(i) => write!(formatter, "{i}"),
             Segment::Field(Field {
                 name,
                 requires_quoting: false,
-            }) => write!(formatter, "{}", name),
-            Segment::Field(field) => write!(formatter, "{}", field),
+            }) => write!(formatter, "{name}"),
+            Segment::Field(field) => write!(formatter, "{field}"),
             Segment::Coalesce(v) => write!(
                 formatter,
                 "({})",
diff --git a/lib/lookup/src/lookup_view/test.rs b/lib/lookup/src/lookup_view/test.rs
index 737d8bc9f6077..f153c956cc99a 100644
--- a/lib/lookup/src/lookup_view/test.rs
+++ b/lib/lookup/src/lookup_view/test.rs
@@ -197,8 +197,8 @@ fn iter() {
     for (index, expected) in SUFFICIENTLY_DECOMPOSED.iter().enumerate() {
         let parsed = iter
             .next()
-            .unwrap_or_else(|| panic!("Expected at index {}: {:?}, got None.", index, expected));
-        assert_eq!(expected, parsed, "Failed at {}", index);
+            .unwrap_or_else(|| panic!("Expected at index {index}: {expected:?}, got None."));
+        assert_eq!(expected, parsed, "Failed at {index}");
     }
 }
 
@@ -209,8 +209,8 @@ fn into_iter() {
     for (index, expected) in SUFFICIENTLY_DECOMPOSED.iter().cloned().enumerate() {
         let parsed = iter
             .next()
-            .unwrap_or_else(|| panic!("Expected at index {}: {:?}, got None.", index, expected));
-        assert_eq!(expected, parsed, "Failed at {}", index);
+            .unwrap_or_else(|| panic!("Expected at index {index}: {expected:?}, got None."));
+        assert_eq!(expected, parsed, "Failed at {index}");
     }
 }
diff --git a/lib/prometheus-parser/src/line.rs b/lib/prometheus-parser/src/line.rs
index f1b5dbe6dacf1..6895eb221d805 100644
--- a/lib/prometheus-parser/src/line.rs
+++ b/lib/prometheus-parser/src/line.rs
@@ -401,7 +401,7 @@ mod test {
     #[test]
     fn test_parse_escaped_string() {
         fn wrap(s: &str) -> String {
-            format!(" \t \"{}\" .", s)
+            format!(" \t \"{s}\" .")
         }
 
         // parser should not consume more that it needed
@@ -441,7 +441,7 @@ mod test {
     #[test]
     fn test_parse_name() {
         fn wrap(s: &str) -> String {
-            format!(" \t {} .", s)
+            format!(" \t {s} .")
         }
         let tail = " .";
@@ -467,7 +467,7 @@ mod test {
     #[test]
     fn test_parse_header() {
         fn wrap(s: &str) -> String {
-            format!(" \t {} .", s)
+            format!(" \t {s} .")
         }
         let tail = " .";
@@ -541,7 +541,7 @@ mod test {
     #[test]
     fn test_parse_value() {
         fn wrap(s: &str) -> String {
-            format!(" \t {} .", s)
+            format!(" \t {s} .")
         }
         let tail = " .";
@@ -609,7 +609,7 @@ mod test {
     #[test]
     fn test_parse_labels() {
         fn wrap(s: &str) -> String {
-            format!(" \t {} .", s)
+            format!(" \t {s} .")
         }
         let tail = " .";
diff --git a/lib/tracing-limit/benches/limit.rs b/lib/tracing-limit/benches/limit.rs
index ebf861b709a06..d639560bba3f6 100644
--- a/lib/tracing-limit/benches/limit.rs
+++ b/lib/tracing-limit/benches/limit.rs
@@ -97,7 +97,7 @@ struct Visitor<'a>(MutexGuard<'a, String>);
 impl<'a> field::Visit for Visitor<'a> {
     fn record_debug(&mut self, _field: &field::Field, value: &dyn fmt::Debug) {
         use std::fmt::Write;
-        let _ = write!(&mut *self.0, "{:?}", value);
+        let _ = write!(&mut *self.0, "{value:?}");
     }
 }
diff --git a/lib/tracing-limit/src/lib.rs b/lib/tracing-limit/src/lib.rs
index 00e937bcabf5c..2c69ae7b932e7 100644
--- a/lib/tracing-limit/src/lib.rs
+++ b/lib/tracing-limit/src/lib.rs
@@ -387,7 +387,7 @@ impl Visit for RateLimitedSpanKeys {
     }
 
     fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
-        self.record(field, format!("{:?}", value).into());
+        self.record(field, format!("{value:?}").into());
     }
 }
 
@@ -435,7 +435,7 @@ impl Visit for MessageVisitor {
     fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
         if self.message.is_none() && field.name() == MESSAGE_FIELD {
-            self.message = Some(format!("{:?}", value));
+            self.message = Some(format!("{value:?}"));
         }
     }
 }
@@ -575,8 +575,7 @@ mod test {
                     info_span!("span", component_id = &key, vrl_position = &line_number);
                 let _enter = span.enter();
                 info!(
-                    message =
-                        format!("Hello {} on line_number {}!", key, line_number).as_str(),
+                    message = format!("Hello {key} on line_number {line_number}!").as_str(),
                     internal_log_rate_limit = true
                 );
             }
@@ -637,8 +636,7 @@ mod test {
         for key in &["foo", "bar"] {
             for line_number in &[1, 2] {
                 info!(
-                    message =
-                        format!("Hello {} on line_number {}!", key, line_number).as_str(),
+                    message = format!("Hello {key} on line_number {line_number}!").as_str(),
                     internal_log_rate_limit = true,
                     component_id = &key,
                     vrl_position = &line_number
diff --git a/lib/value/src/value/lua.rs b/lib/value/src/value/lua.rs
index 647ca933ea332..8bb8918fdb656 100644
--- a/lib/value/src/value/lua.rs
+++ b/lib/value/src/value/lua.rs
@@ -283,13 +283,8 @@ mod test {
             assert!(
                 test_fn
                     .call::<_, bool>(value.clone())
-                    .unwrap_or_else(|_| panic!(
-                        "Failed to call {} for value {:?}",
-                        test_src, value
-                    )),
-                "Test function: {}, value: {:?}",
-                test_src,
-                value
+                    .unwrap_or_else(|_| panic!("Failed to call {test_src} for value {value:?}")),
+                "Test function: {test_src}, value: {value:?}"
             );
         }
     }
diff --git a/lib/value/src/value/serde.rs b/lib/value/src/value/serde.rs
index f44a816c86b78..d92d8401da689
100644 --- a/lib/value/src/value/serde.rs +++ b/lib/value/src/value/serde.rs @@ -280,8 +280,7 @@ mod test { }; assert!( is_match, - "Typecheck failure. Wanted {}, got {:?}.", - expected_type, vector_value + "Typecheck failure. Wanted {expected_type}, got {vector_value:?}." ); let _value: serde_json::Value = vector_value.try_into().unwrap(); }, diff --git a/lib/value/src/value/target.rs b/lib/value/src/value/target.rs index 91f5ea9bcb625..d52357ca8024d 100644 --- a/lib/value/src/value/target.rs +++ b/lib/value/src/value/target.rs @@ -140,10 +140,7 @@ impl Value { where T: Iterator, { - let segment = match segments.next() { - Some(segment) => segment, - None => return Some(self), - }; + let Some(segment) = segments.next() else { return Some(self) }; self.get_by_segment(segment) .and_then(|value| value.get_by_segments(segments)) @@ -174,10 +171,7 @@ impl Value { where T: Iterator, { - let segment = match segments.next() { - Some(segments) => segments, - None => return Some(self), - }; + let Some(segment) = segments.next() else { return Some(self) }; self.get_by_segment_mut(segment) .and_then(|value| value.get_by_segments_mut(segments)) @@ -215,22 +209,19 @@ impl Value { where T: Iterator + Clone, { - let segment = match segments.next() { - Some(segments) => segments, - None => { - return match self { - Self::Object(v) => { - let v = std::mem::take(v); - Some(Self::Object(v)) - } - Self::Array(v) => { - let v = std::mem::take(v); - Some(Self::Array(v)) - } - _ => { - let v = std::mem::replace(self, Self::Null); - Some(v) - } + let Some(segment) = segments.next() else { + return match self { + Self::Object(v) => { + let v = std::mem::take(v); + Some(Self::Object(v)) + } + Self::Array(v) => { + let v = std::mem::take(v); + Some(Self::Array(v)) + } + _ => { + let v = std::mem::replace(self, Self::Null); + Some(v) } } }; @@ -288,10 +279,7 @@ impl Value { where T: Iterator + Clone, { - let segment = match segments.peek() { - Some(segment) => segment, - None => return *self = new, - }; + let Some(segment) = segments.peek() else { return *self = new }; // As long as the provided segments match the shape of the value, we'll // traverse down the tree. Once we encounter a value kind that does not @@ -311,10 +299,7 @@ impl Value { where T: Iterator + Clone, { - let segment = match segments.next() { - Some(segments) => segments, - None => return, - }; + let Some(segment) = segments.next() else { return }; let mut handle_field = |field: &str, new, mut segments: Peekable| { let key = field.to_owned(); @@ -325,10 +310,7 @@ impl Value { *self = BTreeMap::default().into(); } - let map = match self { - Self::Object(map) => map, - _ => unreachable!("see invariant above"), - }; + let Self::Object(map) = self else { unreachable!("see invariant above") }; match segments.peek() { // If there are no other segments to traverse, we'll add the new @@ -359,10 +341,7 @@ impl Value { // result in an actual value, so none of the fields match an // existing field. We'll pick the last field in the list to // insert the new value into. 
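
The target.rs hunks above and below collapse the `match segments.next() { Some(..) => .., None => return .. }` boilerplate into `let-else`, stabilized in Rust 1.65: the pattern binds on success and the `else` arm must diverge. A minimal sketch of the shape, using a hypothetical `first_word` helper rather than anything from the diff:

fn first_word(s: &str) -> Option<&str> {
    // Diverge (here: return) when the pattern does not match.
    let Some(word) = s.split_whitespace().next() else {
        return None;
    };
    Some(word)
}

fn main() {
    assert_eq!(first_word("hello world"), Some("hello"));
    assert_eq!(first_word("   "), None);
}
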
- let field = match fields.last() { - Some(field) => field, - None => return, - }; + let Some(field) = fields.last() else { return }; handle_field(field.as_str(), new, segments); } diff --git a/lib/vector-api-client/src/gql/components.rs b/lib/vector-api-client/src/gql/components.rs index 4efb95c47316f..13f9f43e028c0 100644 --- a/lib/vector-api-client/src/gql/components.rs +++ b/lib/vector-api-client/src/gql/components.rs @@ -203,7 +203,7 @@ impl fmt::Display for components_query::ComponentsQueryComponentsEdgesNodeOn { components_query::ComponentsQueryComponentsEdgesNodeOn::Sink(_) => "sink", }; - write!(f, "{}", res) + write!(f, "{res}") } } @@ -221,7 +221,7 @@ impl fmt::Display for component_added_subscription::ComponentAddedSubscriptionCo } }; - write!(f, "{}", res) + write!(f, "{res}") } } @@ -241,6 +241,6 @@ impl fmt::Display } }; - write!(f, "{}", res) + write!(f, "{res}") } } diff --git a/lib/vector-buffers/benches/common.rs b/lib/vector-buffers/benches/common.rs index c258407a488b3..b23d403f32576 100644 --- a/lib/vector-buffers/benches/common.rs +++ b/lib/vector-buffers/benches/common.rs @@ -60,7 +60,7 @@ pub struct EncodeError; impl fmt::Display for EncodeError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self) + write!(f, "{self:?}") } } @@ -71,7 +71,7 @@ pub struct DecodeError; impl fmt::Display for DecodeError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self) + write!(f, "{self:?}") } } diff --git a/lib/vector-buffers/examples/buffer_perf.rs b/lib/vector-buffers/examples/buffer_perf.rs index 7d27eff9a9180..89212387c4b80 100644 --- a/lib/vector-buffers/examples/buffer_perf.rs +++ b/lib/vector-buffers/examples/buffer_perf.rs @@ -113,7 +113,7 @@ pub struct EncodeError; impl fmt::Display for EncodeError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self) + write!(f, "{self:?}") } } @@ -124,7 +124,7 @@ pub struct DecodeError; impl fmt::Display for DecodeError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self) + write!(f, "{self:?}") } } @@ -247,7 +247,7 @@ where T: Bufferable + Clone + Finalizable, { let data_dir = PathBuf::from("/tmp/vector"); - let id = format!("{}-buffer-perf-testing", buffer_type); + let id = format!("{buffer_type}-buffer-perf-testing"); let max_size_events = std::num::NonZeroUsize::new(500).unwrap(); let max_size_bytes = std::num::NonZeroU64::new(32 * 1024 * 1024 * 1024).unwrap(); let when_full = WhenFull::Block; @@ -276,8 +276,7 @@ where } } s => panic!( - "unknown buffer type '{}' requested; valid types are in-memory, disk-v1, and disk-v2", - s + "unknown buffer type '{s}' requested; valid types are in-memory, disk-v1, and disk-v2" ), }; diff --git a/lib/vector-buffers/src/topology/acks.rs b/lib/vector-buffers/src/topology/acks.rs index de50cd75ee19f..3db7c19e3d317 100644 --- a/lib/vector-buffers/src/topology/acks.rs +++ b/lib/vector-buffers/src/topology/acks.rs @@ -714,8 +714,7 @@ mod tests { let actual_result = apply_action_sut(&mut sut, action); assert_eq!( expected_result, actual_result, - "{}: ran action {:?} expecting result {:?}, but got result {:?} instead", - name, action, expected_result, actual_result + "{name}: ran action {action:?} expecting result {expected_result:?}, but got result {actual_result:?} instead" ); } } @@ -862,8 +861,7 @@ mod tests { let effective_offset = acked_marker_id.wrapping_add(unclaimed_acks); let is_eligible = required_acked_offset <= effective_offset && required_acked_offset >= 
*marker_id; assert!(!is_eligible, - "SUT returned None but next fixed-size marker on stack is eligible: id: {}, len: {}, acked_id_offset: {}", - marker_id, len, acked_marker_id); + "SUT returned None but next fixed-size marker on stack is eligible: id: {marker_id}, len: {len}, acked_id_offset: {acked_marker_id}"); }, PendingMarkerLength::Unknown => { // If we have an unknown marker, the only we shouldn't be diff --git a/lib/vector-buffers/src/topology/channel/limited_queue.rs b/lib/vector-buffers/src/topology/channel/limited_queue.rs index c5575544462de..e14fb77e95efc 100644 --- a/lib/vector-buffers/src/topology/channel/limited_queue.rs +++ b/lib/vector-buffers/src/topology/channel/limited_queue.rs @@ -102,16 +102,13 @@ impl LimitedSender { pub async fn send(&mut self, item: T) -> Result<(), SendError> { // Calculate how many permits we need, and wait until we can acquire all of them. let permits_required = self.get_required_permits_for_item(&item); - let permits = match self + let Ok(permits) = self .inner .limiter .clone() .acquire_many_owned(permits_required) .await - { - Ok(permits) => permits, - Err(_) => return Err(SendError(item)), - }; + else { return Err(SendError(item)) }; self.inner .data diff --git a/lib/vector-buffers/src/variants/disk_v2/common.rs b/lib/vector-buffers/src/variants/disk_v2/common.rs index e93b6d197add4..4ff477d1ed07d 100644 --- a/lib/vector-buffers/src/variants/disk_v2/common.rs +++ b/lib/vector-buffers/src/variants/disk_v2/common.rs @@ -315,27 +315,18 @@ where if max_data_file_size > data_file_size_mechanical_limit { return Err(BuildError::InvalidParameter { param_name: "max_data_file_size", - reason: format!( - "cannot be greater than {} bytes", - data_file_size_mechanical_limit - ), + reason: format!("cannot be greater than {data_file_size_mechanical_limit} bytes"), }); } - let minimum_buffer_size = match get_minimum_buffer_size(max_data_file_size) { - Some(value) => value, - None => { - unreachable!("maximum data file size should be correctly limited at this point") - } + let Some(minimum_buffer_size) = get_minimum_buffer_size(max_data_file_size) else { + unreachable!("maximum data file size should be correctly limited at this point") }; if max_buffer_size < minimum_buffer_size { return Err(BuildError::InvalidParameter { param_name: "max_buffer_size", - reason: format!( - "must be greater than or equal to {} bytes", - minimum_buffer_size - ), + reason: format!("must be greater than or equal to {minimum_buffer_size} bytes"), }); } @@ -349,21 +340,15 @@ where if max_record_size <= MINIMUM_MAX_RECORD_SIZE { return Err(BuildError::InvalidParameter { param_name: "max_record_size", - reason: format!( - "must be greater than or equal to {} bytes", - MINIMUM_MAX_RECORD_SIZE, - ), + reason: format!("must be greater than or equal to {MINIMUM_MAX_RECORD_SIZE} bytes",), }); } - let max_record_size_converted = match u64::try_from(max_record_size) { - Ok(value) => value, - Err(_) => { - return Err(BuildError::InvalidParameter { - param_name: "max_record_size", - reason: "must be less than 2^64 bytes".to_string(), - }) - } + let Ok(max_record_size_converted) = u64::try_from(max_record_size) else { + return Err(BuildError::InvalidParameter { + param_name: "max_record_size", + reason: "must be less than 2^64 bytes".to_string(), + }) }; if max_record_size_converted > max_data_file_size { diff --git a/lib/vector-buffers/src/variants/disk_v2/reader.rs b/lib/vector-buffers/src/variants/disk_v2/reader.rs index 8bffd7c398971..012824ec59402 100644 --- 
a/lib/vector-buffers/src/variants/disk_v2/reader.rs +++ b/lib/vector-buffers/src/variants/disk_v2/reader.rs @@ -307,9 +307,7 @@ where &mut self, is_finalized: bool, ) -> Result, ReaderError> { - let record_len = if let Some(len) = self.read_length_delimiter(is_finalized).await? { - len - } else { + let Some(record_len) = self.read_length_delimiter(is_finalized).await? else { return Ok(None); }; @@ -852,12 +850,11 @@ where } => { let record = try_as_record_archive(data_file_mmap.as_ref()) .expect("record was already validated"); - let item = match decode_record_payload::(record) { - Ok(item) => item, + let Ok(item) = decode_record_payload::(record) else { // If there's an error decoding the item, just fall back to the slow path, // because this file might actually be where we left off, so we don't want // to incorrectly skip ahead or anything. - Err(_) => break, + break }; // We have to remove 1 from the event count here because otherwise the ID would diff --git a/lib/vector-buffers/src/variants/disk_v2/tests/known_errors.rs b/lib/vector-buffers/src/variants/disk_v2/tests/known_errors.rs index 55abdf52e694e..06cd4102af579 100644 --- a/lib/vector-buffers/src/variants/disk_v2/tests/known_errors.rs +++ b/lib/vector-buffers/src/variants/disk_v2/tests/known_errors.rs @@ -791,11 +791,8 @@ async fn reader_throws_error_when_record_is_undecodable_via_metadata() { CAN_DECODE_VALUE.store(1, Ordering::Relaxed); let second_read_result = reader.next().await; assert!(matches!(second_read_result, Err(ReaderError::Incompatible { .. }))); - let second_read_error_reason = if let ReaderError::Incompatible { reason } = second_read_result.unwrap_err() { - reason - } else { - panic!("error should be ReadError::Incompatible"); - }; + let ReaderError::Incompatible { reason: second_read_error_reason } = second_read_result.unwrap_err() + else { panic!("error should be ReadError::Incompatible") }; let expected_second_read_error_reason = format!("record metadata not supported (metadata: {:#036b})", 0_u32); assert_eq!(expected_second_read_error_reason, second_read_error_reason); @@ -806,15 +803,11 @@ async fn reader_throws_error_when_record_is_undecodable_via_metadata() { // should cause an "incompatible" error: let third_read_result = reader.next().await; assert!(matches!(third_read_result, Err(ReaderError::Incompatible { .. 
}))); - let third_read_error_reason = if let ReaderError::Incompatible { reason } = third_read_result.unwrap_err() { - reason - } else { - panic!("error should be ReadError::Incompatible"); - }; + let ReaderError::Incompatible { reason: third_read_error_reason } = third_read_result.unwrap_err() + else { panic!("error should be ReadError::Incompatible") }; let expected_third_read_error_reason_prefix = "invalid metadata for"; assert!(third_read_error_reason.starts_with(expected_third_read_error_reason_prefix), - "error reason when metadata cannot be converted should start with 'metadata invalid for', got '{}' instead", - third_read_error_reason); + "error reason when metadata cannot be converted should start with 'metadata invalid for', got '{third_read_error_reason}' instead"); } }) .await; diff --git a/lib/vector-buffers/src/variants/disk_v2/tests/model/mod.rs b/lib/vector-buffers/src/variants/disk_v2/tests/model/mod.rs index 2e2932fe13a42..eef5f94ec85e2 100644 --- a/lib/vector-buffers/src/variants/disk_v2/tests/model/mod.rs +++ b/lib/vector-buffers/src/variants/disk_v2/tests/model/mod.rs @@ -374,8 +374,7 @@ impl ReaderModel { assert!( self.filesystem.delete_file(file_id), - "invariant violation: tried to delete file id {}, but file does not exist", - file_id + "invariant violation: tried to delete file id {file_id}, but file does not exist" ); } else { // Not enough delete acks to proceed, so we can't do anything more. diff --git a/lib/vector-common/src/encode_key_value.rs b/lib/vector-common/src/encode_key_value.rs index 40cdcc03b717a..13b646ffd537c 100644 --- a/lib/vector-common/src/encode_key_value.rs +++ b/lib/vector-common/src/encode_key_value.rs @@ -92,7 +92,7 @@ fn flatten<'a>( Ok(map) } -fn encode_field<'a>(output: &mut String, key: &str, value: &str, key_value_delimiter: &'a str) { +fn encode_field(output: &mut String, key: &str, value: &str, key_value_delimiter: &str) { encode_string(output, key); output.push_str(key_value_delimiter); encode_string(output, value); diff --git a/lib/vector-common/src/event_test_util.rs b/lib/vector-common/src/event_test_util.rs index 2854e927ac631..f98b90b1fe9a7 100644 --- a/lib/vector-common/src/event_test_util.rs +++ b/lib/vector-common/src/event_test_util.rs @@ -28,9 +28,8 @@ pub fn contains_name_once(pattern: &str) -> Result<(), String> { Err(format!("Missing event `{pattern}`")) } else if n_events > 1 { Err(format!( - "Multiple ({}) events matching `{}`: ({}). Hint! Don't use the `assert_x_` \ - test helpers on round-trip tests (tests that run more than a single component).", - n_events, pattern, names + "Multiple ({n_events}) events matching `{pattern}`: ({names}). Hint! Don't use the `assert_x_` \ + test helpers on round-trip tests (tests that run more than a single component)." 
)) } else { Ok(()) diff --git a/lib/vector-common/src/shutdown.rs b/lib/vector-common/src/shutdown.rs index 4564221d73db1..348a079690bef 100644 --- a/lib/vector-common/src/shutdown.rs +++ b/lib/vector-common/src/shutdown.rs @@ -152,30 +152,26 @@ impl SourceShutdownCoordinator { id.clone(), other.shutdown_begun_triggers.remove(id).unwrap_or_else(|| { panic!( - "Other ShutdownCoordinator didn't have a shutdown_begun_trigger for \"{}\"", - id + "Other ShutdownCoordinator didn't have a shutdown_begun_trigger for \"{id}\"" ) }), ); assert!( existing.is_none(), - "ShutdownCoordinator already has a shutdown_begin_trigger for source \"{}\"", - id + "ShutdownCoordinator already has a shutdown_begin_trigger for source \"{id}\"" ); let existing = self.shutdown_force_triggers.insert( id.clone(), other.shutdown_force_triggers.remove(id).unwrap_or_else(|| { panic!( - "Other ShutdownCoordinator didn't have a shutdown_force_trigger for \"{}\"", - id + "Other ShutdownCoordinator didn't have a shutdown_force_trigger for \"{id}\"" ) }), ); assert!( existing.is_none(), - "ShutdownCoordinator already has a shutdown_force_trigger for source \"{}\"", - id + "ShutdownCoordinator already has a shutdown_force_trigger for source \"{id}\"" ); let existing = self.shutdown_complete_tripwires.insert( @@ -185,15 +181,13 @@ impl SourceShutdownCoordinator { .remove(id) .unwrap_or_else(|| { panic!( - "Other ShutdownCoordinator didn't have a shutdown_complete_tripwire for \"{}\"", - id + "Other ShutdownCoordinator didn't have a shutdown_complete_tripwire for \"{id}\"" ) }), ); assert!( existing.is_none(), - "ShutdownCoordinator already has a shutdown_complete_tripwire for source \"{}\"", - id + "ShutdownCoordinator already has a shutdown_complete_tripwire for source \"{id}\"" ); } @@ -219,14 +213,12 @@ impl SourceShutdownCoordinator { let shutdown_complete_tripwire = shutdown_complete_tripwires.remove(&id).unwrap_or_else(|| { panic!( - "shutdown_complete_tripwire for source \"{}\" not found in the ShutdownCoordinator", - id + "shutdown_complete_tripwire for source \"{id}\" not found in the ShutdownCoordinator" ) }); let shutdown_force_trigger = shutdown_force_triggers.remove(&id).unwrap_or_else(|| { panic!( - "shutdown_force_trigger for source \"{}\" not found in the ShutdownCoordinator", - id + "shutdown_force_trigger for source \"{id}\" not found in the ShutdownCoordinator" ) }); @@ -260,8 +252,7 @@ impl SourceShutdownCoordinator { ) -> impl Future { let begin_shutdown_trigger = self.shutdown_begun_triggers.remove(id).unwrap_or_else(|| { panic!( - "shutdown_begun_trigger for source \"{}\" not found in the ShutdownCoordinator", - id + "shutdown_begun_trigger for source \"{id}\" not found in the ShutdownCoordinator" ) }); // This is what actually triggers the source to begin shutting down. 
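
The shutdown.rs panics around this point are part of the same mechanical migration to inline format-args captures (Rust 2021, stabilized in 1.58): a bare identifier moves inside the braces and the trailing positional argument is dropped. A small equivalence check, with `id` as a stand-in local rather than the coordinator's field:

fn main() {
    let id = "my_source";
    assert_eq!(
        format!("shutdown_force_trigger for source \"{}\" not found", id),
        format!("shutdown_force_trigger for source \"{id}\" not found"),
    );
}
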
@@ -272,14 +263,12 @@ impl SourceShutdownCoordinator { .remove(id) .unwrap_or_else(|| { panic!( - "shutdown_complete_tripwire for source \"{}\" not found in the ShutdownCoordinator", - id + "shutdown_complete_tripwire for source \"{id}\" not found in the ShutdownCoordinator" ) }); let shutdown_force_trigger = self.shutdown_force_triggers.remove(id).unwrap_or_else(|| { panic!( - "shutdown_force_trigger for source \"{}\" not found in the ShutdownCoordinator", - id + "shutdown_force_trigger for source \"{id}\" not found in the ShutdownCoordinator" ) }); SourceShutdownCoordinator::shutdown_source_complete( diff --git a/lib/vector-config-macros/src/component_name.rs b/lib/vector-config-macros/src/component_name.rs index c423f30533b24..a431adeeebb1b 100644 --- a/lib/vector-config-macros/src/component_name.rs +++ b/lib/vector-config-macros/src/component_name.rs @@ -126,8 +126,7 @@ fn attr_to_component_name(attr: &Attribute) -> Result, Error> { return Err(Error::new( attr.span(), format!( - "{}s must have a name specified (e.g. `{}(\"my_component\")`)", - component_type, component_type_attr + "{component_type}s must have a name specified (e.g. `{component_type_attr}(\"my_component\")`)" ), )); } @@ -139,8 +138,7 @@ fn attr_to_component_name(attr: &Attribute) -> Result, Error> { Error::new( attr.span(), format!( - "expected a string literal for the {} name (i.e. `{}(\"...\")`)", - component_type, component_type_attr + "expected a string literal for the {component_type} name (i.e. `{component_type_attr}(\"...\")`)" ), ) }) @@ -173,6 +171,6 @@ fn check_component_name_validity(component_name: &str) -> Result<(), String> { if component_name == component_name_converted { Ok(()) } else { - Err(format!("component names must be lowercase, and contain only letters, numbers, and underscores (e.g. \"{}\")", component_name_converted)) + Err(format!("component names must be lowercase, and contain only letters, numbers, and underscores (e.g. 
\"{component_name_converted}\")")) } } diff --git a/lib/vector-config-macros/src/configurable.rs b/lib/vector-config-macros/src/configurable.rs index be8a3bffb62ba..d8f6d688e6581 100644 --- a/lib/vector-config-macros/src/configurable.rs +++ b/lib/vector-config-macros/src/configurable.rs @@ -176,8 +176,7 @@ fn generate_named_struct_field( .expect("named struct fields must always have an ident"); let field_schema_ty = get_field_schema_ty(field); let field_already_contained = format!( - "schema properties already contained entry for `{}`, this should not occur", - field_name + "schema properties already contained entry for `{field_name}`, this should not occur" ); let field_key = field.name(); @@ -601,8 +600,7 @@ fn generate_named_enum_field(field: &Field<'_>) -> proc_macro2::TokenStream { let field_name = field.ident().expect("field should be named"); let field_ty = field.ty(); let field_already_contained = format!( - "schema properties already contained entry for `{}`, this should not occur", - field_name + "schema properties already contained entry for `{field_name}`, this should not occur" ); let field_key = field.name().to_string(); @@ -752,7 +750,7 @@ fn generate_enum_variant_schema(variant: &Variant<'_>) -> proc_macro2::TokenStre // { "field_using_enum": { "": "VariantName" } } Tagging::Internal { tag } => match variant.style() { Style::Struct => { - let tag_already_contained = format!("enum tag `{}` already contained as a field in variant; tag cannot overlap with any fields in any variant", tag); + let tag_already_contained = format!("enum tag `{tag}` already contained as a field in variant; tag cannot overlap with any fields in any variant"); // Just generate the tag field directly and pass it along to be included in the // struct schema. diff --git a/lib/vector-config/tests/integration/smoke.rs b/lib/vector-config/tests/integration/smoke.rs index bd16c43188240..ae4465d9bf425 100644 --- a/lib/vector-config/tests/integration/smoke.rs +++ b/lib/vector-config/tests/integration/smoke.rs @@ -540,8 +540,8 @@ fn generate_semi_real_schema() { let json = serde_json::to_string_pretty(&schema) .expect("rendering root schema to JSON should not fail"); - println!("{}", json); + println!("{json}"); } - Err(e) => eprintln!("error while generating schema: {:?}", e), + Err(e) => eprintln!("error while generating schema: {e:?}"), } } diff --git a/lib/vector-core/src/event/log_event.rs b/lib/vector-core/src/event/log_event.rs index ec5a365b499d2..2377678a8c29c 100644 --- a/lib/vector-core/src/event/log_event.rs +++ b/lib/vector-core/src/event/log_event.rs @@ -384,10 +384,7 @@ impl LogEvent { /// Merge all fields specified at `fields` from `incoming` to `current`. 
pub fn merge(&mut self, mut incoming: LogEvent, fields: &[impl AsRef]) { for field in fields { - let incoming_val = match incoming.remove(field.as_ref()) { - None => continue, - Some(val) => val, - }; + let Some(incoming_val) = incoming.remove(field.as_ref()) else { continue }; match self.get_mut(field.as_ref()) { None => { self.insert(field.as_ref(), incoming_val); diff --git a/lib/vector-core/src/event/lua/event.rs b/lib/vector-core/src/event/lua/event.rs index 2beb3d20e16fa..559361c2daa77 100644 --- a/lib/vector-core/src/event/lua/event.rs +++ b/lib/vector-core/src/event/lua/event.rs @@ -36,15 +36,12 @@ impl<'a> ToLua<'a> for LuaEvent { impl<'a> FromLua<'a> for Event { fn from_lua(value: LuaValue<'a>, lua: &'a Lua) -> LuaResult { - let table = match &value { - LuaValue::Table(t) => t, - _ => { - return Err(LuaError::FromLuaConversionError { - from: value.type_name(), - to: "Event", - message: Some("Event should be a Lua table".to_string()), - }) - } + let LuaValue::Table(table) = &value else { + return Err(LuaError::FromLuaConversionError { + from: value.type_name(), + to: "Event", + message: Some("Event should be a Lua table".to_string()), + }) }; match (table.raw_get("log")?, table.raw_get("metric")?) { (LuaValue::Table(log), LuaValue::Nil) => { diff --git a/lib/vector-core/src/event/metric/data.rs b/lib/vector-core/src/event/metric/data.rs index 23a193d5fe553..d850f9441033a 100644 --- a/lib/vector-core/src/event/metric/data.rs +++ b/lib/vector-core/src/event/metric/data.rs @@ -100,12 +100,9 @@ impl MetricData { other.time.interval_ms, ) { (Some(t1), Some(i1), Some(t2), Some(i2)) => { - let delta_t = match TryInto::::try_into( + let Ok(delta_t) = TryInto::::try_into( t1.timestamp_millis().abs_diff(t2.timestamp_millis()), - ) { - Ok(delta_t) => delta_t, - Err(_) => return false, - }; + ) else { return false }; if t1 > t2 { // The interval window starts from the beginning of `other` (aka `t2`) diff --git a/lib/vector-core/src/event/metric/mod.rs b/lib/vector-core/src/event/metric/mod.rs index 63b8cf966ea00..dad397639e8be 100644 --- a/lib/vector-core/src/event/metric/mod.rs +++ b/lib/vector-core/src/event/metric/mod.rs @@ -504,8 +504,7 @@ impl TryFrom<::value::Value> for MetricKind { "incremental" => Ok(Self::Incremental), "absolute" => Ok(Self::Absolute), value => Err(format!( - "invalid metric kind {}, metric kind must be `absolute` or `incremental`", - value + "invalid metric kind {value}, metric kind must be `absolute` or `incremental`" )), } } diff --git a/lib/vector-core/src/event/vrl_target.rs b/lib/vector-core/src/event/vrl_target.rs index c363dc573fc1d..0ab4748e119c5 100644 --- a/lib/vector-core/src/event/vrl_target.rs +++ b/lib/vector-core/src/event/vrl_target.rs @@ -1195,10 +1195,7 @@ mod test { )])) ); - let metric = match target { - VrlTarget::Metric { metric, .. } => metric, - _ => unreachable!(), - }; + let VrlTarget::Metric { metric, .. 
} = target else { unreachable!() }; // get single value (should be the last one) assert_eq!(metric.tag_value("foo"), Some("b".into())); diff --git a/lib/vector-core/src/metrics/ddsketch.rs b/lib/vector-core/src/metrics/ddsketch.rs index 928b568dbe7c1..8487a20471e09 100644 --- a/lib/vector-core/src/metrics/ddsketch.rs +++ b/lib/vector-core/src/metrics/ddsketch.rs @@ -1291,9 +1291,7 @@ mod tests { assert!( (positive - negative).abs() <= 1.0e-6, - "positive vs negative difference too great ({} vs {})", - positive, - negative + "positive vs negative difference too great ({positive} vs {negative})" ); } @@ -1499,18 +1497,14 @@ mod tests { let max_observed_rel_acc = check_max_relative_accuracy(config, min_value, max_value); assert!( max_observed_rel_acc <= rel_acc + FLOATING_POINT_ACCEPTABLE_ERROR, - "observed out of bound max relative acc: {}, target rel acc={}", - max_observed_rel_acc, - rel_acc + "observed out of bound max relative acc: {max_observed_rel_acc}, target rel acc={rel_acc}" ); } fn compute_relative_accuracy(target: f64, actual: f64) -> f64 { assert!( !(target < 0.0 || actual < 0.0), - "expected/actual values must be greater than 0.0; target={}, actual={}", - target, - actual + "expected/actual values must be greater than 0.0; target={target}, actual={actual}" ); if target == actual { diff --git a/lib/vector-core/src/schema/requirement.rs b/lib/vector-core/src/schema/requirement.rs index d0b1ee26d2092..53512e7681695 100644 --- a/lib/vector-core/src/schema/requirement.rs +++ b/lib/vector-core/src/schema/requirement.rs @@ -204,8 +204,7 @@ impl std::fmt::Display for ValidationError { got, } => write!( f, - "invalid semantic meaning: {} (expected {}, got {})", - identifier, want, got + "invalid semantic meaning: {identifier} (expected {want}, got {got})" ), Self::MeaningDuplicate { identifier, paths } => write!( f, diff --git a/lib/vector-core/src/serde.rs b/lib/vector-core/src/serde.rs index 8b27f4222c2b7..e8b3970200d99 100644 --- a/lib/vector-core/src/serde.rs +++ b/lib/vector-core/src/serde.rs @@ -84,8 +84,7 @@ pub mod ascii_char { Ok(character as u8) } else { Err(de::Error::custom(format!( - "invalid character: {}, expected character in ASCII range", - character + "invalid character: {character}, expected character in ASCII range" ))) } } diff --git a/lib/vector-core/src/sink.rs b/lib/vector-core/src/sink.rs index bb98bc0d2b482..c397aae977215 100644 --- a/lib/vector-core/src/sink.rs +++ b/lib/vector-core/src/sink.rs @@ -125,10 +125,7 @@ impl + Send + Unpin> EventSink { fn flush_queue(self: &mut Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { while self.queue.is_some() { poll_ready_ok!(self.sink.poll_ready_unpin(cx)); - let event = match self.next_event() { - None => break, - Some(event) => event, - }; + let Some(event) = self.next_event() else { break }; if let Err(err) = self.sink.start_send_unpin(event) { return Poll::Ready(Err(err)); } diff --git a/lib/vector-vrl-functions/src/remove_metadata_field.rs b/lib/vector-vrl-functions/src/remove_metadata_field.rs index 9abd6504c9b23..970fac28873ad 100644 --- a/lib/vector-vrl-functions/src/remove_metadata_field.rs +++ b/lib/vector-vrl-functions/src/remove_metadata_field.rs @@ -54,7 +54,7 @@ impl Function for RemoveMetadataField { if let MetadataKey::Query(query) = &key { if ctx.is_read_only_path(query) { return Err(vrl::function::Error::ReadOnlyMutation { - context: format!("{} is read-only, and cannot be removed", query), + context: format!("{query} is read-only, and cannot be removed"), } .into()); } diff --git 
a/lib/vector-vrl-functions/src/set_metadata_field.rs b/lib/vector-vrl-functions/src/set_metadata_field.rs index d15cbea80a09f..718b5889ba983 100644 --- a/lib/vector-vrl-functions/src/set_metadata_field.rs +++ b/lib/vector-vrl-functions/src/set_metadata_field.rs @@ -65,7 +65,7 @@ impl Function for SetMetadataField { if let MetadataKey::Query(target_path) = &key { if ctx.is_read_only_path(target_path) { return Err(vrl::function::Error::ReadOnlyMutation { - context: format!("{} is read-only, and cannot be modified", target_path), + context: format!("{target_path} is read-only, and cannot be modified"), } .into()); } diff --git a/lib/vrl/cli/src/repl.rs b/lib/vrl/cli/src/repl.rs index cf1e03b3abf2c..7b1f9ef0f586d 100644 --- a/lib/vrl/cli/src/repl.rs +++ b/lib/vrl/cli/src/repl.rs @@ -367,9 +367,8 @@ fn open_url(url: &str) { #[allow(clippy::print_stdout)] { println!( - "couldn't open default web browser: {}\n\ - you can access the desired documentation at {}", - err, url + "couldn't open default web browser: {err}\n\ + you can access the desired documentation at {url}" ); } } diff --git a/lib/vrl/compiler/src/expression/assignment.rs b/lib/vrl/compiler/src/expression/assignment.rs index 297b09c79b937..f7fa4dae49214 100644 --- a/lib/vrl/compiler/src/expression/assignment.rs +++ b/lib/vrl/compiler/src/expression/assignment.rs @@ -332,7 +332,7 @@ impl Target { Self::Internal(ident, path) => { let type_def = match state.local.variable(ident) { None => TypeDef::null().with_type_inserted(&path.clone().into(), new_type_def), - Some(&Details { ref type_def, .. }) => type_def + Some(Details { type_def, .. }) => type_def .clone() .with_type_inserted(&path.clone().into(), new_type_def), }; diff --git a/lib/vrl/compiler/src/expression/function_call.rs b/lib/vrl/compiler/src/expression/function_call.rs index ef278095db63f..0def228f3c7a3 100644 --- a/lib/vrl/compiler/src/expression/function_call.rs +++ b/lib/vrl/compiler/src/expression/function_call.rs @@ -47,13 +47,11 @@ impl<'a> Builder<'a> { let (ident_span, ident) = ident.take(); // Check if function exists. 
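
The function_call.rs hunk below turns an `if let .. else` over an iterator search into a single `let-else`; the rewrite is legal because the failure arm already diverged. A self-contained sketch with a hypothetical function table standing in for the VRL registry:

const FUNCS: &[&str] = &["upcase", "downcase"];

fn resolve(ident: &str) -> Result<usize, String> {
    // Bind both the position and the entry, or bail out with an error.
    let Some((pos, _)) = FUNCS.iter().enumerate().find(|(_, f)| **f == ident) else {
        return Err(format!("undefined function: {ident}"));
    };
    Ok(pos)
}

fn main() {
    assert_eq!(resolve("upcase"), Ok(0));
    assert!(resolve("nope").is_err());
}
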
- let (function_id, function) = if let Some(function) = funcs + let Some((function_id, function)) = funcs .iter() .enumerate() .find(|(_pos, f)| f.identifier() == ident.as_ref()) - { - function - } else { + else { let idents = funcs .iter() .map(|func| func.identifier()) @@ -990,8 +988,7 @@ impl DiagnosticMessage for Error { } => { vec![Label::primary( format!( - r#"required argument missing: "{}" (position {})"#, - keyword, position + r#"required argument missing: "{keyword}" (position {position})"# ), call_span, )] diff --git a/lib/vrl/compiler/src/expression/op.rs b/lib/vrl/compiler/src/expression/op.rs index 8890b1662844d..c7973488873ed 100644 --- a/lib/vrl/compiler/src/expression/op.rs +++ b/lib/vrl/compiler/src/expression/op.rs @@ -398,16 +398,12 @@ mod tests { lhs: impl TryInto + fmt::Debug + Clone, rhs: impl TryInto + fmt::Debug + Clone, ) -> Op { - use std::result::Result::Err; - - let lhs = match lhs.clone().try_into() { - Ok(v) => v, - Err(_) => panic!("not a valid lhs expression: {lhs:?}"), + let Ok(lhs) = lhs.clone().try_into() else { + panic!("not a valid lhs expression: {lhs:?}") }; - let rhs = match rhs.clone().try_into() { - Ok(v) => v, - Err(_) => panic!("not a valid rhs expression: {rhs:?}"), + let Ok(rhs) = rhs.clone().try_into() else { + panic!("not a valid rhs expression: {rhs:?}") }; Op { diff --git a/lib/vrl/compiler/src/lib.rs b/lib/vrl/compiler/src/lib.rs index 453e4ca7aa808..83b28b5e3c38f 100644 --- a/lib/vrl/compiler/src/lib.rs +++ b/lib/vrl/compiler/src/lib.rs @@ -14,7 +14,7 @@ clippy::cast_precision_loss, // allowed in initial deny commit clippy::cast_sign_loss, // allowed in initial deny commit clippy::if_not_else, // allowed in initial deny commit - clippy::let_underscore_drop, // allowed in initial deny commit + let_underscore_drop, // allowed in initial deny commit clippy::match_bool, // allowed in initial deny commit clippy::match_same_arms, // allowed in initial deny commit clippy::match_wild_err_arm, // allowed in initial deny commit diff --git a/lib/vrl/proptests/src/main.rs b/lib/vrl/proptests/src/main.rs index 5ac3e9c48930c..1987543b9fdd5 100644 --- a/lib/vrl/proptests/src/main.rs +++ b/lib/vrl/proptests/src/main.rs @@ -57,8 +57,8 @@ fn main() { let source = "upcase(\").\")"; let program = parser::parse(source).unwrap(); - println!("{:?}", program); - println!("{}", program); + println!("{program:?}"); + println!("{program}"); } prop_compose! { @@ -284,7 +284,7 @@ proptest! { assert_eq!(program.to_string(), expr.to_string(), - "{}", source); + "{source}"); } #[test] @@ -295,6 +295,6 @@ proptest! 
{ assert_eq!(program.to_string(), expr.to_string(), - "{}", source); + "{source}"); } } diff --git a/lib/vrl/stdlib/src/assert_eq.rs b/lib/vrl/stdlib/src/assert_eq.rs index 82c9910513dc2..a24d7d6aa2449 100644 --- a/lib/vrl/stdlib/src/assert_eq.rs +++ b/lib/vrl/stdlib/src/assert_eq.rs @@ -14,8 +14,7 @@ fn assert_eq(left: Value, right: Value, message: Option) -> Resolved { }) } else { Err(ExpressionError::from(format!( - "assertion failed: {} == {}", - left, right + "assertion failed: {left} == {right}" ))) } } diff --git a/lib/vrl/stdlib/src/format_int.rs b/lib/vrl/stdlib/src/format_int.rs index 4f23232ca132c..53946f233518d 100644 --- a/lib/vrl/stdlib/src/format_int.rs +++ b/lib/vrl/stdlib/src/format_int.rs @@ -10,8 +10,7 @@ fn format_int(value: Value, base: Option) -> Resolved { let value = base.try_integer()?; if !(2..=36).contains(&value) { return Err(format!( - "invalid base {}: must be be between 2 and 36 (inclusive)", - value + "invalid base {value}: must be be between 2 and 36 (inclusive)" ) .into()); } diff --git a/lib/vrl/stdlib/src/log_util.rs b/lib/vrl/stdlib/src/log_util.rs index 1263b461978b1..4be36a81894a1 100644 --- a/lib/vrl/stdlib/src/log_util.rs +++ b/lib/vrl/stdlib/src/log_util.rs @@ -163,12 +163,7 @@ fn parse_time( timezone .datetime_from_str(time, format) .or_else(|_| DateTime::parse_from_str(time, format).map(Into::into)) - .map_err(|err| { - format!( - "failed parsing timestamp {} using format {}: {}", - time, format, err - ) - }) + .map_err(|err| format!("failed parsing timestamp {time} using format {format}: {err}")) } /// Takes the field as a string and returns a `Value`. diff --git a/lib/vrl/stdlib/src/match_datadog_query.rs b/lib/vrl/stdlib/src/match_datadog_query.rs index f45bf1aa93c2e..09401432a466d 100644 --- a/lib/vrl/stdlib/src/match_datadog_query.rs +++ b/lib/vrl/stdlib/src/match_datadog_query.rs @@ -397,10 +397,7 @@ impl Filter for VrlFilter { fn resolve_value(buf: LookupBuf, match_fn: Box>) -> Box> { let func = move |obj: &Value| { // Get the value by path, or return early with `false` if it doesn't exist. 
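
The filter hunk below reduces `match obj.get_by_path(..) { Some(v) => v, _ => return false }` to a one-line `let-else` guard. The same shape over a plain map, with `get` standing in for `get_by_path`:

use std::collections::HashMap;

fn matches_field(obj: &HashMap<&str, i64>, key: &str) -> bool {
    // Missing path: the filter simply does not match.
    let Some(value) = obj.get(key) else { return false };
    *value > 0
}

fn main() {
    let obj = HashMap::from([("status", 200)]);
    assert!(matches_field(&obj, "status"));
    assert!(!matches_field(&obj, "missing"));
}
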
- let value = match obj.get_by_path(&buf) { - Some(v) => v, - _ => return false, - }; + let Some(value) = obj.get_by_path(&buf) else { return false }; match_fn.run(value) }; diff --git a/lib/vrl/stdlib/src/parse_aws_alb_log.rs b/lib/vrl/stdlib/src/parse_aws_alb_log.rs index 417973fc33c51..81527cda33fce 100644 --- a/lib/vrl/stdlib/src/parse_aws_alb_log.rs +++ b/lib/vrl/stdlib/src/parse_aws_alb_log.rs @@ -236,7 +236,7 @@ fn parse_log(mut input: &str) -> Result { ); field_raw!( "target_status_code_list", - take_maybe_quoted_list(|c| matches!(c, '0'..='9')) + take_maybe_quoted_list(|c| c.is_ascii_digit()) ); field_raw!("classification", take_quoted1); field_raw!("classification_reason", take_quoted1); diff --git a/lib/vrl/stdlib/src/parse_int.rs b/lib/vrl/stdlib/src/parse_int.rs index 01d58dbf1ae69..524919ee7ddd1 100644 --- a/lib/vrl/stdlib/src/parse_int.rs +++ b/lib/vrl/stdlib/src/parse_int.rs @@ -9,8 +9,7 @@ fn parse_int(value: Value, base: Option) -> Resolved { if !(2..=36).contains(&base) { return Err(format!( - "invalid base {}: must be be between 2 and 36 (inclusive)", - value + "invalid base {value}: must be be between 2 and 36 (inclusive)" ) .into()); } diff --git a/lib/vrl/stdlib/src/parse_json.rs b/lib/vrl/stdlib/src/parse_json.rs index 955eca3b6f623..e52cce6a3c774 100644 --- a/lib/vrl/stdlib/src/parse_json.rs +++ b/lib/vrl/stdlib/src/parse_json.rs @@ -83,8 +83,7 @@ fn validate_depth(value: Value) -> std::result::Result { Ok(res as u8) } else { Err(ExpressionError::from(format!( - "max_depth value should be greater than 0 and less than 128, got {}", - res + "max_depth value should be greater than 0 and less than 128, got {res}" ))) } } diff --git a/lib/vrl/stdlib/src/parse_xml.rs b/lib/vrl/stdlib/src/parse_xml.rs index bf51ddc5a6966..b1fe81f5436aa 100644 --- a/lib/vrl/stdlib/src/parse_xml.rs +++ b/lib/vrl/stdlib/src/parse_xml.rs @@ -281,7 +281,7 @@ fn inner_kind() -> Kind { } /// Process an XML node, and return a VRL `Value`. -fn process_node<'a>(node: Node, config: &ParseXmlConfig<'a>) -> Value { +fn process_node(node: Node, config: &ParseXmlConfig<'_>) -> Value { // Helper to recurse over a `Node`s children, and build an object. let recurse = |node: Node| -> BTreeMap { let mut map = BTreeMap::new(); diff --git a/lib/vrl/stdlib/src/unnest.rs b/lib/vrl/stdlib/src/unnest.rs index 21adca53de114..c53f1e38da0bb 100644 --- a/lib/vrl/stdlib/src/unnest.rs +++ b/lib/vrl/stdlib/src/unnest.rs @@ -159,9 +159,7 @@ impl FunctionExpression for UnnestFn { pub(crate) fn invert_array_at_path(typedef: &TypeDef, path: &OwnedValuePath) -> TypeDef { let kind = typedef.kind().at_path(path); - let mut array = if let Some(array) = kind.into_array() { - array - } else { + let Some(mut array) = kind.into_array() else { // Guaranteed fallible. 
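
The parse_aws_alb_log.rs hunk above swaps a hand-rolled char-range `matches!` for the std predicate `char::is_ascii_digit`, the form suggested by clippy's `manual_is_ascii_check` lint (added around this toolchain). The two are equivalent:

fn main() {
    for c in ['0', '5', '9', 'a', ' '] {
        assert_eq!(matches!(c, '0'..='9'), c.is_ascii_digit());
    }
}
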
// This can't actually be set to "fallible", or it will cause problems due to // https://github.com/vectordotdev/vector/issues/13527 diff --git a/lib/vrl/tests/src/docs.rs b/lib/vrl/tests/src/docs.rs index e6b9c2c29b8cc..d700d588fa35c 100644 --- a/lib/vrl/tests/src/docs.rs +++ b/lib/vrl/tests/src/docs.rs @@ -186,7 +186,7 @@ impl Test { Self { name: title, - category: format!("docs/{}/{}", category, name), + category: format!("docs/{category}/{name}"), error: None, source, object, diff --git a/lib/vrl/tests/src/main.rs b/lib/vrl/tests/src/main.rs index e77c78f3af964..5668a3c7ed3d0 100644 --- a/lib/vrl/tests/src/main.rs +++ b/lib/vrl/tests/src/main.rs @@ -64,7 +64,7 @@ pub struct Cmd { impl Cmd { fn timezone(&self) -> TimeZone { if let Some(ref tz) = self.timezone { - TimeZone::parse(tz).unwrap_or_else(|| panic!("couldn't parse timezone: {}", tz)) + TimeZone::parse(tz).unwrap_or_else(|| panic!("couldn't parse timezone: {tz}")) } else { TimeZone::Named(Tz::UTC) } @@ -199,7 +199,7 @@ fn main() { let compile_timing_fmt = cmd .timings - .then(|| format!("comp: {:>9.3?}", compile_end)) + .then(|| format!("comp: {compile_end:>9.3?}")) .unwrap_or_default(); match result { @@ -221,7 +221,7 @@ fn main() { let timings_fmt = cmd .timings - .then(|| format!(" ({}, run: {:>9.3?})", compile_timing_fmt, run_end)) + .then(|| format!(" ({compile_timing_fmt}, run: {run_end:>9.3?})")) .unwrap_or_default(); let timings_color = if run_end.as_millis() > 10 { 1 } else { 245 }; @@ -253,7 +253,7 @@ fn main() { match serde_json::from_str::<'_, serde_json::Value>(want.trim()) { Ok(want) => want, Err(err) => { - eprintln!("{}", err); + eprintln!("{err}"); want.into() } } @@ -269,7 +269,7 @@ fn main() { let got = serde_json::to_string_pretty(&got).unwrap(); let diff = prettydiff::diff_lines(&want, &got); - println!(" {}", diff); + println!(" {diff}"); } failed = true; @@ -279,7 +279,7 @@ fn main() { } if cmd.verbose { - println!("{:#}", got); + println!("{got:#}"); } if failed && cmd.fail_early { @@ -301,7 +301,7 @@ fn main() { match serde_json::from_str::<'_, serde_json::Value>(&want) { Ok(want) => want, Err(err) => { - eprintln!("{}", err); + eprintln!("{err}"); want.into() } }; @@ -317,7 +317,7 @@ fn main() { let want = serde_json::to_string_pretty(&want).unwrap(); let got = serde_json::to_string_pretty(&got).unwrap(); let diff = prettydiff::diff_lines(&want, &got); - println!("{}", diff); + println!("{diff}"); } failed = true; @@ -328,7 +328,7 @@ fn main() { if !cmd.no_diff { let diff = prettydiff::diff_lines(&want, &got); - println!("{}", diff); + println!("{diff}"); } failed = true; @@ -336,7 +336,7 @@ fn main() { } if cmd.verbose { - println!("{:#}", err); + println!("{err:#}"); } if failed && cmd.fail_early { @@ -362,7 +362,7 @@ fn main() { { let timings_fmt = cmd .timings - .then(|| format!(" ({})", compile_timing_fmt)) + .then(|| format!(" ({compile_timing_fmt})")) .unwrap_or_default(); let timings = Colour::Fixed(245).paint(timings_fmt); @@ -373,7 +373,7 @@ fn main() { if !cmd.no_diff { let diff = prettydiff::diff_lines(&want, &got); - println!("{}", diff); + println!("{diff}"); } failed = true; @@ -382,7 +382,7 @@ fn main() { if cmd.verbose { formatter.enable_colors(true); - println!("{:#}", formatter); + println!("{formatter:#}"); } if failed && cmd.fail_early { diff --git a/lib/vrl/tests/src/test.rs b/lib/vrl/tests/src/test.rs index bff91bb62b5bb..d7d674342b840 100644 --- a/lib/vrl/tests/src/test.rs +++ b/lib/vrl/tests/src/test.rs @@ -125,7 +125,7 @@ impl Test { match serde_json::from_str::<'_, 
Value>(&object) { Ok(value) => value, Err(err) => { - error = Some(format!("unable to parse object as JSON: {}", err)); + error = Some(format!("unable to parse object as JSON: {err}")); Value::Null } } diff --git a/lib/vrl/vrl/src/runtime.rs b/lib/vrl/vrl/src/runtime.rs index 12a4a119182f5..5fa7052a6c954 100644 --- a/lib/vrl/vrl/src/runtime.rs +++ b/lib/vrl/vrl/src/runtime.rs @@ -82,7 +82,7 @@ impl Runtime { } Err(err) => { return Err(Terminate::Error( - format!("error querying target object: {}", err).into(), + format!("error querying target object: {err}").into(), )) } }; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index c727eb35b1d1d..596026945c911 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.66.1" +channel = "1.67.1" profile = "default" diff --git a/src/api/schema/events/notification.rs b/src/api/schema/events/notification.rs index 2d02276b3d0e1..546b9c5c96aa3 100644 --- a/src/api/schema/events/notification.rs +++ b/src/api/schema/events/notification.rs @@ -12,7 +12,7 @@ pub struct Matched { impl Matched { pub fn new(pattern: String) -> Self { Self { - message: format!("[tap] Pattern '{}' successfully matched.", pattern), + message: format!("[tap] Pattern '{pattern}' successfully matched."), pattern, } } @@ -31,8 +31,7 @@ impl NotMatched { pub fn new(pattern: String) -> Self { Self { message: format!( - "[tap] Pattern '{}' failed to match: will retry on configuration reload.", - pattern + "[tap] Pattern '{pattern}' failed to match: will retry on configuration reload." ), pattern, } diff --git a/src/api/schema/metrics/filter.rs b/src/api/schema/metrics/filter.rs index 92b5fc6bda4f1..aa1ad538fa4de 100644 --- a/src/api/schema/metrics/filter.rs +++ b/src/api/schema/metrics/filter.rs @@ -331,7 +331,7 @@ pub fn component_sent_events_totals_metrics_with_outputs( match m.value() { MetricValue::Counter { value } if cache - .insert(format!("{}.{}", id, output), *value) + .insert(format!("{id}.{output}"), *value) .unwrap_or(0.00) < *value => { @@ -377,8 +377,7 @@ pub fn component_sent_events_total_throughputs_with_outputs( .iter() .filter_map(|output| { let m = filter_output_metric(metrics.as_ref(), output.as_ref())?; - let throughput = - throughput(&m, format!("{}.{}", id, output), &mut cache)?; + let throughput = throughput(&m, format!("{id}.{output}"), &mut cache)?; Some(OutputThroughput::new(output.clone(), throughput as i64)) }) .collect::>(); diff --git a/src/api/schema/metrics/source/file.rs b/src/api/schema/metrics/source/file.rs index 4108f68d7210e..1fe5a40f7675c 100644 --- a/src/api/schema/metrics/source/file.rs +++ b/src/api/schema/metrics/source/file.rs @@ -312,7 +312,7 @@ mod tests { sort::by_fields(&mut files, &fields); for (i, f) in ["1", "2", "3"].iter().enumerate() { - assert_eq!(files[i].name.as_str(), format!("/path/to/file/{}", f)); + assert_eq!(files[i].name.as_str(), format!("/path/to/file/{f}")); } } @@ -331,7 +331,7 @@ mod tests { sort::by_fields(&mut files, &fields); for (i, f) in ["3", "2", "1"].iter().enumerate() { - assert_eq!(files[i].name.as_str(), format!("/path/to/file/{}", f)); + assert_eq!(files[i].name.as_str(), format!("/path/to/file/{f}")); } } diff --git a/src/api/tap.rs b/src/api/tap.rs index bc189154db32e..519db772ebb34 100644 --- a/src/api/tap.rs +++ b/src/api/tap.rs @@ -99,7 +99,7 @@ impl TapPayload { invalid_matches: Vec, ) -> Self { let pattern = pattern.into(); - let message = format!("[tap] Warning: source inputs cannot be tapped. 
Input pattern '{}' matches sources {:?}", pattern, invalid_matches); + let message = format!("[tap] Warning: source inputs cannot be tapped. Input pattern '{pattern}' matches sources {invalid_matches:?}"); Self::Notification(Notification::InvalidMatch(InvalidMatch::new( message, pattern, @@ -114,8 +114,7 @@ impl TapPayload { ) -> Self { let pattern = pattern.into(); let message = format!( - "[tap] Warning: sink outputs cannot be tapped. Output pattern '{}' matches sinks {:?}", - pattern, invalid_matches + "[tap] Warning: sink outputs cannot be tapped. Output pattern '{pattern}' matches sinks {invalid_matches:?}" ); Self::Notification(Notification::InvalidMatch(InvalidMatch::new( message, diff --git a/src/app.rs b/src/app.rs index 230043d9a4e3c..dec8a261665ad 100644 --- a/src/app.rs +++ b/src/app.rs @@ -81,15 +81,15 @@ impl Application { .unwrap_or_else(|_| match opts.log_level() { "off" => "off".to_owned(), level => [ - format!("vector={}", level), - format!("codec={}", level), - format!("vrl={}", level), - format!("file_source={}", level), + format!("vector={level}"), + format!("codec={level}"), + format!("vrl={level}"), + format!("file_source={level}"), "tower_limit=trace".to_owned(), - format!("rdkafka={}", level), - format!("buffers={}", level), - format!("lapin={}", level), - format!("kube={}", level), + format!("rdkafka={level}"), + format!("buffers={level}"), + format!("lapin={level}"), + format!("kube={level}"), ] .join(","), }); diff --git a/src/codecs/encoding/transformer.rs b/src/codecs/encoding/transformer.rs index 34d5c8e19058f..db9cc4444c457 100644 --- a/src/codecs/encoding/transformer.rs +++ b/src/codecs/encoding/transformer.rs @@ -324,17 +324,11 @@ mod tests { .unwrap() { Value::Integer(_) => {} - e => panic!( - "Timestamp was not transformed into a Unix timestamp. Was {:?}", - e - ), + e => panic!("Timestamp was not transformed into a Unix timestamp. Was {e:?}"), } match event.as_mut_log().get("another").unwrap() { Value::Integer(_) => {} - e => panic!( - "Timestamp was not transformed into a Unix timestamp. Was {:?}", - e - ), + e => panic!("Timestamp was not transformed into a Unix timestamp. Was {e:?}"), } } diff --git a/src/common/datadog.rs b/src/common/datadog.rs index 14f2d7fd8f36b..83f29eeff1f43 100644 --- a/src/common/datadog.rs +++ b/src/common/datadog.rs @@ -71,5 +71,5 @@ pub(crate) const fn get_base_domain_region(site: &str, region: Option) - pub(crate) fn get_api_base_endpoint(endpoint: Option<&String>, site: &str) -> String { endpoint .cloned() - .unwrap_or_else(|| format!("https://api.{}", site)) + .unwrap_or_else(|| format!("https://api.{site}")) } diff --git a/src/components/validation/runner/mod.rs b/src/components/validation/runner/mod.rs index 46e55532b6c36..49b7facbd4f6a 100644 --- a/src/components/validation/runner/mod.rs +++ b/src/components/validation/runner/mod.rs @@ -168,10 +168,7 @@ impl Runner { .insert(validator_name.to_string(), validator) .is_some() { - panic!( - "attempted to add duplicate validator '{}' to runner", - validator_name - ); + panic!("attempted to add duplicate validator '{validator_name}' to runner"); } } @@ -363,18 +360,10 @@ impl Runner { /// returned explaining the cause. 
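
Inline captures carry their format specs along unchanged, so `{invalid_matches:?}` in the tap.rs hunk above and the `{compile_end:>9.3?}` timing fields earlier keep the `?` Debug flag, width, and precision intact. An equivalence sketch with stand-in values:

use std::time::Duration;

fn main() {
    let invalid_matches = vec!["in0", "in1"];
    assert_eq!(
        format!("matches sources {invalid_matches:?}"),
        format!("matches sources {:?}", invalid_matches),
    );
    let compile_end = Duration::from_millis(1234);
    assert_eq!(
        format!("comp: {compile_end:>9.3?}"),
        format!("comp: {:>9.3?}", compile_end),
    );
}
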
fn load_component_test_cases(test_case_data_path: PathBuf) -> Result, String> { std::fs::File::open(test_case_data_path) - .map_err(|e| { - format!( - "I/O error during open of component validation test cases file: {}", - e - ) - }) + .map_err(|e| format!("I/O error during open of component validation test cases file: {e}")) .and_then(|file| { serde_yaml::from_reader(file).map_err(|e| { - format!( - "Deserialization error for component validation test cases file: {}", - e - ) + format!("Deserialization error for component validation test cases file: {e}") }) }) } diff --git a/src/conditions/datadog_search.rs b/src/conditions/datadog_search.rs index 6be28c8f7c24b..c48a0d856f245 100644 --- a/src/conditions/datadog_search.rs +++ b/src/conditions/datadog_search.rs @@ -64,7 +64,7 @@ impl Filter for EventFilter { fn exists(&self, field: Field) -> Box> { match field { Field::Tag(tag) => { - let starts_with = format!("{}:", tag); + let starts_with = format!("{tag}:"); any_string_match("tags", move |value| { value == tag || value.starts_with(&starts_with) @@ -98,7 +98,7 @@ impl Filter for EventFilter { } // Individual tags are compared by element key:value. Field::Tag(tag) => { - let value_bytes = Value::Bytes(format!("{}:{}", tag, to_match).into()); + let value_bytes = Value::Bytes(format!("{tag}:{to_match}").into()); array_match("tags", move |values| values.contains(&value_bytes)) } @@ -115,13 +115,13 @@ impl Filter for EventFilter { match field { // Default fields are matched by word boundary. Field::Default(field) => { - let re = word_regex(&format!("{}*", prefix)); + let re = word_regex(&format!("{prefix}*")); string_match(field, move |value| re.is_match(&value)) } // Tags are recursed until a match is found. Field::Tag(tag) => { - let starts_with = format!("{}:{}", tag, prefix); + let starts_with = format!("{tag}:{prefix}"); any_string_match("tags", move |value| value.starts_with(&starts_with)) } @@ -142,7 +142,7 @@ impl Filter for EventFilter { string_match(field, move |value| re.is_match(&value)) } Field::Tag(tag) => { - let re = wildcard_regex(&format!("{}:{}", tag, wildcard)); + let re = wildcard_regex(&format!("{tag}:{wildcard}")); any_string_match("tags", move |value| re.is_match(&value)) } @@ -1057,7 +1057,7 @@ mod test { // Every query should build successfully. 
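
The datadog_search.rs hunks above precompute a `key:` prefix with an inline capture and reuse it inside the matcher closure. A stand-alone sketch of that shape (`tag_matcher` and its names are stand-ins, not the EventFilter API):

fn tag_matcher(tag: &str) -> impl Fn(&str) -> bool {
    // Build the "tag:" prefix once; the closure reuses it for every value.
    let starts_with = format!("{tag}:");
    let tag = tag.to_owned();
    move |value: &str| value == tag || value.starts_with(&starts_with)
}

fn main() {
    let m = tag_matcher("env");
    assert!(m("env"));
    assert!(m("env:prod"));
    assert!(!m("environment:prod"));
}
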
let cond = config .build(&Default::default()) - .unwrap_or_else(|_| panic!("build failed: {}", source)); + .unwrap_or_else(|_| panic!("build failed: {source}")); assert!( cond.check_with_context(pass.clone()).0.is_ok(), diff --git a/src/conditions/vrl.rs b/src/conditions/vrl.rs index aab0e5e9b3723..b0bc4ab2c333f 100644 --- a/src/conditions/vrl.rs +++ b/src/conditions/vrl.rs @@ -126,7 +126,7 @@ impl Conditional for Vrl { ) .colored() .to_string(); - format!("source execution aborted: {}", err) + format!("source execution aborted: {err}") } vrl::Terminate::Error(err) => { let err = Formatter::new( @@ -137,7 +137,7 @@ impl Conditional for Vrl { ) .colored() .to_string(); - format!("source execution failed: {}", err) + format!("source execution failed: {err}") } }); diff --git a/src/config/api.rs b/src/config/api.rs index 7024b08ee8586..81fc368070da4 100644 --- a/src/config/api.rs +++ b/src/config/api.rs @@ -57,9 +57,7 @@ impl Options { // Prefer non default address (Some(a), Some(b)) => { match (Some(a) == default_address(), Some(b) == default_address()) { - (false, false) => { - return Err(format!("Conflicting `api` address: {}, {} .", a, b)) - } + (false, false) => return Err(format!("Conflicting `api` address: {a}, {b} .")), (false, true) => Some(a), (true, _) => Some(b), } diff --git a/src/config/builder.rs b/src/config/builder.rs index 93c901108e70f..394cddceda99d 100644 --- a/src/config/builder.rs +++ b/src/config/builder.rs @@ -333,22 +333,22 @@ impl ConfigBuilder { with.enrichment_tables.keys().for_each(|k| { if self.enrichment_tables.contains_key(k) { - errors.push(format!("duplicate enrichment_table name found: {}", k)); + errors.push(format!("duplicate enrichment_table name found: {k}")); } }); with.sources.keys().for_each(|k| { if self.sources.contains_key(k) { - errors.push(format!("duplicate source id found: {}", k)); + errors.push(format!("duplicate source id found: {k}")); } }); with.sinks.keys().for_each(|k| { if self.sinks.contains_key(k) { - errors.push(format!("duplicate sink id found: {}", k)); + errors.push(format!("duplicate sink id found: {k}")); } }); with.transforms.keys().for_each(|k| { if self.transforms.contains_key(k) { - errors.push(format!("duplicate transform id found: {}", k)); + errors.push(format!("duplicate transform id found: {k}")); } }); with.tests.iter().for_each(|wt| { @@ -358,7 +358,7 @@ impl ConfigBuilder { }); with.secret.keys().for_each(|k| { if self.secret.contains_key(k) { - errors.push(format!("duplicate secret id found: {}", k)); + errors.push(format!("duplicate secret id found: {k}")); } }); if !errors.is_empty() { diff --git a/src/config/cmd.rs b/src/config/cmd.rs index c5a2c0b79c9aa..725ec6123b4d1 100644 --- a/src/config/cmd.rs +++ b/src/config/cmd.rs @@ -237,13 +237,12 @@ mod tests { r#" [sources.in] type = "demo_logs" - format = "${{{}}}" + format = "${{{env_var}}}" [sinks.out] type = "blackhole" - inputs = ["${{{}}}"] - "#, - env_var, env_var_in_arr + inputs = ["${{{env_var_in_arr}}}"] + "# ); let (interpolated_config_source, _) = vars::interpolate( config_source.as_ref(), @@ -268,11 +267,11 @@ mod tests { assert_eq!( json["sources"]["in"]["format"], - json!(format!("${{{}}}", env_var)) + json!(format!("${{{env_var}}}")) ); assert_eq!( json["sinks"]["out"]["inputs"], - json!(vec![format!("${{{}}}", env_var_in_arr)]) + json!(vec![format!("${{{env_var_in_arr}}}")]) ); } @@ -310,18 +309,18 @@ mod tests { "{}/{}/{}", sources .iter() - .map(|source| format!("{}:{}", source, source)) + .map(|source| format!("{source}:{source}")) .collect::>() 
.join(","), transforms .iter() - .map(|transform| format!("{}:{}", transform, transform)) + .map(|transform| format!("{transform}:{transform}")) .chain(vec!["manually-added-remap:remap".to_string()]) .collect::>() .join(","), sinks .iter() - .map(|sink| format!("{}:{}", sink, sink)) + .map(|sink| format!("{sink}:{sink}")) .collect::>() .join(","), ); diff --git a/src/config/enterprise.rs b/src/config/enterprise.rs index 1dec6783f76d9..05eb56e656de7 100644 --- a/src/config/enterprise.rs +++ b/src/config/enterprise.rs @@ -213,15 +213,14 @@ enum ReportingError { impl Display for ReportingError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { - Self::Http(err) => write!(f, "{}", err), + Self::Http(err) => write!(f, "{err}"), Self::StatusCode(status) => { write!( f, - "Request was unsuccessful and could not be retried: {}", - status + "Request was unsuccessful and could not be retried: {status}" ) } - Self::EndpointError(err) => write!(f, "{}", err), + Self::EndpointError(err) => write!(f, "{err}"), Self::TooManyRedirects => { write!(f, "Too many redirects from the server") } @@ -469,9 +468,8 @@ fn setup_logs_reporting( .vector.configuration_key = "{configuration_key}" .vector.configuration_version_hash = "{configuration_version_hash}" .vector.version = "{vector_version}" - {} + {custom_logs_tags_vrl} "#, - custom_logs_tags_vrl, )), ..Default::default() }; @@ -564,9 +562,8 @@ fn setup_metrics_reporting( .tags.configuration_version_hash = "{configuration_version_hash}" .tags.configuration_key = "{configuration_key}" .tags.vector_version = "{vector_version}" - {} - "#, - custom_metric_tags_vrl + {custom_metric_tags_vrl} + "# )), ..Default::default() }; @@ -628,9 +625,9 @@ fn setup_metrics_reporting( fn convert_tags_to_vrl(tags: &IndexMap, is_metric: bool) -> String { let json_tags = serde_json::to_string(&tags).unwrap(); if is_metric { - format!(r#".tags = merge(.tags, {}, deep: true)"#, json_tags) + format!(r#".tags = merge(.tags, {json_tags}, deep: true)"#) } else { - format!(r#". = merge(., {}, deep: true)"#, json_tags) + format!(r#". = merge(., {json_tags}, deep: true)"#) } } @@ -735,10 +732,7 @@ fn build_request<'a>( .header("DD-API-KEY", auth.api_key) .body(Body::from(payload.json_string())) .unwrap_or_else(|_| { - panic!( - "couldn't create {} HTTP request. Please report", - DATADOG_REPORTING_PRODUCT - ) + panic!("couldn't create {DATADOG_REPORTING_PRODUCT} HTTP request. 
Please report") }) } diff --git a/src/config/format.rs b/src/config/format.rs index dcd6ce1f78a54..aeb50bf9da9ba 100644 --- a/src/config/format.rs +++ b/src/config/format.rs @@ -112,7 +112,7 @@ mod tests { for (input, expected) in cases { let output = Format::from_path(std::path::PathBuf::from(input)); - assert_eq!(expected, output.ok(), "{}", input) + assert_eq!(expected, output.ok(), "{input}") } } @@ -253,23 +253,20 @@ mod tests { Ok(expected) => { #[allow(clippy::expect_fun_call)] // false positive let output: ConfigBuilder = output.expect(&format!( - "expected Ok, got Err with format {:?} and input {:?}", - format, input + "expected Ok, got Err with format {format:?} and input {input:?}" )); let output_json = serde_json::to_value(output).unwrap(); let expected_output: ConfigBuilder = deserialize(expected, Format::Toml) .expect("Invalid TOML passed as an expectation"); let expected_json = serde_json::to_value(expected_output).unwrap(); - assert_eq!(expected_json, output_json, "{}", input) + assert_eq!(expected_json, output_json, "{input}") } Err(expected) => assert_eq!( expected, output.expect_err(&format!( - "expected Err, got Ok with format {:?} and input {:?}", - format, input + "expected Err, got Ok with format {format:?} and input {input:?}" )), - "{}", - input + "{input}" ), } } diff --git a/src/config/graph.rs b/src/config/graph.rs index ed4c92399aaa7..2209bf539315f 100644 --- a/src/config/graph.rs +++ b/src/config/graph.rs @@ -138,8 +138,7 @@ impl Graph { _ => panic!("only transforms and sinks have inputs"), }; Err(format!( - "Input \"{}\" for {} \"{}\" doesn't match any components.", - from, output_type, to + "Input \"{from}\" for {output_type} \"{to}\" doesn't match any components." )) } } @@ -288,7 +287,7 @@ impl Graph { for id in self.valid_inputs() { if let Some(_other) = mapped.insert(id.to_string(), id.clone()) { - errors.insert(format!("Input specifier {} is ambiguous", id)); + errors.insert(format!("Input specifier {id} is ambiguous")); } } diff --git a/src/config/loading/loader.rs b/src/config/loading/loader.rs index b418afa6c0e21..4158ae51cf051 100644 --- a/src/config/loading/loader.rs +++ b/src/config/loading/loader.rs @@ -105,8 +105,7 @@ pub(super) mod process { } Err(err) => { errors.push(format!( - "Could not read entry in config dir: {:?}, {}.", - path, err + "Could not read entry in config dir: {path:?}, {err}." 
                    ));
                }
            };
diff --git a/src/config/loading/mod.rs b/src/config/loading/mod.rs
index b91b5d6e463ac..b55b0e06362bd 100644
--- a/src/config/loading/mod.rs
+++ b/src/config/loading/mod.rs
@@ -30,7 +30,7 @@ pub static CONFIG_PATHS: Mutex> = Mutex::new(Vec::new());
 pub(super) fn read_dir + Debug>(path: P) -> Result> {
     path.as_ref()
         .read_dir()
-        .map_err(|err| vec![format!("Could not read config dir: {:?}, {}.", path, err)])
+        .map_err(|err| vec![format!("Could not read config dir: {path:?}, {err}.")])
 }

 pub(super) fn component_name + Debug>(path: P) -> Result> {
@@ -38,7 +38,7 @@ pub(super) fn component_name + Debug>(path: P) -> Result + Debug>(path: P) -> Option {
diff --git a/src/config/loading/secret.rs b/src/config/loading/secret.rs
index 54c83823e2218..12c3c48cbed21 100644
--- a/src/config/loading/secret.rs
+++ b/src/config/loading/secret.rs
@@ -58,18 +58,18 @@ impl SecretBackendLoader {
         let secrets = self.secret_keys.iter().flat_map(|(backend_name, keys)| {
             match self.backends.get_mut(&ComponentKey::from(backend_name.clone())) {
                 None => {
-                    vec![Err(format!("Backend \"{}\" is required for secret retrieval but was not found in config.", backend_name))]
+                    vec![Err(format!("Backend \"{backend_name}\" is required for secret retrieval but was not found in config."))]
                 },
                 Some(backend) => {
                     debug!(message = "Retrieving secret from a backend.", backend = ?backend_name);
                     match backend.retrieve(keys.clone(), signal_rx) {
                         Err(e) => {
-                            vec![Err(format!("Error while retrieving secret from backend \"{}\": {}.", backend_name, e))]
+                            vec![Err(format!("Error while retrieving secret from backend \"{backend_name}\": {e}."))]
                         },
                         Ok(s) => {
                             s.into_iter().map(|(k, v)| {
                                 trace!(message = "Successfully retrieved a secret.", backend = ?backend_name, secret_key = ?k);
-                                Ok((format!("{}.{}", backend_name, k), v))
+                                Ok((format!("{backend_name}.{k}"), v))
                             }).collect::>>()
                         }
                     }
diff --git a/src/config/loading/secret_backend_example.rs b/src/config/loading/secret_backend_example.rs
index 2466d7da387f7..4573df934fb3e 100644
--- a/src/config/loading/secret_backend_example.rs
+++ b/src/config/loading/secret_backend_example.rs
@@ -33,7 +33,7 @@ async fn main() {
         (
             secret.clone(),
             ExecResponse {
-                value: format!("{}.retrieved", secret),
+                value: format!("{secret}.retrieved"),
                 error: None,
             },
         )
diff --git a/src/config/mod.rs b/src/config/mod.rs
index 1a91ca21f4ef4..bfb78a4199c4d 100644
--- a/src/config/mod.rs
+++ b/src/config/mod.rs
@@ -323,10 +323,10 @@ impl Display for Protocol {
 impl Display for Resource {
     fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), fmt::Error> {
         match self {
-            Resource::Port(address, protocol) => write!(fmt, "{} {}", protocol, address),
+            Resource::Port(address, protocol) => write!(fmt, "{protocol} {address}"),
             Resource::SystemFdOffset(offset) => write!(fmt, "systemd {}th socket", offset + 1),
-            Resource::Fd(fd) => write!(fmt, "file descriptor: {}", fd),
-            Resource::DiskBuffer(name) => write!(fmt, "disk buffer {:?}", name),
+            Resource::Fd(fd) => write!(fmt, "file descriptor: {fd}"),
+            Resource::DiskBuffer(name) => write!(fmt, "disk buffer {name:?}"),
         }
     }
 }
@@ -388,8 +388,7 @@ impl TestDefinition {
                 outputs.push(output_id.clone());
             } else {
                 errors.push(format!(
-                    r#"Invalid extract_from target in test '{}': '{}' does not exist"#,
-                    name, from
+                    r#"Invalid extract_from target in test '{name}': '{from}' does not exist"#
                 ));
             }
         }
@@ -411,8 +410,7 @@ impl TestDefinition {
                     Some(output_id.clone())
                 } else {
                     errors.push(format!(
-                        r#"Invalid no_outputs_from target in test '{}': '{}' does not exist"#,
-                        name, o
+                        r#"Invalid no_outputs_from target in test '{name}': '{o}' does not exist"#
                     ));
                     None
                 }
@@ -1432,9 +1430,9 @@ mod resource_tests {
                 let json = serde_json::to_string_pretty(&schema)
                     .expect("rendering root schema to JSON should not fail");
-                println!("{}", json);
+                println!("{json}");
             }
-            Err(e) => eprintln!("error while generating schema: {:?}", e),
+            Err(e) => eprintln!("error while generating schema: {e:?}"),
         }
     }
 }
diff --git a/src/config/schema.rs b/src/config/schema.rs
index 2f7e61534bc8e..296f201a04216 100644
--- a/src/config/schema.rs
+++ b/src/config/schema.rs
@@ -147,13 +147,12 @@ mod test {
         let mut errors = vec![];
         a.append(b, &mut errors);
         if errors.is_empty() {
-            assert_eq!(Some(a), expected, "result mismatch: {}", test);
+            assert_eq!(Some(a), expected, "result mismatch: {test}");
         } else {
             assert_eq!(
                 errors.is_empty(),
                 expected.is_some(),
-                "error mismatch: {}",
-                test
+                "error mismatch: {test}"
             );
         }
     }
diff --git a/src/config/unit_test/mod.rs b/src/config/unit_test/mod.rs
index 188cc50339763..8d0658c3c8e43 100644
--- a/src/config/unit_test/mod.rs
+++ b/src/config/unit_test/mod.rs
@@ -117,7 +117,7 @@ pub async fn build_unit_tests(
             let mut test_error = errors.join("\n");
             // Indent all line breaks
             test_error = test_error.replace('\n', "\n ");
-            test_error.insert_str(0, &format!("Failed to build test '{}':\n ", test_name));
+            test_error.insert_str(0, &format!("Failed to build test '{test_name}':\n "));
             build_errors.push(test_error);
         }
     }
@@ -551,8 +551,7 @@ fn build_outputs(
             match condition.build(&Default::default()) {
                 Ok(condition) => conditions.push(condition),
                 Err(error) => errors.push(format!(
-                    "failed to create test condition '{}': {}",
-                    index, error
+                    "failed to create test condition '{index}': {error}"
                 )),
             }
         }
diff --git a/src/config/unit_test/unit_test_components.rs b/src/config/unit_test/unit_test_components.rs
index 69d61e67f0bc3..fe5103c2d6afb 100644
--- a/src/config/unit_test/unit_test_components.rs
+++ b/src/config/unit_test/unit_test_components.rs
@@ -214,8 +214,7 @@ impl StreamSink for UnitTestSink {
                             break;
                         }
                         Err(error) => {
-                            condition_errors
-                                .push(format!(" condition[{}]: {}", j, error));
+                            condition_errors.push(format!(" condition[{j}]: {error}"));
                         }
                     }
                 }
diff --git a/src/config/validation.rs b/src/config/validation.rs
index a7d76bfcd20fb..031ab5994573e 100644
--- a/src/config/validation.rs
+++ b/src/config/validation.rs
@@ -138,10 +138,7 @@ pub fn check_resources(config: &ConfigBuilder) -> Result<(), Vec> {
         Err(conflicting_components
             .into_iter()
             .map(|(resource, components)| {
-                format!(
-                    "Resource `{}` is claimed by multiple components: {:?}",
-                    resource, components
-                )
+                format!("Resource `{resource}` is claimed by multiple components: {components:?}")
             })
             .collect())
    }
diff --git a/src/config/vars.rs b/src/config/vars.rs
index 3b923a9687bfe..ce4ec88824e95 100644
--- a/src/config/vars.rs
+++ b/src/config/vars.rs
@@ -44,22 +44,20 @@ pub fn interpolate(
                 Some(v) if !v.is_empty() => v,
                 _ => {
                     errors.push(format!(
-                        "Non-empty env var required in config. name = {:?}, error = {:?}",
-                        name, def_or_err
+                        "Non-empty env var required in config. name = {name:?}, error = {def_or_err:?}"
                    ));
                    ""
                },
            }
            "?" => val.unwrap_or_else(|| {
                errors.push(format!(
-                    "Missing env var required in config. name = {:?}, error = {:?}",
-                    name, def_or_err
+                    "Missing env var required in config. name = {name:?}, error = {def_or_err:?}"
                ));
                ""
            }),
            _ => val.unwrap_or_else(|| {
                warnings
-                    .push(format!("Unknown env var in config. name = {:?}", name));
+                    .push(format!("Unknown env var in config. name = {name:?}"));
                ""
            }),
        }
diff --git a/src/docker.rs b/src/docker.rs
index 68eec29fb88aa..5d8c0fe8945bb 100644
--- a/src/docker.rs
+++ b/src/docker.rs
@@ -85,7 +85,7 @@ pub fn docker(host: Option, tls: Option) -> crate::Resu
            .map_err(Into::into)
            }
        }
-        Some(scheme) => Err(format!("Unknown scheme: {}", scheme).into()),
+        Some(scheme) => Err(format!("Unknown scheme: {scheme}").into()),
    }
 }
@@ -114,10 +114,7 @@ fn get_authority(url: &str) -> Result {
 async fn pull_image(docker: &Docker, image: &str, tag: &str) {
     let mut filters = HashMap::new();
-    filters.insert(
-        String::from("reference"),
-        vec![format!("{}:{}", image, tag)],
-    );
+    filters.insert(String::from("reference"), vec![format!("{image}:{tag}")]);

     let options = Some(ListImagesOptions {
         filters,
@@ -138,7 +135,7 @@ async fn pull_image(docker: &Docker, image: &str, tag: &str) {
         .for_each(|item| async move {
             let info = item.unwrap();
             if let Some(error) = info.error {
-                panic!("{:?}", error);
+                panic!("{error:?}");
             }
         })
         .await
@@ -180,7 +177,7 @@ impl Container {
     }

     pub fn bind(mut self, src: impl std::fmt::Display, dst: &str) -> Self {
-        let bind = format!("{}:{}", src, dst);
+        let bind = format!("{src}:{dst}");
         self.binds.get_or_insert_with(Vec::new).push(bind);
         self
     }
diff --git a/src/encoding_transcode.rs b/src/encoding_transcode.rs
index 7551da540ee5a..20601eeb17f35 100644
--- a/src/encoding_transcode.rs
+++ b/src/encoding_transcode.rs
@@ -292,10 +292,7 @@ mod tests {
         assert_eq!(
             d.decode_to_utf8(Bytes::from(problematic_input)),
-            Bytes::from(format!(
-                "{}{}123",
-                REPLACEMENT_CHARACTER, REPLACEMENT_CHARACTER
-            ))
+            Bytes::from(format!("{REPLACEMENT_CHARACTER}{REPLACEMENT_CHARACTER}123"))
         );
     }
diff --git a/src/enrichment_tables/file.rs b/src/enrichment_tables/file.rs
index bb599e703ef1f..00941bc74f4c5 100644
--- a/src/enrichment_tables/file.rs
+++ b/src/enrichment_tables/file.rs
@@ -140,10 +140,7 @@ impl FileConfig {
                     .from_utc_datetime(
                         &chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d")
                             .map_err(|_| {
-                                format!(
-                                    "unable to parse date {} found in row {}",
-                                    value, row
-                                )
+                                format!("unable to parse date {value} found in row {row}")
                             })?
                             .and_hms_opt(0, 0, 0)
                             .expect("invalid timestamp"),
@@ -156,10 +153,7 @@ impl FileConfig {
                     .from_utc_datetime(
                         &chrono::NaiveDate::parse_from_str(value, format)
                             .map_err(|_| {
-                                format!(
-                                    "unable to parse date {} found in row {}",
-                                    value, row
-                                )
+                                format!("unable to parse date {value} found in row {row}")
                             })?
                             .and_hms_opt(0, 0, 0)
                             .expect("invalid timestamp"),
@@ -171,9 +165,7 @@ impl FileConfig {
                     Conversion::parse(format, timezone).map_err(|err| err.to_string())?;
                 conversion
                     .convert(Bytes::copy_from_slice(value.as_bytes()))
-                    .map_err(|_| {
-                        format!("unable to parse {} found in row {}", value, row)
-                    })?
+                    .map_err(|_| format!("unable to parse {value} found in row {row}"))?
             }
         }
     }
@@ -348,7 +340,7 @@ impl File {
                })
                .collect::>()
                .join(", ");
-            Err(format!("field(s) '{}' missing from dataset", missing))
+            Err(format!("field(s) '{missing}' missing from dataset"))
        } else {
            Ok(normalized)
        }
diff --git a/src/expiring_hash_map.rs b/src/expiring_hash_map.rs
index c8bbfbf6e2925..a5830408e9d33 100644
--- a/src/expiring_hash_map.rs
+++ b/src/expiring_hash_map.rs
@@ -47,7 +47,7 @@ where
     K: Borrow,
     Q: ?Sized + Hash + Eq,
 {
-        self.map.get(k).map(|&(ref v, _)| v)
+        self.map.get(k).map(|(v, _)| v)
     }

     /// Get a mut reference to the value by key.
@@ -247,7 +247,7 @@ mod tests {
     async fn next_expired_does_not_wake_when_the_value_is_available_upfront() {
         let mut map = ExpiringHashMap::::default();
-        let a_minute_ago = Instant::now() - Duration::from_secs(60);
+        let a_minute_ago = Instant::now().checked_sub(Duration::from_secs(60)).unwrap();
         map.insert_at("key".to_owned(), "val".to_owned(), a_minute_ago);
         let mut fut = task::spawn(map.next_expired());
diff --git a/src/generate.rs b/src/generate.rs
index 3f6107e3b45f4..9beb8721d585d 100644
--- a/src/generate.rs
+++ b/src/generate.rs
@@ -130,8 +130,7 @@ pub(crate) fn generate_example(
             let (name, source_type) = if let Some(c_index) = source_expr.find(':') {
                 if c_index == 0 {
                     errs.push(format!(
-                        "failed to generate source '{}': empty name is not allowed",
-                        source_expr
+                        "failed to generate source '{source_expr}': empty name is not allowed"
                     ));
                     continue;
                 }
                 (
                     chopped_expr.drain(1..).collect(),
                 )
             } else {
-                (format!("source{}", i), source_expr.clone())
+                (format!("source{i}"), source_expr.clone())
             };
         source_names.push(name.clone());

         let mut example = match SourceDescription::example(&source_type) {
             Ok(example) => example,
             Err(err) => {
                 if err != ExampleError::MissingExample {
-                    errs.push(format!(
-                        "failed to generate source '{}': {}",
-                        source_type, err
-                    ));
+                    errs.push(format!("failed to generate source '{source_type}': {err}"));
                 }
                 Value::Table(Map::new())
             }
@@ -178,8 +174,7 @@ pub(crate) fn generate_example(
             let (name, transform_type) = if let Some(c_index) = transform_expr.find(':') {
                 if c_index == 0 {
                     errs.push(format!(
-                        "failed to generate transform '{}': empty name is not allowed",
-                        transform_expr
+                        "failed to generate transform '{transform_expr}': empty name is not allowed"
                     ));
                     continue;
                 }
                 (
                     chopped_expr.drain(1..).collect(),
                 )
             } else {
-                (format!("transform{}", i), transform_expr.clone())
+                (format!("transform{i}"), transform_expr.clone())
             };
         transform_names.push(name.clone());
@@ -213,8 +208,7 @@ pub(crate) fn generate_example(
             Err(err) => {
                 if err != ExampleError::MissingExample {
                     errs.push(format!(
-                        "failed to generate transform '{}': {}",
-                        transform_type, err
+                        "failed to generate transform '{transform_type}': {err}"
                     ));
                 }
                 Value::Table(Map::new())
@@ -246,8 +240,7 @@ pub(crate) fn generate_example(
             let (name, sink_type) = if let Some(c_index) = sink_expr.find(':') {
                 if c_index == 0 {
                     errs.push(format!(
-                        "failed to generate sink '{}': empty name is not allowed",
-                        sink_expr
+                        "failed to generate sink '{sink_expr}': empty name is not allowed"
                     ));
                     continue;
                 }
                 (
                     chopped_expr.drain(1..).collect(),
                 )
             } else {
-                (format!("sink{}", i), sink_expr.clone())
+                (format!("sink{i}"), sink_expr.clone())
             };

         let mut example = match SinkDescription::example(&sink_type) {
             Ok(example) => example,
             Err(err) => {
                 if err != ExampleError::MissingExample {
-                    errs.push(format!("failed to generate sink '{}': {}", sink_type, err));
+                    errs.push(format!("failed to generate sink '{sink_type}': {err}"));
                 }
                 Value::Table(Map::new())
             }
@@ -308,7 +301,7 @@ pub(crate) fn generate_example(
         match toml::to_string(&globals) {
             Ok(s) => s,
             Err(err) => {
-                errs.push(format!("failed to marshal globals: {}", err));
+                errs.push(format!("failed to marshal globals: {err}"));
                 return Err(errs);
             }
         }
@@ -323,7 +316,7 @@ pub(crate) fn generate_example(
             }
         }) {
             Ok(v) => builder = [builder, v].join("\n"),
-            Err(e) => errs.push(format!("failed to marshal sources: {}", e)),
+            Err(e) => errs.push(format!("failed to marshal sources: {e}")),
         }
     }
     if let Some(transforms) = config.transforms {
@@ -334,7 +327,7 @@ pub(crate) fn generate_example(
             }
         }) {
             Ok(v) => builder = [builder, v].join("\n"),
-            Err(e) => errs.push(format!("failed to marshal transforms: {}", e)),
+            Err(e) => errs.push(format!("failed to marshal transforms: {e}")),
         }
     }
     if let Some(sinks) = config.sinks {
@@ -345,7 +338,7 @@ pub(crate) fn generate_example(
             }
         }) {
             Ok(v) => builder = [builder, v].join("\n"),
-            Err(e) => errs.push(format!("failed to marshal sinks: {}", e)),
+            Err(e) => errs.push(format!("failed to marshal sinks: {e}")),
         }
     }
@@ -358,7 +351,7 @@ pub(crate) fn generate_example(
                 &file.as_ref().unwrap().join("\n")
             )
         }
-        Err(e) => errs.push(format!("failed to write to file: {}", e)),
+        Err(e) => errs.push(format!("failed to write to file: {e}")),
     };
 };
@@ -379,7 +372,7 @@ pub fn cmd(opts: &Opts) -> exitcode::ExitCode {
         Ok(s) => {
             #[allow(clippy::print_stdout)]
             {
-                println!("{}", s);
+                println!("{s}");
             }
             exitcode::OK
         }
@@ -416,7 +409,7 @@ mod tests {
         let mut errors = Vec::new();
         for name in SourceDescription::types() {
-            let param = format!("{}//", name);
+            let param = format!("{name}//");
             let cfg = generate_example(true, &param, &None, TransformInputsStrategy::Auto).unwrap();
             if let Err(error) = toml::from_str::(&cfg) {
                 errors.push((param, error));
@@ -424,7 +417,7 @@ mod tests {
         }
         for name in TransformDescription::types() {
-            let param = format!("/{}/", name);
+            let param = format!("/{name}/");
             let cfg = generate_example(true, &param, &None, TransformInputsStrategy::Auto).unwrap();
             if let Err(error) = toml::from_str::(&cfg) {
                 errors.push((param, error));
@@ -432,7 +425,7 @@ mod tests {
         }
         for name in SinkDescription::types() {
-            let param = format!("//{}", name);
+            let param = format!("//{name}");
             let cfg = generate_example(true, &param, &None, TransformInputsStrategy::Auto).unwrap();
             if let Err(error) = toml::from_str::(&cfg) {
                 errors.push((param, error));
@@ -442,7 +435,7 @@ mod tests {
         for (component, error) in &errors {
             #[allow(clippy::print_stdout)]
             {
-                println!("{:?} : {}", component, error);
+                println!("{component:?} : {error}");
             }
         }
         assert!(errors.is_empty());
diff --git a/src/generate_schema.rs b/src/generate_schema.rs
index 58e70563e0a86..1fa37f67cab71 100644
--- a/src/generate_schema.rs
+++ b/src/generate_schema.rs
@@ -10,14 +10,14 @@ pub fn cmd() -> exitcode::ExitCode {
             #[allow(clippy::print_stdout)]
             {
-                println!("{}", json);
+                println!("{json}");
             }
             exitcode::OK
         }
         Err(e) => {
             #[allow(clippy::print_stderr)]
             {
-                eprintln!("error while generating schema: {:?}", e);
+                eprintln!("error while generating schema: {e:?}");
             }
             exitcode::SOFTWARE
         }
diff --git a/src/graph.rs b/src/graph.rs
index e2cfd8ceef568..536e5b551577d 100644
--- a/src/graph.rs
+++ b/src/graph.rs
@@ -77,7 +77,7 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode {
         Err(errs) => {
             #[allow(clippy::print_stderr)]
             for err in errs {
-                eprintln!("{}", err);
+                eprintln!("{err}");
             }
             return exitcode::CONFIG;
         }
@@ -86,11 +86,11 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode {
     let mut dot = String::from("digraph {\n");
     for (id, _source) in config.sources() {
-        writeln!(dot, " \"{}\" [shape=trapezium]", id).expect("write to String never fails");
+        writeln!(dot, " \"{id}\" [shape=trapezium]").expect("write to String never fails");
     }
     for (id, transform) in config.transforms() {
-        writeln!(dot, " \"{}\" [shape=diamond]", id).expect("write to String never fails");
+        writeln!(dot, " \"{id}\" [shape=diamond]").expect("write to String never fails");
for input in transform.inputs.iter() { if let Some(port) = &input.port { @@ -101,14 +101,13 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { ) .expect("write to String never fails"); } else { - writeln!(dot, " \"{}\" -> \"{}\"", input, id) - .expect("write to String never fails"); + writeln!(dot, " \"{input}\" -> \"{id}\"").expect("write to String never fails"); } } } for (id, sink) in config.sinks() { - writeln!(dot, " \"{}\" [shape=invtrapezium]", id).expect("write to String never fails"); + writeln!(dot, " \"{id}\" [shape=invtrapezium]").expect("write to String never fails"); for input in &sink.inputs { if let Some(port) = &input.port { @@ -119,8 +118,7 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { ) .expect("write to String never fails"); } else { - writeln!(dot, " \"{}\" -> \"{}\"", input, id) - .expect("write to String never fails"); + writeln!(dot, " \"{input}\" -> \"{id}\"").expect("write to String never fails"); } } } @@ -129,7 +127,7 @@ pub(crate) fn cmd(opts: &Opts) -> exitcode::ExitCode { #[allow(clippy::print_stdout)] { - println!("{}", dot); + println!("{dot}"); } exitcode::OK diff --git a/src/http.rs b/src/http.rs index f5599735aed76..552c65bb4cf79 100644 --- a/src/http.rs +++ b/src/http.rs @@ -80,7 +80,7 @@ where let client = client_builder.build(proxy); let version = crate::get_version(); - let user_agent = HeaderValue::from_str(&format!("Vector/{}", version)) + let user_agent = HeaderValue::from_str(&format!("Vector/{version}")) .expect("Invalid header value for version!"); Ok(HttpClient { client, user_agent }) @@ -313,7 +313,7 @@ pub fn get_http_scheme_from_uri(uri: &Uri) -> &'static str { // it also supports arbitrary schemes, which is where we bomb out down here, since we can't generate a static // string for an arbitrary input string... and anything other than "http" and "https" makes no sense for an HTTP // client anyways. 
- s => panic!("invalid URI scheme for HTTP client: {}", s), + s => panic!("invalid URI scheme for HTTP client: {s}"), }) } diff --git a/src/internal_events/exec.rs b/src/internal_events/exec.rs index 99f41f143d490..6f5b41fb3d0ba 100644 --- a/src/internal_events/exec.rs +++ b/src/internal_events/exec.rs @@ -166,7 +166,7 @@ impl ExecFailedToSignalChild { match self { #[cfg(unix)] - SignalError(err) => format!("errno_{}", err), + SignalError(err) => format!("errno_{err}"), #[cfg(unix)] FailedToMarshalPid(_) => String::from("failed_to_marshal_pid"), #[cfg(unix)] @@ -183,9 +183,9 @@ impl std::fmt::Display for ExecFailedToSignalChild { match self { #[cfg(unix)] - SignalError(err) => write!(f, "errno: {}", err), + SignalError(err) => write!(f, "errno: {err}"), #[cfg(unix)] - FailedToMarshalPid(err) => write!(f, "failed to marshal pid to i32: {}", err), + FailedToMarshalPid(err) => write!(f, "failed to marshal pid to i32: {err}"), #[cfg(unix)] NoPid => write!(f, "child had no pid"), #[cfg(windows)] diff --git a/src/internal_events/http_client.rs b/src/internal_events/http_client.rs index 080243b30e64d..91a9c4d55d8d0 100644 --- a/src/internal_events/http_client.rs +++ b/src/internal_events/http_client.rs @@ -99,13 +99,13 @@ impl<'a, B: HttpBody> std::fmt::Display for FormatBody<'a, B> { let size = self.0.size_hint(); match (size.lower(), size.upper()) { (0, None) => write!(fmt, "[unknown]"), - (lower, None) => write!(fmt, "[>={} bytes]", lower), + (lower, None) => write!(fmt, "[>={lower} bytes]"), (0, Some(0)) => write!(fmt, "[empty]"), - (0, Some(upper)) => write!(fmt, "[<={} bytes]", upper), + (0, Some(upper)) => write!(fmt, "[<={upper} bytes]"), - (lower, Some(upper)) if lower == upper => write!(fmt, "[{} bytes]", lower), - (lower, Some(upper)) => write!(fmt, "[{}..={} bytes]", lower, upper), + (lower, Some(upper)) if lower == upper => write!(fmt, "[{lower} bytes]"), + (lower, Some(upper)) => write!(fmt, "[{lower}..={upper} bytes]"), } } } diff --git a/src/internal_events/prelude.rs b/src/internal_events/prelude.rs index 56612c3fd7f4e..b9d3701445ee2 100644 --- a/src/internal_events/prelude.rs +++ b/src/internal_events/prelude.rs @@ -6,7 +6,7 @@ feature = "sources-utils-http", ))] pub(crate) fn http_error_code(code: u16) -> String { - format!("http_response_{}", code) + format!("http_response_{code}") } pub(crate) fn io_error_code(error: &std::io::Error) -> &'static str { diff --git a/src/internal_events/template.rs b/src/internal_events/template.rs index 7ee283c3f40d4..e63de05c81fdb 100644 --- a/src/internal_events/template.rs +++ b/src/internal_events/template.rs @@ -15,7 +15,7 @@ impl<'a> InternalEvent for TemplateRenderingError<'a> { let mut msg = "Failed to render template".to_owned(); if let Some(field) = self.field { use std::fmt::Write; - let _ = write!(msg, " for \"{}\"", field); + let _ = write!(msg, " for \"{field}\""); } msg.push('.'); diff --git a/src/internal_telemetry/allocations/allocator/stack.rs b/src/internal_telemetry/allocations/allocator/stack.rs index 5479654501fe7..2ac8bab15410a 100644 --- a/src/internal_telemetry/allocations/allocator/stack.rs +++ b/src/internal_telemetry/allocations/allocator/stack.rs @@ -33,7 +33,7 @@ impl GroupStack { pub fn push(&mut self, group: AllocationGroupId) { self.current_top += 1; if self.current_top >= self.slots.len() { - panic!("tried to push new allocation group to the current stack, but hit the limit of {} entries", N); + panic!("tried to push new allocation group to the current stack, but hit the limit of {N} entries"); } 
        self.slots[self.current_top] = group;
    }
diff --git a/src/lib.rs b/src/lib.rs
index e41ee20dd383f..ba4b3eed92ca9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -144,12 +144,12 @@ pub fn get_version() -> String {
     // or full debug symbols. See the Cargo Book profiling section for value meaning:
     // https://doc.rust-lang.org/cargo/reference/profiles.html#debug
     let build_string = match built_info::DEBUG {
-        "1" => format!("{} debug=line", build_string),
-        "2" | "true" => format!("{} debug=full", build_string),
+        "1" => format!("{build_string} debug=line"),
+        "2" | "true" => format!("{build_string} debug=full"),
         _ => build_string,
     };

-    format!("{} ({})", pkg_version, build_string)
+    format!("{pkg_version} ({build_string})")
 }

 #[allow(warnings)]
diff --git a/src/line_agg.rs b/src/line_agg.rs
index b2ba83a525e7f..36b8ece119f96 100644
--- a/src/line_agg.rs
+++ b/src/line_agg.rs
@@ -720,7 +720,7 @@ mod tests {
             "START msg 1".to_string(), // will be stashed
         ];
         for i in 0..n {
-            lines.push(format!("line {}", i));
+            lines.push(format!("line {i}"));
         }
         let config = Config {
             start_pattern: Regex::new("").unwrap(),
@@ -731,7 +731,7 @@ mod tests {
         let mut expected = "START msg 1".to_string();
         for i in 0..n {
-            write!(expected, "\nline {}", i).expect("write to String never fails");
+            write!(expected, "\nline {i}").expect("write to String never fails");
         }

         let (mut send, recv) = futures::channel::mpsc::unbounded();
diff --git a/src/list.rs b/src/list.rs
index ad8b357bcfb10..09588be3a3f1a 100644
--- a/src/list.rs
+++ b/src/list.rs
@@ -39,22 +39,22 @@ pub fn cmd(opts: &Opts) -> exitcode::ExitCode {
         Format::Text => {
             println!("Sources:");
             for name in sources {
-                println!("- {}", name);
+                println!("- {name}");
             }

             println!("\nTransforms:");
             for name in transforms {
-                println!("- {}", name);
+                println!("- {name}");
             }

             println!("\nSinks:");
             for name in sinks {
-                println!("- {}", name);
+                println!("- {name}");
             }

             println!("\nEnrichment tables:");
             for name in enrichment_tables {
-                println!("- {}", name);
+                println!("- {name}");
             }
         }
         Format::Json => {
diff --git a/src/nats.rs b/src/nats.rs
index 5bad2c9182bd0..bf88c3b5a1b5e 100644
--- a/src/nats.rs
+++ b/src/nats.rs
@@ -63,7 +63,7 @@ impl std::fmt::Display for NatsAuthConfig {
             CredentialsFile { .. } => "credentials_file",
             Nkey { .. } => "nkey",
         };
-        write!(f, "{}", word)
+        write!(f, "{word}")
     }
 }
diff --git a/src/secrets/exec.rs b/src/secrets/exec.rs
index 9095ddc6efa90..e0236fce41dc9 100644
--- a/src/secrets/exec.rs
+++ b/src/secrets/exec.rs
@@ -76,18 +76,18 @@ impl SecretBackend for ExecBackend {
     for k in secret_keys.into_iter() {
         if let Some(secret) = output.get_mut(&k) {
             if let Some(e) = &secret.error {
-                return Err(format!("secret for key '{}' was not retrieved: {}", k, e).into());
+                return Err(format!("secret for key '{k}' was not retrieved: {e}").into());
             }
             if let Some(v) = secret.value.take() {
                 if v.is_empty() {
-                    return Err(format!("secret for key '{}' was empty", k).into());
+                    return Err(format!("secret for key '{k}' was empty").into());
                 }
                 secrets.insert(k.to_string(), v);
             } else {
-                return Err(format!("secret for key '{}' was empty", k).into());
+                return Err(format!("secret for key '{k}' was empty").into());
             }
         } else {
-            return Err(format!("secret for key '{}' was not retrieved", k).into());
+            return Err(format!("secret for key '{k}' was not retrieved").into());
         }
     }
     Ok(secrets)
@@ -148,7 +147,7 @@ async fn query_backend(
             match stdout {
                 None => break,
                 Some(Ok(b)) => output.extend(b),
-                Some(Err(e)) => return Err(format!("Error while reading from an exec backend stdout: {}.", e).into()),
+                Some(Err(e)) => return Err(format!("Error while reading from an exec backend stdout: {e}.").into()),
             }
         }
         _ = &mut timeout => {
diff --git a/src/serde.rs b/src/serde.rs
index ec2fc77835f9f..b6671008a2e31 100644
--- a/src/serde.rs
+++ b/src/serde.rs
@@ -89,7 +89,7 @@ impl Fields {
                 FieldsOrValue::Value(v) => Box::new(std::iter::once((k, v))),
                 FieldsOrValue::Fields(f) => Box::new(
                     f.all_fields()
-                        .map(move |(nested_k, v)| (format!("{}.{}", k, nested_k), v)),
+                        .map(move |(nested_k, v)| (format!("{k}.{nested_k}"), v)),
                 ),
             }
         })
diff --git a/src/sinks/amqp/integration_tests.rs b/src/sinks/amqp/integration_tests.rs
index ba8a9ee557022..4145867b30c6f 100644
--- a/src/sinks/amqp/integration_tests.rs
+++ b/src/sinks/amqp/integration_tests.rs
@@ -21,8 +21,7 @@ pub fn make_config() -> AmqpSinkConfig {
     let user = std::env::var("AMQP_USER").unwrap_or_else(|_| "guest".to_string());
     let pass = std::env::var("AMQP_PASSWORD").unwrap_or_else(|_| "guest".to_string());
     let vhost = std::env::var("AMQP_VHOST").unwrap_or_else(|_| "%2f".to_string());
-    config.connection.connection_string =
-        format!("amqp://{}:{}@rabbitmq:5672/{}", user, pass, vhost);
+    config.connection.connection_string = format!("amqp://{user}:{pass}@rabbitmq:5672/{vhost}");
     config
 }
diff --git a/src/sinks/apex/integration_tests.rs b/src/sinks/apex/integration_tests.rs
index 19c23a338b1bd..6783c4704ae20 100644
--- a/src/sinks/apex/integration_tests.rs
+++ b/src/sinks/apex/integration_tests.rs
@@ -23,7 +23,7 @@ fn mock_apex_api_token() -> String {
 }

 fn line_generator(index: usize) -> String {
-    format!("random line {}", index)
+    format!("random line {index}")
 }

 fn event_generator(index: usize) -> Event {
diff --git a/src/sinks/aws_cloudwatch_logs/integration_tests.rs b/src/sinks/aws_cloudwatch_logs/integration_tests.rs
index 5a585bca931e0..6d0e1bf326a40 100644
--- a/src/sinks/aws_cloudwatch_logs/integration_tests.rs
+++ b/src/sinks/aws_cloudwatch_logs/integration_tests.rs
@@ -339,7 +339,7 @@ async fn cloudwatch_insert_log_event_partitioned() {
     let stream_name = gen_name();
     let config = CloudwatchLogsSinkConfig {
         group_name: Template::try_from(GROUP_NAME).unwrap(),
-        stream_name: Template::try_from(format!("{}-{{{{key}}}}", stream_name)).unwrap(),
+        stream_name: Template::try_from(format!("{stream_name}-{{{{key}}}}")).unwrap(),
         region: RegionOrEndpoint::with_both("localstack", watchlogs_address().as_str()),
         encoding: TextSerializerConfig::default().into(),
         create_missing_group: None,
@@ -375,7 +375,7 @@ async fn cloudwatch_insert_log_event_partitioned() {
     let response = create_client_test()
         .await
         .get_log_events()
-        .log_stream_name(format!("{}-0", stream_name))
+        .log_stream_name(format!("{stream_name}-0"))
         .log_group_name(GROUP_NAME)
         .start_time(timestamp.timestamp_millis())
         .send()
@@ -399,7 +399,7 @@ async fn cloudwatch_insert_log_event_partitioned() {
     let response = create_client_test()
         .await
         .get_log_events()
-        .log_stream_name(format!("{}-1", stream_name))
+        .log_stream_name(format!("{stream_name}-1"))
         .log_group_name(GROUP_NAME)
         .start_time(timestamp.timestamp_millis())
         .send()
diff --git a/src/sinks/aws_cloudwatch_logs/service.rs b/src/sinks/aws_cloudwatch_logs/service.rs
index d177b2aae1923..e0a672eb62b88 100644
--- a/src/sinks/aws_cloudwatch_logs/service.rs
+++ b/src/sinks/aws_cloudwatch_logs/service.rs
@@ -70,13 +70,13 @@ pub enum CloudwatchError {
 impl fmt::Display for CloudwatchError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            CloudwatchError::Put(error) => write!(f, "CloudwatchError::Put: {}", error),
-            CloudwatchError::Describe(error) => write!(f, "CloudwatchError::Describe: {}", error),
+            CloudwatchError::Put(error) => write!(f, "CloudwatchError::Put: {error}"),
+            CloudwatchError::Describe(error) => write!(f, "CloudwatchError::Describe: {error}"),
             CloudwatchError::CreateStream(error) => {
-                write!(f, "CloudwatchError::CreateStream: {}", error)
+                write!(f, "CloudwatchError::CreateStream: {error}")
             }
             CloudwatchError::CreateGroup(error) => {
-                write!(f, "CloudwatchError::CreateGroup: {}", error)
+                write!(f, "CloudwatchError::CreateGroup: {error}")
             }
             CloudwatchError::NoStreamsFound => write!(f, "CloudwatchError: No Streams Found"),
         }
diff --git a/src/sinks/aws_cloudwatch_metrics/integration_tests.rs b/src/sinks/aws_cloudwatch_metrics/integration_tests.rs
index 197001885b347..89ba1e98bd9de 100644
--- a/src/sinks/aws_cloudwatch_metrics/integration_tests.rs
+++ b/src/sinks/aws_cloudwatch_metrics/integration_tests.rs
@@ -64,7 +64,7 @@ async fn cloudwatch_metrics_put_data() {
     let gauge_name = random_string(10);
     for i in 0..10 {
         let event = Event::Metric(Metric::new(
-            format!("gauge-{}", gauge_name),
+            format!("gauge-{gauge_name}"),
             MetricKind::Absolute,
             MetricValue::Gauge { value: i as f64 },
         ));
@@ -75,7 +75,7 @@ async fn cloudwatch_metrics_put_data() {
     for i in 0..10 {
         let event = Event::Metric(
             Metric::new(
-                format!("distribution-{}", distribution_name),
+                format!("distribution-{distribution_name}"),
                 MetricKind::Incremental,
                 MetricValue::Distribution {
                     samples: vector_core::samples![i as f64 => 100],
diff --git a/src/sinks/aws_kinesis/firehose/integration_tests.rs b/src/sinks/aws_kinesis/firehose/integration_tests.rs
index 71a992b326246..5f31a3ae84c6b 100644
--- a/src/sinks/aws_kinesis/firehose/integration_tests.rs
+++ b/src/sinks/aws_kinesis/firehose/integration_tests.rs
@@ -172,7 +172,7 @@ async fn ensure_elasticsearch_domain(domain_name: String) -> String {
             .expect("no domain status")
             .arn
             .expect("arn expected"),
-        Err(error) => panic!("Unable to create the Elasticsearch domain {:?}", error),
+        Err(error) => panic!("Unable to create the Elasticsearch domain {error:?}"),
     };

     // wait for ES to be available; it starts up when the ES domain is created
@@ -219,7 +219,7 @@ async fn ensure_elasticsearch_delivery_stream(
         .await
     {
         Ok(_) => (),
-        Err(error) => panic!("Unable to create the delivery stream {:?}", error),
+        Err(error) => panic!("Unable to create the delivery stream {error:?}"),
     };
 }
diff --git a/src/sinks/aws_kinesis/streams/integration_tests.rs b/src/sinks/aws_kinesis/streams/integration_tests.rs
index a9a66804e3729..479a3c27bf30b 100644
--- a/src/sinks/aws_kinesis/streams/integration_tests.rs
+++ b/src/sinks/aws_kinesis/streams/integration_tests.rs
@@ -194,7 +194,7 @@ async fn ensure_stream(stream_name: String) {
         .await
     {
         Ok(_) => (),
-        Err(error) => panic!("Unable to check the stream {:?}", error),
+        Err(error) => panic!("Unable to check the stream {error:?}"),
     };

     // Wait for localstack to persist stream, otherwise it returns ResourceNotFound errors
diff --git a/src/sinks/aws_s3/integration_tests.rs b/src/sinks/aws_s3/integration_tests.rs
index 4f3a698118b79..0b8ffaa4fa45f 100644
--- a/src/sinks/aws_s3/integration_tests.rs
+++ b/src/sinks/aws_s3/integration_tests.rs
@@ -425,9 +425,9 @@ async fn create_bucket(bucket: &str, object_lock_enabled: bool) {
         Err(err) => match err {
             SdkError::ServiceError { err, raw: _ } => match err.kind {
                 CreateBucketErrorKind::BucketAlreadyOwnedByYou(_) => {}
-                err => panic!("Failed to create bucket: {:?}", err),
+                err => panic!("Failed to create bucket: {err:?}"),
             },
-            err => panic!("Failed to create bucket: {:?}", err),
+            err => panic!("Failed to create bucket: {err:?}"),
         },
     }
 }
diff --git a/src/sinks/axiom.rs b/src/sinks/axiom.rs
index 597e9fedde8f9..06ca7690de625 100644
--- a/src/sinks/axiom.rs
+++ b/src/sinks/axiom.rs
@@ -197,7 +197,7 @@ mod integration_tests {
             email: email.clone(),
             password: password.clone(),
         };
-        let login_url = format!("{}/auth/signin/credentials", url);
+        let login_url = format!("{url}/auth/signin/credentials");
         let login_res = client
             .post(&login_url)
             .json(&login_payload)
             .send()
             .await
@@ -238,7 +238,7 @@ mod integration_tests {
             None => {
                 // Try to initialize the deployment
                 client
-                    .post(format!("{}/auth/init", url))
+                    .post(format!("{url}/auth/init"))
                    .json(&auth_init_payload)
                    .send()
                    .await
@@ -283,7 +283,7 @@ mod integration_tests {
             name: "Vector Test Token".to_string(),
         };
         let create_token_res: CreateTokenResponse = client
-            .post(format!("{}/api/v1/tokens/personal", url))
+            .post(format!("{url}/api/v1/tokens/personal"))
             .header("Cookie", session_cookie.clone())
             .json(&create_token_payload)
             .send()
             .await
@@ -327,8 +327,8 @@ mod integration_tests {
             description: "Vector Test Dataset".to_string(),
         };
         let create_dataset_res = client
-            .post(format!("{}/api/v1/datasets", url))
-            .header("Authorization", format!("Bearer {}", token))
+            .post(format!("{url}/api/v1/datasets"))
+            .header("Authorization", format!("Bearer {token}"))
             .json(&create_dataset_payload)
             .send()
             .await
@@ -392,13 +392,13 @@ mod integration_tests {
         }

         let query_req = QueryRequest {
-            apl: format!("['{}'] | order by _time desc | limit 2", dataset),
+            apl: format!("['{dataset}'] | order by _time desc | limit 2"),
             start_time: Utc::now() - Duration::minutes(10),
             end_time: Utc::now() + Duration::minutes(10),
         };
         let query_res: QueryResponse = client
-            .post(format!("{}/api/v1/datasets/_apl?format=legacy", url))
-            .header("Authorization", format!("Bearer {}", token))
+            .post(format!("{url}/api/v1/datasets/_apl?format=legacy"))
+            .header("Authorization", format!("Bearer {token}"))
             .json(&query_req)
             .send()
             .await
diff --git a/src/sinks/azure_blob/integration_tests.rs b/src/sinks/azure_blob/integration_tests.rs
index fc4b3ec809ad4..f184cc8af7f28 100644
--- a/src/sinks/azure_blob/integration_tests.rs
+++ b/src/sinks/azure_blob/integration_tests.rs
@@ -219,7 +219,7 @@ impl AzureBlobSinkConfig {
     pub async fn new_emulator() -> AzureBlobSinkConfig {
         let address = std::env::var("AZURE_ADDRESS").unwrap_or_else(|_| "localhost".into());
         let config = AzureBlobSinkConfig {
-            connection_string: Some(format!("UseDevelopmentStorage=true;DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://{}:10000/devstoreaccount1;QueueEndpoint=http://{}:10001/devstoreaccount1;TableEndpoint=http://{}:10002/devstoreaccount1;", address, address, address).into()),
+            connection_string: Some(format!("UseDevelopmentStorage=true;DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://{address}:10000/devstoreaccount1;QueueEndpoint=http://{address}:10001/devstoreaccount1;TableEndpoint=http://{address}:10002/devstoreaccount1;").into()),
             storage_account: None,
             container_name: "logs".to_string(),
             endpoint: None,
@@ -343,7 +343,7 @@ impl AzureBlobSinkConfig {
                     Ok(StatusCode::CONFLICT) => Ok(()),
                     _ => Err(format!("Unexpected status code {}", err.status())),
                 },
-                _ => Err(format!("Unexpected error {}", reason)),
+                _ => Err(format!("Unexpected error {reason}")),
             },
         };
diff --git a/src/sinks/azure_monitor_logs.rs b/src/sinks/azure_monitor_logs.rs
index 733c902fa6fff..ba975fe948436 100644
--- a/src/sinks/azure_monitor_logs.rs
+++ b/src/sinks/azure_monitor_logs.rs
@@ -302,10 +302,8 @@ impl AzureMonitorLogsSink {
         rfc1123date: &str,
         len: usize,
     ) -> crate::Result {
-        let string_to_hash = format!(
-            "POST\n{}\n{}\n{}:{}\n{}",
-            len, CONTENT_TYPE, X_MS_DATE, rfc1123date, RESOURCE
-        );
+        let string_to_hash =
+            format!("POST\n{len}\n{CONTENT_TYPE}\n{X_MS_DATE}:{rfc1123date}\n{RESOURCE}");
         let mut signer = sign::Signer::new(hash::MessageDigest::sha256(), &self.shared_key)?;

         signer.update(string_to_hash.as_bytes())?;
diff --git a/src/sinks/clickhouse/integration_tests.rs b/src/sinks/clickhouse/integration_tests.rs
index f231a00dd0c00..fe86cd34edee5 100644
--- a/src/sinks/clickhouse/integration_tests.rs
+++ b/src/sinks/clickhouse/integration_tests.rs
@@ -208,16 +208,15 @@ async fn insert_events_unix_timestamps_toml_config() {

     let config: ClickhouseConfig = toml::from_str(&format!(
         r#"
-host = "{}"
-table = "{}"
+host = "{host}"
+table = "{table}"
 compression = "none"
 [request]
 retry_attempts = 1
 [batch]
 max_events = 1
 [encoding]
-timestamp_format = "unix""#,
-        host, table
+timestamp_format = "unix""#
     ))
     .unwrap();
@@ -368,11 +367,10 @@ impl ClickhouseClient {
             .post(&self.host)
             //
             .body(format!(
-                "CREATE TABLE {}
-                     ({})
+                "CREATE TABLE {table}
+                     ({schema})
                      ENGINE = MergeTree()
-                     ORDER BY (host, timestamp);",
-                table, schema
+                     ORDER BY (host, timestamp);"
             ))
             .send()
             .await
@@ -387,7 +385,7 @@ impl ClickhouseClient {
         let response = self
             .client
             .post(&self.host)
-            .body(format!("SELECT * FROM {} FORMAT JSON", table))
+            .body(format!("SELECT * FROM {table} FORMAT JSON"))
             .send()
             .await
             .unwrap();
@@ -398,7 +396,7 @@ impl ClickhouseClient {
         let text = response.text().await.unwrap();
         match serde_json::from_str(&text) {
             Ok(value) => value,
-            Err(_) => panic!("json failed: {:?}", text),
+            Err(_) => panic!("json failed: {text:?}"),
         }
     }
 }
diff --git a/src/sinks/datadog/events/config.rs b/src/sinks/datadog/events/config.rs
index bb219c49f165c..0c7f372e365f5 100644
--- a/src/sinks/datadog/events/config.rs +++ b/src/sinks/datadog/events/config.rs @@ -111,7 +111,7 @@ impl DatadogEventsConfig { ); // We know this URI will be valid since we have just built it up ourselves. - http::Uri::try_from(format!("{}/api/v1/events", api_base_endpoint)).expect("URI not valid") + http::Uri::try_from(format!("{api_base_endpoint}/api/v1/events")).expect("URI not valid") } fn build_client(&self, proxy: &ProxyConfig) -> crate::Result { diff --git a/src/sinks/datadog/events/tests.rs b/src/sinks/datadog/events/tests.rs index 3a65fe5e2c250..ddeca92c3b38c 100644 --- a/src/sinks/datadog/events/tests.rs +++ b/src/sinks/datadog/events/tests.rs @@ -52,7 +52,7 @@ async fn start_test( let addr = next_addr(); // Swap out the endpoint so we can force send it // to our local server - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = Some(endpoint.clone()); let (sink, _) = config.build(cx).await.unwrap(); @@ -117,7 +117,7 @@ async fn api_key_in_metadata() { let addr = next_addr(); // Swap out the endpoint so we can force send it // to our local server - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = Some(endpoint.clone()); let (sink, _) = config.build(cx).await.unwrap(); diff --git a/src/sinks/datadog/logs/integration_tests.rs b/src/sinks/datadog/logs/integration_tests.rs index 78156c1cf5c77..657275d8f46c2 100644 --- a/src/sinks/datadog/logs/integration_tests.rs +++ b/src/sinks/datadog/logs/integration_tests.rs @@ -25,7 +25,7 @@ async fn to_real_v2_endpoint() { let (sink, _) = config.build(cx).await.unwrap(); let (batch, receiver) = BatchNotifier::new_with_receiver(); - let generator = |index| format!("this is a log with index {}", index); + let generator = |index| format!("this is a log with index {index}"); let (_, events) = generate_lines_with_stream(generator, 10, Some(batch)); run_and_assert_sink_compliance(sink, events, &SINK_TAGS).await; diff --git a/src/sinks/datadog/logs/tests.rs b/src/sinks/datadog/logs/tests.rs index 386357f7e25be..7f29cd678829e 100644 --- a/src/sinks/datadog/logs/tests.rs +++ b/src/sinks/datadog/logs/tests.rs @@ -96,7 +96,7 @@ async fn start_test_detail( let addr = next_addr(); // Swap out the endpoint so we can force send it // to our local server - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = Some(endpoint.clone()); let (sink, _) = config.build(cx).await.unwrap(); @@ -235,7 +235,7 @@ async fn api_key_in_metadata_inner(api_status: ApiStatus) { let addr = next_addr(); // Swap out the endpoint so we can force send it to our local server - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = Some(endpoint.clone()); let (sink, _) = config.build(cx).await.unwrap(); @@ -247,7 +247,7 @@ async fn api_key_in_metadata_inner(api_status: ApiStatus) { let api_key = "0xDECAFBAD"; let events = events.map(|mut e| { - println!("EVENT: {:?}", e); + println!("EVENT: {e:?}"); e.iter_logs_mut().for_each(|log| { log.metadata_mut().set_datadog_api_key(Arc::from(api_key)); }); @@ -315,7 +315,7 @@ async fn multiple_api_keys_inner(api_status: ApiStatus) { let addr = next_addr(); // Swap out the endpoint so we can force send it // to our local server - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = Some(endpoint.clone()); let (sink, _) = config.build(cx).await.unwrap(); @@ -377,7 +377,7 @@ async fn 
enterprise_headers_inner(api_status: ApiStatus) { let addr = next_addr(); // Swap out the endpoint so we can force send it to our local server - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = Some(endpoint.clone()); let (sink, _) = config.build(cx).await.unwrap(); @@ -389,7 +389,7 @@ async fn enterprise_headers_inner(api_status: ApiStatus) { let api_key = "0xDECAFBAD"; let events = events.map(|mut e| { - println!("EVENT: {:?}", e); + println!("EVENT: {e:?}"); e.iter_logs_mut().for_each(|log| { log.metadata_mut().set_datadog_api_key(Arc::from(api_key)); }); @@ -440,7 +440,7 @@ async fn no_enterprise_headers_inner(api_status: ApiStatus) { let addr = next_addr(); // Swap out the endpoint so we can force send it to our local server - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = Some(endpoint.clone()); let (sink, _) = config.build(cx).await.unwrap(); @@ -452,7 +452,7 @@ async fn no_enterprise_headers_inner(api_status: ApiStatus) { let api_key = "0xDECAFBAD"; let events = events.map(|mut e| { - println!("EVENT: {:?}", e); + println!("EVENT: {e:?}"); e.iter_logs_mut().for_each(|log| { log.metadata_mut().set_datadog_api_key(Arc::from(api_key)); }); diff --git a/src/sinks/datadog/metrics/config.rs b/src/sinks/datadog/metrics/config.rs index e07f0effd644d..1e144101d88f5 100644 --- a/src/sinks/datadog/metrics/config.rs +++ b/src/sinks/datadog/metrics/config.rs @@ -293,7 +293,7 @@ impl DatadogMetricsConfig { } fn build_uri(host: &str, endpoint: &str) -> crate::Result { - let result = format!("{}{}", host, endpoint) + let result = format!("{host}{endpoint}") .parse::() .context(UriParseSnafu)?; Ok(result) diff --git a/src/sinks/datadog/metrics/encoder.rs b/src/sinks/datadog/metrics/encoder.rs index 8deb6ef9c516e..e669e10447309 100644 --- a/src/sinks/datadog/metrics/encoder.rs +++ b/src/sinks/datadog/metrics/encoder.rs @@ -395,7 +395,7 @@ fn encode_tags(tags: &MetricTags) -> Vec { let mut pairs: Vec<_> = tags .iter_all() .map(|(name, value)| match value { - Some(value) => format!("{}:{}", name, value), + Some(value) => format!("{name}:{value}"), None => name.into(), }) .collect(); diff --git a/src/sinks/datadog/metrics/integration_tests.rs b/src/sinks/datadog/metrics/integration_tests.rs index c2fed5e958744..84f2b799b805b 100644 --- a/src/sinks/datadog/metrics/integration_tests.rs +++ b/src/sinks/datadog/metrics/integration_tests.rs @@ -62,7 +62,7 @@ async fn start_test( let addr = next_addr(); // Swap out the endpoint so we can force send it // to our local server - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = Some(endpoint.clone()); let (sink, _) = config.build(cx).await.unwrap(); diff --git a/src/sinks/datadog/metrics/service.rs b/src/sinks/datadog/metrics/service.rs index f107c7413bd9c..844a48731664b 100644 --- a/src/sinks/datadog/metrics/service.rs +++ b/src/sinks/datadog/metrics/service.rs @@ -48,7 +48,7 @@ impl RetryLogic for DatadogMetricsRetryLogic { format!("{}: {}", status, String::from_utf8_lossy(&response.body)).into(), ), _ if status.is_success() => RetryAction::Successful, - _ => RetryAction::DontRetry(format!("response status: {}", status).into()), + _ => RetryAction::DontRetry(format!("response status: {status}").into()), } } } diff --git a/src/sinks/datadog/traces/config.rs b/src/sinks/datadog/traces/config.rs index 24280570560c2..9f4f2570ab47d 100644 --- a/src/sinks/datadog/traces/config.rs +++ 
b/src/sinks/datadog/traces/config.rs @@ -278,7 +278,7 @@ impl SinkConfig for DatadogTracesConfig { } fn build_uri(host: &str, endpoint: &str) -> crate::Result { - let result = format!("{}{}", host, endpoint) + let result = format!("{host}{endpoint}") .parse::() .context(UriParseSnafu)?; Ok(result) diff --git a/src/sinks/datadog/traces/service.rs b/src/sinks/datadog/traces/service.rs index 509c909a87d4d..bf503d47ec88d 100644 --- a/src/sinks/datadog/traces/service.rs +++ b/src/sinks/datadog/traces/service.rs @@ -49,7 +49,7 @@ impl RetryLogic for TraceApiRetry { format!("{}: {}", status, String::from_utf8_lossy(&response.body)).into(), ), _ if status.is_success() => RetryAction::Successful, - _ => RetryAction::DontRetry(format!("response status: {}", status).into()), + _ => RetryAction::DontRetry(format!("response status: {status}").into()), } } } diff --git a/src/sinks/datadog_archives.rs b/src/sinks/datadog_archives.rs index 3d262f703d801..2c6daff44cb37 100644 --- a/src/sinks/datadog_archives.rs +++ b/src/sinks/datadog_archives.rs @@ -364,7 +364,7 @@ impl DatadogArchivesSinkConfig { match s3_options.storage_class { Some(class @ S3StorageClass::DeepArchive) | Some(class @ S3StorageClass::Glacier) => { return Err(ConfigError::UnsupportedStorageClass { - storage_class: format!("{:?}", class), + storage_class: format!("{class:?}"), }); } _ => (), @@ -1161,7 +1161,7 @@ mod tests { } else { assert_eq!( res.err().unwrap().to_string(), - format!(r#"Unsupported storage class: {:?}"#, class) + format!(r#"Unsupported storage class: {class:?}"#) ); } } diff --git a/src/sinks/elasticsearch/common.rs b/src/sinks/elasticsearch/common.rs index 101a3d590a11a..02f8d86c2ea6f 100644 --- a/src/sinks/elasticsearch/common.rs +++ b/src/sinks/elasticsearch/common.rs @@ -50,7 +50,7 @@ impl ElasticsearchCommon { version: &mut Option, ) -> crate::Result { // Test the configured host, but ignore the result - let uri = format!("{}/_test", endpoint); + let uri = format!("{endpoint}/_test"); let uri = uri .parse::() .with_context(|_| InvalidHostSnafu { host: endpoint })?; @@ -292,7 +292,7 @@ async fn get_version( "/_cluster/state/version", ) .await - .map_err(|error| format!("Failed to get Elasticsearch API version: {}", error))?; + .map_err(|error| format!("Failed to get Elasticsearch API version: {error}"))?; let (_, body) = response.into_parts(); let mut body = body::aggregate(body).await?; @@ -310,7 +310,7 @@ async fn get( client: HttpClient, path: &str, ) -> crate::Result> { - let mut builder = Request::get(format!("{}{}", base_url, path)); + let mut builder = Request::get(format!("{base_url}{path}")); if let Some(authorization) = &http_auth { builder = authorization.apply_builder(builder); diff --git a/src/sinks/elasticsearch/config.rs b/src/sinks/elasticsearch/config.rs index bbd7e44616c19..006135d8ae74d 100644 --- a/src/sinks/elasticsearch/config.rs +++ b/src/sinks/elasticsearch/config.rs @@ -459,7 +459,7 @@ impl DataStreamConfig { .or_else(|| self.namespace(log))?; (dtype, dataset, namespace) }; - Some(format!("{}-{}-{}", dtype, dataset, namespace)) + Some(format!("{dtype}-{dataset}-{namespace}")) } } diff --git a/src/sinks/elasticsearch/encoder.rs b/src/sinks/elasticsearch/encoder.rs index 8e0bde24255c0..3de1695c10e81 100644 --- a/src/sinks/elasticsearch/encoder.rs +++ b/src/sinks/elasticsearch/encoder.rs @@ -101,25 +101,22 @@ fn write_bulk_action( (Some(id), true) => { write!( writer, - r#"{{"{}":{{"_index":"{}","_id":"{}"}}}}"#, - bulk_action, index, id + 
r#"{{"{bulk_action}":{{"_index":"{index}","_id":"{id}"}}}}"# ) } (Some(id), false) => { write!( writer, - r#"{{"{}":{{"_index":"{}","_type":"{}","_id":"{}"}}}}"#, - bulk_action, index, doc_type, id + r#"{{"{bulk_action}":{{"_index":"{index}","_type":"{doc_type}","_id":"{id}"}}}}"# ) } (None, true) => { - write!(writer, r#"{{"{}":{{"_index":"{}"}}}}"#, bulk_action, index) + write!(writer, r#"{{"{bulk_action}":{{"_index":"{index}"}}}}"#) } (None, false) => { write!( writer, - r#"{{"{}":{{"_index":"{}","_type":"{}"}}}}"#, - bulk_action, index, doc_type + r#"{{"{bulk_action}":{{"_index":"{index}","_type":"{doc_type}"}}}}"# ) } }, diff --git a/src/sinks/elasticsearch/integration_tests.rs b/src/sinks/elasticsearch/integration_tests.rs index a10936240ca58..4ed7af3e51622 100644 --- a/src/sinks/elasticsearch/integration_tests.rs +++ b/src/sinks/elasticsearch/integration_tests.rs @@ -170,7 +170,7 @@ async fn structures_events_correctly() { flush(common).await.unwrap(); let response = reqwest::Client::new() - .get(&format!("{}/{}/_search", base_url, index)) + .get(&format!("{base_url}/{index}/_search")) .json(&json!({ "query": { "query_string": { "query": "*" } } })) @@ -582,7 +582,7 @@ async fn run_insert_tests_with_config( let client = create_http_client(); let mut response = client - .get(&format!("{}/{}/_search", base_url, index)) + .get(&format!("{base_url}/{index}/_search")) .basic_auth("elastic", Some("vector")) .json(&json!({ "query": { "query_string": { "query": "*" } } @@ -671,7 +671,7 @@ async fn run_insert_tests_with_multiple_endpoints(config: &ElasticsearchConfig) let mut total = 0; for base_url in base_urls { if let Ok(response) = client - .get(&format!("{}/{}/_search", base_url, index)) + .get(&format!("{base_url}/{index}/_search")) .basic_auth("elastic", Some("vector")) .json(&json!({ "query": { "query_string": { "query": "*" } } diff --git a/src/sinks/elasticsearch/mod.rs b/src/sinks/elasticsearch/mod.rs index 2121548ddac67..ef5ce6935b942 100644 --- a/src/sinks/elasticsearch/mod.rs +++ b/src/sinks/elasticsearch/mod.rs @@ -111,7 +111,7 @@ impl TryFrom<&str> for BulkAction { match input { "index" => Ok(BulkAction::Index), "create" => Ok(BulkAction::Create), - _ => Err(format!("Invalid bulk action: {}", input)), + _ => Err(format!("Invalid bulk action: {input}")), } } } diff --git a/src/sinks/elasticsearch/retry.rs b/src/sinks/elasticsearch/retry.rs index bd6035087c91e..2215049441823 100644 --- a/src/sinks/elasticsearch/retry.rs +++ b/src/sinks/elasticsearch/retry.rs @@ -17,10 +17,7 @@ struct EsResultResponse { impl EsResultResponse { fn parse(body: &str) -> Result { serde_json::from_str::(body).map_err(|json_error| { - format!( - "some messages failed, could not parse response, error: {}", - json_error - ) + format!("some messages failed, could not parse response, error: {json_error}") }) } @@ -45,7 +42,7 @@ impl EsResultResponse { .find_map(|item| item.result().error.as_ref()) { Some(error) => format!("error type: {}, reason: {}", error.err_type, error.reason), - None => format!("error response: {}", body), + None => format!("error response: {body}"), } } } @@ -112,7 +109,7 @@ impl RetryLogic for ElasticsearchRetryLogic { ), _ if status.is_client_error() => { let body = String::from_utf8_lossy(response.http_response.body()); - RetryAction::DontRetry(format!("client-side error, {}: {}", status, body).into()) + RetryAction::DontRetry(format!("client-side error, {status}: {body}").into()) } _ if status.is_success() => { let body = String::from_utf8_lossy(response.http_response.body()); 
@@ -136,7 +133,7 @@ impl RetryLogic for ElasticsearchRetryLogic { status, error.err_type, error.reason ) } else { - format!("partial error, status: {}", status) + format!("partial error, status: {status}") }; return RetryAction::Retry(msg.into()); } @@ -150,7 +147,7 @@ impl RetryLogic for ElasticsearchRetryLogic { RetryAction::Successful } } - _ => RetryAction::DontRetry(format!("response status: {}", status).into()), + _ => RetryAction::DontRetry(format!("response status: {status}").into()), } } } diff --git a/src/sinks/gcp/chronicle_unstructured.rs b/src/sinks/gcp/chronicle_unstructured.rs index b785dde1cf252..8d96074404ecc 100644 --- a/src/sinks/gcp/chronicle_unstructured.rs +++ b/src/sinks/gcp/chronicle_unstructured.rs @@ -611,13 +611,13 @@ mod integration_tests { async fn request(method: Method, path: &str, log_type: &str) -> Response { let address = std::env::var(ADDRESS_ENV_VAR).unwrap(); - let url = format!("{}/{}", address, path); + let url = format!("{address}/{path}"); Client::new() .request(method.clone(), &url) .query(&[("log_type", log_type)]) .send() .await - .unwrap_or_else(|_| panic!("Sending {} request to {} failed", method, url)) + .unwrap_or_else(|_| panic!("Sending {method} request to {url} failed")) } async fn pull_messages(log_type: &str) -> Vec { diff --git a/src/sinks/gcp/pubsub.rs b/src/sinks/gcp/pubsub.rs index 6bd881e0b49c6..acb1d87aa1a24 100644 --- a/src/sinks/gcp/pubsub.rs +++ b/src/sinks/gcp/pubsub.rs @@ -334,7 +334,7 @@ mod integration_tests { trace_init(); let (topic, _subscription) = create_topic_subscription().await; - let (sink, _healthcheck) = config_build(&format!("BREAK{}BREAK", topic)).await; + let (sink, _healthcheck) = config_build(&format!("BREAK{topic}BREAK")).await; // Explicitly skip healthcheck let (batch, mut receiver) = BatchNotifier::new_with_receiver(); @@ -348,7 +348,7 @@ mod integration_tests { trace_init(); let (topic, _subscription) = create_topic_subscription().await; - let topic = format!("BAD{}", topic); + let topic = format!("BAD{topic}"); let (_sink, healthcheck) = config_build(&topic).await; healthcheck.await.expect_err("Health check did not fail"); } @@ -356,15 +356,15 @@ mod integration_tests { async fn create_topic_subscription() -> (String, String) { let topic = format!("topic-{}", random_string(10)); let subscription = format!("subscription-{}", random_string(10)); - request(Method::PUT, &format!("topics/{}", topic), json!({})) + request(Method::PUT, &format!("topics/{topic}"), json!({})) .await .json::() .await .expect("Creating new topic failed"); request( Method::PUT, - &format!("subscriptions/{}", subscription), - json!({ "topic": format!("projects/{}/topics/{}", PROJECT, topic) }), + &format!("subscriptions/{subscription}"), + json!({ "topic": format!("projects/{PROJECT}/topics/{topic}") }), ) .await .json::() @@ -380,13 +380,13 @@ mod integration_tests { .json(&json) .send() .await - .unwrap_or_else(|_| panic!("Sending {} request to {} failed", method, url)) + .unwrap_or_else(|_| panic!("Sending {method} request to {url} failed")) } async fn pull_messages(subscription: &str, count: usize) -> PullResponse { request( Method::POST, - &format!("subscriptions/{}:pull", subscription), + &format!("subscriptions/{subscription}:pull"), json!({ "returnImmediately": true, "maxMessages": count diff --git a/src/sinks/gcp/stackdriver_logs.rs b/src/sinks/gcp/stackdriver_logs.rs index 557a691328426..48a43a39cd885 100644 --- a/src/sinks/gcp/stackdriver_logs.rs +++ b/src/sinks/gcp/stackdriver_logs.rs @@ -396,10 +396,10 @@ impl 
StackdriverConfig { let log_id = self.log_id.render_string(event)?; Ok(match &self.log_name { - BillingAccount(acct) => format!("billingAccounts/{}/logs/{}", acct, log_id), - Folder(folder) => format!("folders/{}/logs/{}", folder, log_id), - Organization(org) => format!("organizations/{}/logs/{}", org, log_id), - Project(project) => format!("projects/{}/logs/{}", project, log_id), + BillingAccount(acct) => format!("billingAccounts/{acct}/logs/{log_id}"), + Folder(folder) => format!("folders/{folder}/logs/{log_id}"), + Organization(org) => format!("organizations/{org}/logs/{log_id}"), + Project(project) => format!("projects/{project}/logs/{log_id}"), }) } } @@ -558,9 +558,7 @@ mod tests { assert_eq!( remap_severity(s.into()), Value::Integer(n), - "remap_severity({:?}) != {}", - s, - n + "remap_severity({s:?}) != {n}" ); } } diff --git a/src/sinks/gcs_common/config.rs b/src/sinks/gcs_common/config.rs index 51a73c2bfeb4c..763592a149889 100644 --- a/src/sinks/gcs_common/config.rs +++ b/src/sinks/gcs_common/config.rs @@ -158,7 +158,7 @@ impl RetryLogic for GcsRetryLogic { } _ if status.is_server_error() => RetryAction::Retry(status.to_string().into()), _ if status.is_success() => RetryAction::Successful, - _ => RetryAction::DontRetry(format!("response status: {}", status).into()), + _ => RetryAction::DontRetry(format!("response status: {status}").into()), } } } diff --git a/src/sinks/honeycomb.rs b/src/sinks/honeycomb.rs index ae489e008a9d9..14bdb0ff342bf 100644 --- a/src/sinks/honeycomb.rs +++ b/src/sinks/honeycomb.rs @@ -202,11 +202,7 @@ async fn healthcheck(config: HoneycombConfig, client: HttpClient) -> crate::Resu } else { let body = String::from_utf8_lossy(&body[..]); - Err(format!( - "Server returned unexpected error status: {} body: {}", - status, body - ) - .into()) + Err(format!("Server returned unexpected error status: {status} body: {body}").into()) } } #[cfg(test)] diff --git a/src/sinks/http.rs b/src/sinks/http.rs index 61d5f87206881..d8ad897d2a679 100644 --- a/src/sinks/http.rs +++ b/src/sinks/http.rs @@ -990,16 +990,13 @@ mod tests { let config = format!( r#" - uri = "http://{addr}/frames" + uri = "http://{in_addr}/frames" compression = "gzip" framing.method = "newline_delimited" encoding.codec = "json" method = "{method}" - {extras} + {extra_config} "#, - addr = in_addr, - extras = extra_config, - method = method ); let config: HttpSinkConfig = toml::from_str(&config).unwrap(); diff --git a/src/sinks/humio/logs.rs b/src/sinks/humio/logs.rs index 8988a227651b9..7e9bf05d4c772 100644 --- a/src/sinks/humio/logs.rs +++ b/src/sinks/humio/logs.rs @@ -471,7 +471,7 @@ mod integration_tests { humio_address(), repository_name ); - let search_query = format!(r#"message="{}""#, message); + let search_query = format!(r#"message="{message}""#); // events are not available to search API immediately // poll up 200 times for event to show up @@ -492,10 +492,7 @@ mod integration_tests { return logs[0].clone(); } } - panic!( - "did not find event in Humio repository {} with message {}", - repository_name, message - ); + panic!("did not find event in Humio repository {repository_name} with message {message}"); } #[derive(Debug)] diff --git a/src/sinks/humio/metrics.rs b/src/sinks/humio/metrics.rs index ece3a16d2cf6a..a9247852825f2 100644 --- a/src/sinks/humio/metrics.rs +++ b/src/sinks/humio/metrics.rs @@ -274,7 +274,7 @@ mod tests { let addr = test_util::next_addr(); // Swap out the endpoint so we can force send it // to our local server - config.endpoint = format!("http://{}", addr); + 
config.endpoint = format!("http://{addr}"); let (sink, _) = config.build(cx).await.unwrap(); @@ -340,7 +340,7 @@ mod tests { let addr = test_util::next_addr(); // Swap out the endpoint so we can force send it // to our local server - config.endpoint = format!("http://{}", addr); + config.endpoint = format!("http://{addr}"); let (sink, _) = config.build(cx).await.unwrap(); diff --git a/src/sinks/influxdb/logs.rs b/src/sinks/influxdb/logs.rs index 054b2e797986a..8b0acf612bbb3 100644 --- a/src/sinks/influxdb/logs.rs +++ b/src/sinks/influxdb/logs.rs @@ -268,7 +268,7 @@ impl InfluxDbLogsConfig { For example, you can use `measurement=.vector` for the \ same effect." ); - Ok(format!("{}.vector", namespace)) + Ok(format!("{namespace}.vector")) } (None, None) => Err("The `measurement` option is required."), } @@ -662,7 +662,7 @@ mod tests { let addr = next_addr(); // Swap out the host so we can force send it // to our local server - let host = format!("http://{}", addr); + let host = format!("http://{addr}"); config.endpoint = host; let (sink, _) = config.build(cx).await.unwrap(); @@ -681,7 +681,7 @@ mod tests { // Create 5 events with custom field for (i, line) in lines.iter().enumerate() { let mut event = LogEvent::from(line.to_string()).with_batch_notifier(&batch); - event.insert(format!("key{}", i).as_str(), format!("value{}", i)); + event.insert(format!("key{i}").as_str(), format!("value{i}")); let timestamp = Utc .ymd(1970, 1, 1) @@ -728,7 +728,7 @@ mod tests { assert_fields( line_protocol.2.to_string(), [ - &*format!("key{}=\"value{}\"", i, i), + &*format!("key{i}=\"value{i}\""), "message=\"message_value\"", ] .to_vec(), @@ -834,7 +834,7 @@ mod integration_tests { .unwrap(); let res = client - .post(format!("{}/api/v2/query?org=my-org", endpoint)) + .post(format!("{endpoint}/api/v2/query?org=my-org")) .json(&body) .header("accept", "application/json") .header("Authorization", "Token my-token") diff --git a/src/sinks/influxdb/metrics.rs b/src/sinks/influxdb/metrics.rs index cde8b4066345a..0c95eee9fa79c 100644 --- a/src/sinks/influxdb/metrics.rs +++ b/src/sinks/influxdb/metrics.rs @@ -216,7 +216,7 @@ fn create_build_request( token: &str, ) -> impl Fn(Bytes) -> BoxFuture<'static, crate::Result>> + Sync + Send + 'static { - let auth = format!("Token {}", token); + let auth = format!("Token {token}"); move |body| { Box::pin(ready( hyper::Request::post(uri.clone()) @@ -417,7 +417,7 @@ fn encode_distribution(samples: &[Sample], quantiles: &[f64]) -> Option unreachable!(), }; let timestamp = format_timestamp(metric.timestamp().unwrap(), SecondsFormat::Nanos); - let res = - query_v1_json(url, &format!("select * from {}..\"{}\"", database, name)).await; + let res = query_v1_json(url, &format!("select * from {database}..\"{name}\"")).await; assert_eq!( res, @@ -1119,7 +1118,7 @@ mod integration_tests { run_and_assert_sink_compliance(sink, stream::iter(events), &HTTP_SINK_TAGS).await; let mut body = std::collections::HashMap::new(); - body.insert("query", format!("from(bucket:\"my-bucket\") |> range(start: 0) |> filter(fn: (r) => r._measurement == \"ns.{}\")", metric)); + body.insert("query", format!("from(bucket:\"my-bucket\") |> range(start: 0) |> filter(fn: (r) => r._measurement == \"ns.{metric}\")")); body.insert("type", "flux".to_owned()); let client = reqwest::Client::builder() @@ -1167,7 +1166,7 @@ mod integration_tests { .position(|&r| r.trim() == "_measurement") .unwrap()] .trim(), - format!("ns.{}", metric) + format!("ns.{metric}") ); assert_eq!( record[header.iter().position(|&r| r.trim() == 
"_field").unwrap()].trim(), @@ -1182,7 +1181,7 @@ mod integration_tests { fn create_event(i: i32) -> Event { Event::Metric( Metric::new( - format!("counter-{}", i), + format!("counter-{i}"), MetricKind::Incremental, MetricValue::Counter { value: i as f64 }, ) diff --git a/src/sinks/influxdb/mod.rs b/src/sinks/influxdb/mod.rs index a4815707b5e19..4cd8a89270956 100644 --- a/src/sinks/influxdb/mod.rs +++ b/src/sinks/influxdb/mod.rs @@ -393,9 +393,7 @@ pub mod test_util { for field in fields.into_iter() { assert!( encoded_fields.contains(&field), - "Fields: {} has to have: {}", - value, - field + "Fields: {value} has to have: {field}" ) } } @@ -448,7 +446,7 @@ pub mod test_util { pub(crate) async fn query_v1(endpoint: &str, query: &str) -> reqwest::Response { client() - .get(&format!("{}/query", endpoint)) + .get(&format!("{endpoint}/query")) .query(&[("q", query)]) .send() .await @@ -457,10 +455,10 @@ pub mod test_util { pub(crate) async fn onboarding_v1(endpoint: &str) -> String { let database = next_database(); - let status = query_v1(endpoint, &format!("create database {}", database)) + let status = query_v1(endpoint, &format!("create database {database}")) .await .status(); - assert_eq!(status, http::StatusCode::OK, "UnexpectedStatus: {}", status); + assert_eq!(status, http::StatusCode::OK, "UnexpectedStatus: {status}"); // Some times InfluxDB will return OK before it can actually // accept writes to the database, leading to test failures. Test // this with empty writes and loop if it reports the database @@ -471,7 +469,7 @@ pub mod test_util { match client() .post(&write_url) .header("Content-Type", "text/plain") - .header("Authorization", &format!("Token {}", TOKEN)) + .header("Authorization", &format!("Token {TOKEN}")) .body("") .send() .await @@ -480,7 +478,7 @@ pub mod test_util { { http::StatusCode::NO_CONTENT => true, http::StatusCode::NOT_FOUND => false, - status => panic!("Unexpected status: {}", status), + status => panic!("Unexpected status: {status}"), } } }) @@ -489,10 +487,10 @@ pub mod test_util { } pub(crate) async fn cleanup_v1(endpoint: &str, database: &str) { - let status = query_v1(endpoint, &format!("drop database {}", database)) + let status = query_v1(endpoint, &format!("drop database {database}")) .await .status(); - assert_eq!(status, http::StatusCode::OK, "UnexpectedStatus: {}", status); + assert_eq!(status, http::StatusCode::OK, "UnexpectedStatus: {status}"); } pub(crate) async fn onboarding_v2(endpoint: &str) { @@ -509,7 +507,7 @@ pub mod test_util { .unwrap(); let res = client - .post(format!("{}/api/v2/setup", endpoint)) + .post(format!("{endpoint}/api/v2/setup")) .json(&body) .header("accept", "application/json") .send() @@ -520,8 +518,7 @@ pub mod test_util { assert!( status == StatusCode::CREATED || status == StatusCode::UNPROCESSABLE_ENTITY, - "UnexpectedStatus: {}", - status + "UnexpectedStatus: {status}" ); } diff --git a/src/sinks/kafka/config.rs b/src/sinks/kafka/config.rs index 1f4587508a73d..0cae5058614e8 100644 --- a/src/sinks/kafka/config.rs +++ b/src/sinks/kafka/config.rs @@ -131,9 +131,9 @@ impl KafkaSinkConfig { // Type: float let key = "queue.buffering.max.ms"; if let Some(val) = self.librdkafka_options.get(key) { - return Err(format!("Batching setting `batch.timeout_secs` sets `librdkafka_options.{}={}`.\ - The config already sets this as `librdkafka_options.queue.buffering.max.ms={}`.\ - Please delete one.", key, value, val).into()); + return Err(format!("Batching setting `batch.timeout_secs` sets `librdkafka_options.{key}={value}`.\ + The 
config already sets this as `librdkafka_options.queue.buffering.max.ms={val}`.\ + Please delete one.").into()); } debug!( librdkafka_option = key, @@ -149,9 +149,9 @@ impl KafkaSinkConfig { // Type: integer let key = "batch.num.messages"; if let Some(val) = self.librdkafka_options.get(key) { - return Err(format!("Batching setting `batch.max_events` sets `librdkafka_options.{}={}`.\ - The config already sets this as `librdkafka_options.batch.num.messages={}`.\ - Please delete one.", key, value, val).into()); + return Err(format!("Batching setting `batch.max_events` sets `librdkafka_options.{key}={value}`.\ + The config already sets this as `librdkafka_options.batch.num.messages={val}`.\ + Please delete one.").into()); } debug!( librdkafka_option = key, @@ -170,9 +170,9 @@ impl KafkaSinkConfig { // Type: integer let key = "batch.size"; if let Some(val) = self.librdkafka_options.get(key) { - return Err(format!("Batching setting `batch.max_bytes` sets `librdkafka_options.{}={}`.\ - The config already sets this as `librdkafka_options.batch.size={}`.\ - Please delete one.", key, value, val).into()); + return Err(format!("Batching setting `batch.max_bytes` sets `librdkafka_options.{key}={value}`.\ + The config already sets this as `librdkafka_options.batch.size={val}`.\ + Please delete one.").into()); } debug!( librdkafka_option = key, diff --git a/src/sinks/kafka/tests.rs b/src/sinks/kafka/tests.rs index 21babf9bda996..018cb8f2b4112 100644 --- a/src/sinks/kafka/tests.rs +++ b/src/sinks/kafka/tests.rs @@ -107,7 +107,7 @@ mod integration_test { let topic = format!("test-{}", random_string(10)); let config = KafkaSinkConfig { bootstrap_servers: kafka_address(9091), - topic: format!("{}-%Y%m%d", topic), + topic: format!("{topic}-%Y%m%d"), compression: KafkaCompression::None, encoding: TextSerializerConfig::default().into(), key_field: None, @@ -241,7 +241,7 @@ mod integration_test { let kafka_auth = KafkaAuthConfig { sasl, tls }; let config = KafkaSinkConfig { bootstrap_servers: server.clone(), - topic: format!("{}-%Y%m%d", topic), + topic: format!("{topic}-%Y%m%d"), key_field: None, encoding: TextSerializerConfig::default().into(), batch: BatchConfig::default(), @@ -254,7 +254,7 @@ mod integration_test { acknowledgements: Default::default(), }; let topic = format!("{}-{}", topic, chrono::Utc::now().format("%Y%m%d")); - println!("Topic name generated in test: {:?}", topic); + println!("Topic name generated in test: {topic:?}"); let num_events = 1000; let (batch, mut receiver) = BatchNotifier::new_with_receiver(); @@ -303,7 +303,7 @@ mod integration_test { || match consumer.fetch_watermarks(&topic, 0, Duration::from_secs(3)) { Ok((_low, high)) => ready(high > 0), Err(err) => { - println!("retrying due to error fetching watermarks: {}", err); + println!("retrying due to error fetching watermarks: {err}"); ready(false) } }, diff --git a/src/sinks/logdna.rs b/src/sinks/logdna.rs index ee8d222a83746..9f74e7d040193 100644 --- a/src/sinks/logdna.rs +++ b/src/sinks/logdna.rs @@ -322,7 +322,7 @@ impl LogdnaConfig { fn build_uri(&self, query: &str) -> Uri { let host = &self.endpoint.uri; - let uri = format!("{}{}?{}", host, PATH, query); + let uri = format!("{host}{PATH}?{query}"); uri.parse::() .expect("This should be a valid uri") @@ -437,7 +437,7 @@ mod tests { // Swap out the host so we can force send it // to our local server let endpoint = UriSerde { - uri: format!("http://{}", addr).parse::().unwrap(), + uri: format!("http://{addr}").parse::().unwrap(), auth: None, }; config.endpoint = endpoint; @@ 
-497,7 +497,7 @@ mod tests { let (p, host) = hosts .iter() .enumerate() - .find(|(_, host)| query.contains(&format!("hostname={}", host))) + .find(|(_, host)| query.contains(&format!("hostname={host}"))) .expect("invalid hostname"); let lines = &partitions[p]; diff --git a/src/sinks/loki/healthcheck.rs b/src/sinks/loki/healthcheck.rs index af09955269be7..6ac909dc0799d 100644 --- a/src/sinks/loki/healthcheck.rs +++ b/src/sinks/loki/healthcheck.rs @@ -31,6 +31,6 @@ pub async fn healthcheck(config: LokiConfig, client: HttpClient) -> crate::Resul match status { http::StatusCode::OK => Ok(()), - _ => Err(format!("A non-successful status returned: {}", status).into()), + _ => Err(format!("A non-successful status returned: {status}").into()), } } diff --git a/src/sinks/loki/integration_tests.rs b/src/sinks/loki/integration_tests.rs index 85716fbaa129f..a7ac3e5b8070f 100644 --- a/src/sinks/loki/integration_tests.rs +++ b/src/sinks/loki/integration_tests.rs @@ -84,7 +84,7 @@ async fn build_sink_with_compression(codec: &str, compression: &str) -> (uuid::U } fn line_generator(index: usize) -> String { - format!("random line {}", index) + format!("random line {index}") } fn event_generator(index: usize) -> Event { @@ -556,7 +556,7 @@ fn get_timestamp(event: &Event) -> DateTime { } async fn fetch_stream(stream: String, tenant: &str) -> (Vec, Vec) { - let query = format!("%7Btest_name%3D\"{}\"%7D", stream); + let query = format!("%7Btest_name%3D\"{stream}\"%7D"); let query = format!( "{}/loki/api/v1/query_range?query={}&direction=forward", loki_address(), diff --git a/src/sinks/loki/sink.rs b/src/sinks/loki/sink.rs index acf653481d4c9..3f511c8ca70f4 100644 --- a/src/sinks/loki/sink.rs +++ b/src/sinks/loki/sink.rs @@ -179,7 +179,7 @@ impl EventEncoder { // key_* -> key_one, key_two, key_three for (k, v) in output { vec.push(( - slugify_text(format!("{}{}", opening_prefix, k)), + slugify_text(format!("{opening_prefix}{k}")), Value::from(v).to_string_lossy().into_owned(), )) } diff --git a/src/sinks/loki/tests.rs b/src/sinks/loki/tests.rs index 5661b0c6ec8b8..502357ca6598d 100644 --- a/src/sinks/loki/tests.rs +++ b/src/sinks/loki/tests.rs @@ -101,7 +101,7 @@ async fn healthcheck_includes_auth() { .unwrap(); let addr = test_util::next_addr(); - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = endpoint .clone() .parse::() diff --git a/src/sinks/nats.rs b/src/sinks/nats.rs index 37295fa96e100..a82b97782a205 100644 --- a/src/sinks/nats.rs +++ b/src/sinks/nats.rs @@ -317,8 +317,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -376,8 +375,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { .. })), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } @@ -406,8 +404,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -436,8 +433,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { .. 
})), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } @@ -467,8 +463,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -498,8 +493,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Config { .. })), - "publish_and_check failed, expected BuildError::Config, got: {:?}", - r + "publish_and_check failed, expected BuildError::Config, got: {r:?}" ); } @@ -530,8 +524,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -556,8 +549,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { .. })), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } @@ -590,8 +582,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -622,8 +613,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { .. })), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } @@ -658,8 +648,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -694,8 +683,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { ..
})), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } } diff --git a/src/sinks/new_relic/encoding.rs b/src/sinks/new_relic/encoding.rs index ec71aa4f40879..3e86128e48160 100644 --- a/src/sinks/new_relic/encoding.rs +++ b/src/sinks/new_relic/encoding.rs @@ -33,8 +33,7 @@ pub fn to_json(model: &T) -> Result, NewRelicSinkError> { Ok(json) } Err(error) => Err(NewRelicSinkError::new(&format!( - "Failed generating JSON: {}", - error + "Failed generating JSON: {error}" ))), } } diff --git a/src/sinks/papertrail.rs b/src/sinks/papertrail.rs index b16a7b7c96f6b..3377b882e8d8f 100644 --- a/src/sinks/papertrail.rs +++ b/src/sinks/papertrail.rs @@ -83,7 +83,7 @@ impl SinkConfig for PapertrailConfig { .port_u16() .ok_or_else(|| "A port is required for endpoint".to_string())?; - let address = format!("{}:{}", host, port); + let address = format!("{host}:{port}"); let tls = Some( self.tls .clone() @@ -169,7 +169,7 @@ impl tokio_util::codec::Encoder for PapertrailEncoder { formatter .format(&mut buffer.writer(), Severity::LOG_INFO, message) - .map_err(|error| Self::Error::SerializingError(format!("{}", error).into()))?; + .map_err(|error| Self::Error::SerializingError(format!("{error}").into()))?; buffer.put_u8(b'\n'); diff --git a/src/sinks/prometheus/collector.rs b/src/sinks/prometheus/collector.rs index 332e9def568fc..67afedf36d6d7 100644 --- a/src/sinks/prometheus/collector.rs +++ b/src/sinks/prometheus/collector.rs @@ -256,8 +256,8 @@ impl MetricCollector for StringCollector { result.push_str(suffix); Self::encode_tags(result, tags, extra); let _ = match timestamp_millis { - None => writeln!(result, " {}", value), - Some(timestamp) => writeln!(result, " {} {}", value, timestamp), + None => writeln!(result, " {value}"), + Some(timestamp) => writeln!(result, " {value} {timestamp}"), }; } @@ -290,10 +290,7 @@ impl StringCollector { fn encode_header(name: &str, fullname: &str, value: &MetricValue) -> String { let r#type = prometheus_metric_type(value).as_str(); - format!( - "# HELP {} {}\n# TYPE {} {}\n", - fullname, name, fullname, r#type - ) + format!("# HELP {fullname} {name}\n# TYPE {fullname} {type}\n") } fn format_tag(key: &str, mut value: &str) -> String { diff --git a/src/sinks/prometheus/exporter.rs b/src/sinks/prometheus/exporter.rs index 018af036d5b77..46f23a356dcbf 100644 --- a/src/sinks/prometheus/exporter.rs +++ b/src/sinks/prometheus/exporter.rs @@ -900,7 +900,7 @@ mod tests { // Events are marked as delivered as soon as they are aggregated. assert_eq!(receiver.try_recv(), Ok(BatchStatus::Delivered)); - let request = Request::get(format!("{}://{}/metrics", proto, address)) + let request = Request::get(format!("{proto}://{address}/metrics")) .body(Body::empty()) .expect("Error creating request."); let proxy = ProxyConfig::default(); @@ -964,7 +964,7 @@ mod tests { // Events are marked as delivered as soon as they are aggregated. 
assert_eq!(receiver.try_recv(), Ok(BatchStatus::Delivered)); - let mut request = Request::get(format!("{}://{}/metrics", proto, address)) + let mut request = Request::get(format!("{proto}://{address}/metrics")) .body(Body::empty()) .expect("Error creating request."); diff --git a/src/sinks/prometheus/remote_write.rs b/src/sinks/prometheus/remote_write.rs index 6d0d07bd42183..88a3c5c0de098 100644 --- a/src/sinks/prometheus/remote_write.rs +++ b/src/sinks/prometheus/remote_write.rs @@ -555,7 +555,7 @@ mod tests { let (rx, trigger, server) = build_test_server(addr); tokio::spawn(server); - let config = format!("endpoint = \"http://{}/write\"\n{}", addr, config); + let config = format!("endpoint = \"http://{addr}/write\"\n{config}"); let config: RemoteWriteConfig = toml::from_str(&config).unwrap(); let cx = SinkContext::new_test(); @@ -645,7 +645,7 @@ mod integration_tests { let cx = SinkContext::new_test(); let config = RemoteWriteConfig { - endpoint: format!("{}/api/v1/prom/write?db={}", url, database), + endpoint: format!("{url}/api/v1/prom/write?db={database}"), tls: Some(TlsConfig { ca_file: Some(tls::TEST_PEM_CA_PATH.into()), ..Default::default() @@ -657,7 +657,7 @@ mod integration_tests { let (sink, _) = config.build(cx).await.expect("error building config"); sink.run_events(events.clone()).await.unwrap(); - let result = query(url, &format!("show series on {}", database)).await; + let result = query(url, &format!("show series on {database}")).await; let values = &result["results"][0]["series"][0]["values"]; assert_eq!(values.as_array().unwrap().len(), 5); @@ -718,7 +718,7 @@ mod integration_tests { fn create_events(name_range: Range, value: impl Fn(f64) -> f64) -> Vec { name_range - .map(move |num| create_event(format!("metric_{}", num), value(num as f64))) + .map(move |num| create_event(format!("metric_{num}"), value(num as f64))) .collect() } } diff --git a/src/sinks/pulsar.rs b/src/sinks/pulsar.rs index 9cc7c3e6bbd77..cd10f41a402de 100644 --- a/src/sinks/pulsar.rs +++ b/src/sinks/pulsar.rs @@ -282,7 +282,7 @@ impl PulsarSinkConfig { if let Some(producer_name) = self.producer_name.clone() { pulsar_builder = pulsar_builder.with_name(if is_healthcheck { - format!("{}-healthcheck", producer_name) + format!("{producer_name}-healthcheck") } else { producer_name }); @@ -554,7 +554,7 @@ mod integration_tests { for line in input { let msg = match consumer.next().await.unwrap() { Ok(msg) => msg, - Err(error) => panic!("{:?}", error), + Err(error) => panic!("{error:?}"), }; consumer.ack(&msg).await.unwrap(); assert_eq!(String::from_utf8_lossy(&msg.payload.data), line); diff --git a/src/sinks/redis.rs b/src/sinks/redis.rs index ff08a98f683a9..6cd4e5f619bb4 100644 --- a/src/sinks/redis.rs +++ b/src/sinks/redis.rs @@ -633,7 +633,7 @@ mod integration_tests { pubsub_conn .subscribe(key.clone().to_string()) .await - .unwrap_or_else(|_| panic!("Failed to subscribe channel:{}.", key)); + .unwrap_or_else(|_| panic!("Failed to subscribe channel:{key}.")); debug!("Subscribed to channel:{}.", key); let mut pubsub_stream = pubsub_conn.on_message(); diff --git a/src/sinks/s3_common/config.rs b/src/sinks/s3_common/config.rs index fd4d85da4e1cf..8ccf96ec44fb4 100644 --- a/src/sinks/s3_common/config.rs +++ b/src/sinks/s3_common/config.rs @@ -345,10 +345,8 @@ mod tests { ("STANDARD_IA", S3StorageClass::StandardIa), ] { assert_eq!(name, to_string(storage_class)); - let result: S3StorageClass = serde_json::from_str(&format!("{:?}", name)) .unwrap_or_else(|error| { panic!("Unparsable storage class name {:?}:
{}", name, error) - }); + let result: S3StorageClass = serde_json::from_str(&format!("{name:?}")) + .unwrap_or_else(|error| panic!("Unparsable storage class name {name:?}: {error}")); assert_eq!(result, storage_class); } } diff --git a/src/sinks/sematext/logs.rs b/src/sinks/sematext/logs.rs index a6b92ebb3efc6..9d9e0dd413363 100644 --- a/src/sinks/sematext/logs.rs +++ b/src/sinks/sematext/logs.rs @@ -185,7 +185,7 @@ mod tests { let addr = next_addr(); // Swap out the host so we can force send it // to our local server - config.endpoint = Some(format!("http://{}", addr)); + config.endpoint = Some(format!("http://{addr}")); config.region = None; let (sink, _) = config.build(cx).await.unwrap(); diff --git a/src/sinks/sematext/metrics.rs b/src/sinks/sematext/metrics.rs index 5d1d986c23af4..f76ad5f9c9885 100644 --- a/src/sinks/sematext/metrics.rs +++ b/src/sinks/sematext/metrics.rs @@ -94,7 +94,7 @@ impl GenerateConfig for SematextMetricsConfig { } async fn healthcheck(endpoint: String, client: HttpClient) -> Result<()> { - let uri = format!("{}/health", endpoint); + let uri = format!("{endpoint}/health"); let request = Request::get(uri) .body(Body::empty()) @@ -409,7 +409,7 @@ mod tests { let addr = next_addr(); // Swap out the endpoint so we can force send it // to our local server - let endpoint = format!("http://{}", addr); + let endpoint = format!("http://{addr}"); config.endpoint = Some(endpoint.clone()); config.region = None; diff --git a/src/sinks/splunk_hec/common/service.rs b/src/sinks/splunk_hec/common/service.rs index d720f6c26c6ff..dadac2b30e0ba 100644 --- a/src/sinks/splunk_hec/common/service.rs +++ b/src/sinks/splunk_hec/common/service.rs @@ -364,10 +364,7 @@ mod tests { Mock::given(method("POST")) .and(path("/services/collector/event")) - .and(header( - "Authorization", - format!("Splunk {}", TOKEN).as_str(), - )) + .and(header("Authorization", format!("Splunk {TOKEN}").as_str())) .and(header_exists("X-Splunk-Request-Channel")) .respond_with(move |_: &Request| { let ack_id = @@ -379,10 +376,7 @@ mod tests { Mock::given(method("POST")) .and(path("/services/collector/ack")) - .and(header( - "Authorization", - format!("Splunk {}", TOKEN).as_str(), - )) + .and(header("Authorization", format!("Splunk {TOKEN}").as_str())) .and(header_exists("X-Splunk-Request-Channel")) .respond_with(ack_response) .mount(&mock_server) @@ -522,10 +516,7 @@ mod tests { // Override the usual event endpoint Mock::given(method("POST")) .and(path("/services/collector/event")) - .and(header( - "Authorization", - format!("Splunk {}", TOKEN).as_str(), - )) + .and(header("Authorization", format!("Splunk {TOKEN}").as_str())) .and(header_exists("X-Splunk-Request-Channel")) .respond_with(move |_: &Request| { ResponseTemplate::new(200).set_body_json(r#"{ "new": "a new response body" }"#) diff --git a/src/sinks/splunk_hec/common/util.rs b/src/sinks/splunk_hec/common/util.rs index ec6415ad52db1..f41f2bf505076 100644 --- a/src/sinks/splunk_hec/common/util.rs +++ b/src/sinks/splunk_hec/common/util.rs @@ -88,7 +88,7 @@ pub async fn build_healthcheck( .context(UriParseSnafu)?; let request = Request::get(uri) - .header("Authorization", format!("Splunk {}", token)) + .header("Authorization", format!("Splunk {token}")) .body(Body::empty()) .unwrap(); diff --git a/src/sinks/splunk_hec/logs/integration_tests.rs b/src/sinks/splunk_hec/logs/integration_tests.rs index 13ecaf7a2d30c..997210488f4e4 100644 --- a/src/sinks/splunk_hec/logs/integration_tests.rs +++ b/src/sinks/splunk_hec/logs/integration_tests.rs @@ -41,7 +41,7 @@ 
async fn recent_entries(index: Option<&str>) -> Vec { // https://docs.splunk.com/Documentation/Splunk/7.2.1/RESTREF/RESTsearch#search.2Fjobs let search_query = match index { - Some(index) => format!("search index={}", index), + Some(index) => format!("search index={index}"), None => "search index=*".into(), }; let res = client @@ -441,7 +441,7 @@ async fn splunk_auto_extracted_timestamp() { let entry = find_entry(message).await; assert_eq!( - format!("{{\"message\":\"{}\"}}", message), + format!("{{\"message\":\"{message}\"}}"), entry["_raw"].as_str().unwrap() ); assert_eq!( @@ -487,7 +487,7 @@ async fn splunk_non_auto_extracted_timestamp() { let entry = find_entry(message).await; assert_eq!( - format!("{{\"message\":\"{}\"}}", message), + format!("{{\"message\":\"{message}\"}}"), entry["_raw"].as_str().unwrap() ); assert_eq!( diff --git a/src/sinks/splunk_hec/logs/tests.rs b/src/sinks/splunk_hec/logs/tests.rs index 4d7f74a1e5178..f24dab4d05d0d 100644 --- a/src/sinks/splunk_hec/logs/tests.rs +++ b/src/sinks/splunk_hec/logs/tests.rs @@ -190,7 +190,7 @@ async fn splunk_passthrough_token() { let addr = next_addr(); let config = HecLogsSinkConfig { default_token: "token".to_string().into(), - endpoint: format!("http://{}", addr), + endpoint: format!("http://{addr}"), host_key: "host".into(), indexed_fields: Vec::new(), index: None, diff --git a/src/sinks/splunk_hec/metrics/tests.rs b/src/sinks/splunk_hec/metrics/tests.rs index dc05ff8bff1c9..0dc3a25628a09 100644 --- a/src/sinks/splunk_hec/metrics/tests.rs +++ b/src/sinks/splunk_hec/metrics/tests.rs @@ -318,7 +318,7 @@ async fn splunk_passthrough_token() { let addr = next_addr(); let config = HecMetricsSinkConfig { default_token: "token".to_owned().into(), - endpoint: format!("http://{}", addr), + endpoint: format!("http://{addr}"), host_key: "host".into(), index: None, sourcetype: None, diff --git a/src/sinks/statsd.rs b/src/sinks/statsd.rs index 0308483d4b5eb..80a74a4c2ea2d 100644 --- a/src/sinks/statsd.rs +++ b/src/sinks/statsd.rs @@ -175,7 +175,7 @@ fn encode_tags(tags: &MetricTags) -> String { let parts: Vec<_> = tags .iter_all() .map(|(name, tag_value)| match tag_value { - Some(value) => format!("{}:{}", name, value), + Some(value) => format!("{name}:{value}"), None => name.to_owned(), }) .collect(); @@ -223,7 +223,7 @@ impl Encoder for StatsdEncoder { MetricValue::Gauge { value } => { match metric.kind() { MetricKind::Incremental => { - push_event(&mut buf, metric, format!("{:+}", value), "g", None) + push_event(&mut buf, metric, format!("{value:+}"), "g", None) } MetricKind::Absolute => push_event(&mut buf, metric, value, "g", None), }; diff --git a/src/sinks/util/adaptive_concurrency/tests.rs b/src/sinks/util/adaptive_concurrency/tests.rs index 5da3259d117cb..0b8702738d3b1 100644 --- a/src/sinks/util/adaptive_concurrency/tests.rs +++ b/src/sinks/util/adaptive_concurrency/tests.rs @@ -185,7 +185,7 @@ impl SinkConfig for TestConfig { batch_settings.timeout, ) .with_flat_map(|event| stream::iter(Some(Ok(EncodedEvent::new(event, 0))))) - .sink_map_err(|error| panic!("Fatal test sink error: {}", error)); + .sink_map_err(|error| panic!("Fatal test sink error: {error}")); let healthcheck = future::ok(()).boxed(); // Dig deep to get at the internal controller statistics @@ -512,14 +512,14 @@ impl Range { fn assert_usize(&self, value: usize, name1: &str, name2: &str) -> Option { if value < self.0 as usize { Some(Failure { - stat_name: format!("{} {}", name1, name2), + stat_name: format!("{name1} {name2}"), mode: FailureMode::ExceededMinimum, 
value: value as f64, reference: self.0, }) } else if value > self.1 as usize { Some(Failure { - stat_name: format!("{} {}", name1, name2), + stat_name: format!("{name1} {name2}"), mode: FailureMode::ExceededMaximum, value: value as f64, reference: self.1, @@ -532,14 +532,14 @@ impl Range { fn assert_f64(&self, value: f64, name1: &str, name2: &str) -> Option { if value < self.0 { Some(Failure { - stat_name: format!("{} {}", name1, name2), + stat_name: format!("{name1} {name2}"), mode: FailureMode::ExceededMinimum, value, reference: self.0, }) } else if value > self.1 { Some(Failure { - stat_name: format!("{} {}", name1, name2), + stat_name: format!("{name1} {name2}"), mode: FailureMode::ExceededMaximum, value, reference: self.1, @@ -612,7 +612,7 @@ struct TestInput { } async fn run_compare(file_path: PathBuf, input: TestInput) { - eprintln!("Running test in {:?}", file_path); + eprintln!("Running test in {file_path:?}"); let results = run_test(input.params).await; @@ -658,7 +658,7 @@ async fn run_compare(file_path: PathBuf, input: TestInput) { failure.stat_name, failure.value, mode, failure.reference ); } - assert!(failures.is_empty(), "{:#?}", results); + assert!(failures.is_empty(), "{results:#?}"); } #[tokio::test] @@ -677,7 +677,7 @@ async fn all_tests() { .read_to_string(&mut data) .unwrap(); let input: TestInput = toml::from_str(&data) - .unwrap_or_else(|error| panic!("Invalid TOML in {:?}: {:?}", file_path, error)); + .unwrap_or_else(|error| panic!("Invalid TOML in {file_path:?}: {error:?}")); Some((file_path, input)) } else { None diff --git a/src/sinks/util/buffer/metrics/mod.rs b/src/sinks/util/buffer/metrics/mod.rs index 9e1e98f28d2bc..336aa3ed9a9c7 100644 --- a/src/sinks/util/buffer/metrics/mod.rs +++ b/src/sinks/util/buffer/metrics/mod.rs @@ -140,7 +140,7 @@ pub(self) mod tests { pub fn sample_counter(num: usize, tagstr: &str, kind: MetricKind, value: f64) -> Metric { Metric::new( - format!("counter-{}", num), + format!("counter-{num}"), kind, MetricValue::Counter { value }, ) @@ -148,12 +148,12 @@ pub(self) mod tests { } pub fn sample_gauge(num: usize, kind: MetricKind, value: f64) -> Metric { - Metric::new(format!("gauge-{}", num), kind, MetricValue::Gauge { value }) + Metric::new(format!("gauge-{num}"), kind, MetricValue::Gauge { value }) } pub fn sample_set(num: usize, kind: MetricKind, values: &[T]) -> Metric { Metric::new( - format!("set-{}", num), + format!("set-{num}"), kind, MetricValue::Set { values: values.iter().map(|s| s.to_string()).collect(), @@ -163,7 +163,7 @@ pub(self) mod tests { pub fn sample_distribution_histogram(num: u32, kind: MetricKind, rate: u32) -> Metric { Metric::new( - format!("dist-{}", num), + format!("dist-{num}"), kind, MetricValue::Distribution { samples: vector_core::samples![num as f64 => rate], @@ -180,7 +180,7 @@ pub(self) mod tests { sum: f64, ) -> Metric { Metric::new( - format!("buckets-{}", num), + format!("buckets-{num}"), kind, MetricValue::AggregatedHistogram { buckets: vector_core::buckets![ @@ -196,7 +196,7 @@ pub(self) mod tests { pub fn sample_aggregated_summary(num: u32, kind: MetricKind, factor: f64) -> Metric { Metric::new( - format!("quantiles-{}", num), + format!("quantiles-{num}"), kind, MetricValue::AggregatedSummary { quantiles: vector_core::quantiles![ @@ -241,7 +241,7 @@ pub(self) mod tests { result .into_iter() .map(|mut batch| { - batch.sort_by_key(|k| format!("{:?}", k)); + batch.sort_by_key(|k| format!("{k:?}")); batch }) .collect() diff --git a/src/sinks/util/http.rs b/src/sinks/util/http.rs index 
fcc9c3d0f8158..829a251919050 100644 --- a/src/sinks/util/http.rs +++ b/src/sinks/util/http.rs @@ -461,7 +461,7 @@ impl RetryLogic for HttpRetryLogic { format!("{}: {}", status, String::from_utf8_lossy(response.body())).into(), ), _ if status.is_success() => RetryAction::Successful, - _ => RetryAction::DontRetry(format!("response status: {}", status).into()), + _ => RetryAction::DontRetry(format!("response status: {status}").into()), } } } @@ -508,10 +508,10 @@ where RetryAction::DontRetry("endpoint not implemented".into()) } _ if status.is_server_error() => { - RetryAction::Retry(format!("Http Status: {}", status).into()) + RetryAction::Retry(format!("Http Status: {status}").into()) } _ if status.is_success() => RetryAction::Successful, - _ => RetryAction::DontRetry(format!("Http status: {}", status).into()), + _ => RetryAction::DontRetry(format!("Http status: {status}").into()), } } } @@ -653,7 +653,7 @@ mod test { async move { let mut body = hyper::body::aggregate(req.into_body()) .await - .map_err(|error| format!("error: {}", error))?; + .map_err(|error| format!("error: {error}"))?; let string = String::from_utf8(body.copy_to_bytes(body.remaining()).to_vec()) .map_err(|_| "Wasn't UTF-8".to_string())?; tx.try_send(string).map_err(|_| "Send error".to_string())?; @@ -667,7 +667,7 @@ mod test { tokio::spawn(async move { if let Err(error) = Server::bind(&addr).serve(new_service).await { - eprintln!("Server error: {}", error); + eprintln!("Server error: {error}"); } }); diff --git a/src/sinks/util/mod.rs b/src/sinks/util/mod.rs index 591e822ac557c..2dd36dee9a94a 100644 --- a/src/sinks/util/mod.rs +++ b/src/sinks/util/mod.rs @@ -110,7 +110,7 @@ pub fn encode_namespace<'a>( ) -> String { let name = name.into(); namespace - .map(|namespace| format!("{}{}{}", namespace, delimiter, name)) + .map(|namespace| format!("{namespace}{delimiter}{name}")) .unwrap_or_else(|| name.into_owned()) } diff --git a/src/sinks/util/sink.rs b/src/sinks/util/sink.rs index 391839b478c30..361811d3619c6 100644 --- a/src/sinks/util/sink.rs +++ b/src/sinks/util/sink.rs @@ -1121,7 +1121,7 @@ mod tests { impl Partition for Partitions { fn partition(&self) -> Bytes { - format!("{:?}", self).into() + format!("{self:?}").into() } } diff --git a/src/sinks/util/test.rs b/src/sinks/util/test.rs index b5b1a2ae461bd..0706c370c36b6 100644 --- a/src/sinks/util/test.rs +++ b/src/sinks/util/test.rs @@ -93,7 +93,7 @@ where let server = Server::bind(&addr) .serve(service) .with_graceful_shutdown(tripwire.then(crate::shutdown::tripwire_handler)) - .map_err(|error| panic!("Server error: {}", error)); + .map_err(|error| panic!("Server error: {error}")); (rx, trigger, server) } diff --git a/src/sinks/util/uri.rs b/src/sinks/util/uri.rs index 34f82f03a8326..a800965bedb9a 100644 --- a/src/sinks/util/uri.rs +++ b/src/sinks/util/uri.rs @@ -47,7 +47,7 @@ impl UriSerde { let uri = self.uri.to_string(); let self_path = uri.trim_end_matches('/'); let other_path = path.trim_start_matches('/'); - let path = format!("{}/{}", self_path, other_path); + let path = format!("{self_path}/{other_path}"); let uri = path.parse::()?; Ok(Self { uri, @@ -80,7 +80,7 @@ impl fmt::Display for UriSerde { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match (self.uri.authority(), &self.auth) { (Some(authority), Some(Auth::Basic { user, password })) => { - let authority = format!("{}:{}@{}", user, password, authority); + let authority = format!("{user}:{password}@{authority}"); let authority = Authority::from_maybe_shared(authority).map_err(|_| 
std::fmt::Error)?; let mut parts = self.uri.clone().into_parts(); @@ -119,7 +119,7 @@ impl From for UriSerde { fn get_basic_auth(authority: &Authority) -> (Authority, Option) { // We get a valid `Authority` as input, therefore cannot fail here. - let mut url = url::Url::parse(&format!("http://{}", authority)).expect("invalid authority"); + let mut url = url::Url::parse(&format!("http://{authority}")).expect("invalid authority"); let user = url.username(); if !user.is_empty() { @@ -164,7 +164,7 @@ pub fn protocol_endpoint(uri: Uri) -> (String, String) { let host = auth.host(); match auth.port() { None => host.to_string(), - Some(port) => format!("{}:{}", host, port), + Some(port) => format!("{host}:{port}"), } .parse() .unwrap_or_else(|_| unreachable!()) diff --git a/src/sinks/vector/mod.rs b/src/sinks/vector/mod.rs index 99a4c5bf8a2ab..2eee8d0bf6618 100644 --- a/src/sinks/vector/mod.rs +++ b/src/sinks/vector/mod.rs @@ -74,7 +74,7 @@ mod tests { let in_addr = next_addr(); - let config = format!(r#"address = "http://{}/""#, in_addr); + let config = format!(r#"address = "http://{in_addr}/""#); let config: VectorConfig = toml::from_str(&config).unwrap(); let cx = SinkContext::new_test(); @@ -118,7 +118,7 @@ mod tests { let in_addr = next_addr(); - let config = format!(r#"address = "http://{}/""#, in_addr); + let config = format!(r#"address = "http://{in_addr}/""#); let config: VectorConfig = toml::from_str(&config).unwrap(); let cx = SinkContext::new_test(); diff --git a/src/sinks/websocket/sink.rs b/src/sinks/websocket/sink.rs index 9bf711dda5f7a..e01d98809ecbc 100644 --- a/src/sinks/websocket/sink.rs +++ b/src/sinks/websocket/sink.rs @@ -400,7 +400,7 @@ mod tests { let addr = next_addr(); let config = WebSocketSinkConfig { - uri: format!("ws://{}", addr), + uri: format!("ws://{addr}"), tls: None, encoding: JsonSerializerConfig::default().into(), ping_interval: None, @@ -423,7 +423,7 @@ mod tests { let auth_clone = auth.clone(); let addr = next_addr(); let config = WebSocketSinkConfig { - uri: format!("ws://{}", addr), + uri: format!("ws://{addr}"), tls: None, encoding: JsonSerializerConfig::default().into(), ping_interval: None, @@ -445,7 +445,7 @@ mod tests { let tls = MaybeTlsSettings::from_config(&tls_config, true).unwrap(); let config = WebSocketSinkConfig { - uri: format!("wss://{}", addr), + uri: format!("wss://{addr}"), tls: Some(TlsEnableableConfig { enabled: Some(true), options: TlsConfig { @@ -471,7 +471,7 @@ mod tests { let addr = next_addr(); let config = WebSocketSinkConfig { - uri: format!("ws://{}", addr), + uri: format!("ws://{addr}"), tls: None, encoding: JsonSerializerConfig::default().into(), ping_interval: None, @@ -491,7 +491,7 @@ mod tests { time::sleep(Duration::from_millis(10)).await; event }); - let _ = tokio::spawn(sink.run(events)); + drop(tokio::spawn(sink.run(events))); receiver.connected().await; time::sleep(Duration::from_millis(500)).await; diff --git a/src/sources/amqp.rs b/src/sources/amqp.rs index d090520f4e760..670dc5fc62ced 100644 --- a/src/sources/amqp.rs +++ b/src/sources/amqp.rs @@ -504,8 +504,7 @@ pub mod test { let user = std::env::var("AMQP_USER").unwrap_or_else(|_| "guest".to_string()); let pass = std::env::var("AMQP_PASSWORD").unwrap_or_else(|_| "guest".to_string()); let vhost = std::env::var("AMQP_VHOST").unwrap_or_else(|_| "%2f".to_string()); - config.connection.connection_string = - format!("amqp://{}:{}@rabbitmq:5672/{}", user, pass, vhost); + config.connection.connection_string = format!("amqp://{user}:{pass}@rabbitmq:5672/{vhost}"); config } 
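For reference, every hunk in this diff applies the same mechanical rewrite: moving positional format arguments to the captured identifiers stabilized in Rust 1.58 (the pattern clippy's `uninlined_format_args` lint flags). A minimal sketch of the rule follows; this is illustrative code, not code from the repository, and the variable names are made up:

fn main() {
    let addr = "127.0.0.1:9000";
    // Before: positional argument.
    let old = format!("http://{}", addr);
    // After: the identifier is captured directly from the surrounding scope.
    let new = format!("http://{addr}");
    assert_eq!(old, new);

    // Only bare identifiers can be captured. Field accesses, method calls,
    // and other expressions still need explicit arguments, which is why
    // lines like `format!("{}: {}", status, String::from_utf8_lossy(...))`
    // above keep their positional form.
    let pair = ("localhost", 8080);
    println!("{}:{}", pair.0, pair.1);
}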
diff --git a/src/sources/apache_metrics/mod.rs b/src/sources/apache_metrics/mod.rs index 056763799a187..c499e2ec34b71 100644 --- a/src/sources/apache_metrics/mod.rs +++ b/src/sources/apache_metrics/mod.rs @@ -363,7 +363,7 @@ Scoreboard: ____S_____I______R____I_______KK___D__C__G_L____________W___________ wait_for_tcp(in_addr).await; let config = ApacheMetricsConfig { - endpoints: vec![format!("http://foo:bar@{}/metrics", in_addr)], + endpoints: vec![format!("http://foo:bar@{in_addr}/metrics")], scrape_interval_secs: Duration::from_secs(1), namespace: "custom".to_string(), }; @@ -387,7 +387,7 @@ Scoreboard: ____S_____I______R____I_______KK___D__C__G_L____________W___________ Some(tags) => { assert_eq!( tags.get("endpoint"), - Some(&format!("http://{}/metrics", in_addr)[..]) + Some(&format!("http://{in_addr}/metrics")[..]) ); assert_eq!(tags.get("host"), Some(&in_addr.to_string()[..])); } @@ -423,7 +423,7 @@ Scoreboard: ____S_____I______R____I_______KK___D__C__G_L____________W___________ let (tx, rx) = SourceSender::new_test(); let source = ApacheMetricsConfig { - endpoints: vec![format!("http://{}", in_addr)], + endpoints: vec![format!("http://{in_addr}")], scrape_interval_secs: Duration::from_secs(1), namespace: "apache".to_string(), } @@ -457,7 +457,7 @@ Scoreboard: ____S_____I______R____I_______KK___D__C__G_L____________W___________ let (tx, rx) = SourceSender::new_test(); let source = ApacheMetricsConfig { - endpoints: vec![format!("http://{}", in_addr)], + endpoints: vec![format!("http://{in_addr}")], scrape_interval_secs: Duration::from_secs(1), namespace: "custom".to_string(), } diff --git a/src/sources/aws_ecs_metrics/mod.rs b/src/sources/aws_ecs_metrics/mod.rs index 62c964a6f8c39..47d43d39b9459 100644 --- a/src/sources/aws_ecs_metrics/mod.rs +++ b/src/sources/aws_ecs_metrics/mod.rs @@ -571,7 +571,7 @@ mod test { wait_for_tcp(in_addr).await; let config = AwsEcsMetricsSourceConfig { - endpoint: format!("http://{}", in_addr), + endpoint: format!("http://{in_addr}"), version: Version::V4, scrape_interval_secs: Duration::from_secs(1), namespace: default_namespace(), @@ -596,13 +596,10 @@ mod test { match m.tags() { Some(tags) => assert_eq!(tags.get("device"), Some("eth1")), - None => panic!("No tags for metric. {:?}", m), + None => panic!("No tags for metric.
{m:?}"), } } - None => panic!( - "Could not find 'network_receive_bytes_total' in {:?}.", - metrics - ), + None => panic!("Could not find 'network_receive_bytes_total' in {metrics:?}."), } } } diff --git a/src/sources/aws_ecs_metrics/parser.rs b/src/sources/aws_ecs_metrics/parser.rs index 93d0d492e7bc4..a0c47dc55da07 100644 --- a/src/sources/aws_ecs_metrics/parser.rs +++ b/src/sources/aws_ecs_metrics/parser.rs @@ -125,7 +125,7 @@ fn counter( tags: MetricTags, ) -> Metric { Metric::new( - format!("{}_{}", prefix, name), + format!("{prefix}_{name}"), MetricKind::Absolute, MetricValue::Counter { value }, ) @@ -143,7 +143,7 @@ fn gauge( tags: MetricTags, ) -> Metric { Metric::new( - format!("{}_{}", prefix, name), + format!("{prefix}_{name}"), MetricKind::Absolute, MetricValue::Gauge { value }, ) diff --git a/src/sources/aws_kinesis_firehose/filters.rs b/src/sources/aws_kinesis_firehose/filters.rs index b9d36a627aece..8df349afe1c85 100644 --- a/src/sources/aws_kinesis_firehose/filters.rs +++ b/src/sources/aws_kinesis_firehose/filters.rs @@ -158,7 +158,7 @@ async fn handle_firehose_rejection(err: warp::Rejection) -> Result String { - format!("Test message: {}", index) + format!("Test message: {index}") } diff --git a/src/sources/datadog_agent/integration_tests.rs b/src/sources/datadog_agent/integration_tests.rs index 45794732d9a00..dcf7df1630a2b 100644 --- a/src/sources/datadog_agent/integration_tests.rs +++ b/src/sources/datadog_agent/integration_tests.rs @@ -52,7 +52,7 @@ async fn wait_for_healthy(address: String) { // wait a second before retry... tokio::time::sleep(Duration::new(1, 0)).await; } - panic!("Unable to reach the Datadog Agent. Check that it's started and that the health endpoint is available at {}.", address); + panic!("Unable to reach the Datadog Agent. 
Check that it's started and that the health endpoint is available at {address}."); } async fn wait_for_healthy_agent() { diff --git a/src/sources/datadog_agent/logs.rs b/src/sources/datadog_agent/logs.rs index 1fea11bba21d4..83e497ec907c4 100644 --- a/src/sources/datadog_agent/logs.rs +++ b/src/sources/datadog_agent/logs.rs @@ -80,7 +80,7 @@ pub(crate) fn decode_log_body( let messages: Vec = serde_json::from_slice(&body).map_err(|error| { ErrorMessage::new( StatusCode::BAD_REQUEST, - format!("Error parsing JSON: {:?}", error), + format!("Error parsing JSON: {error:?}"), ) })?; diff --git a/src/sources/datadog_agent/metrics.rs b/src/sources/datadog_agent/metrics.rs index a5a46f4ef3922..35d82e76e027b 100644 --- a/src/sources/datadog_agent/metrics.rs +++ b/src/sources/datadog_agent/metrics.rs @@ -187,7 +187,7 @@ fn decode_datadog_sketches( let metrics = decode_ddsketch(body, &api_key, schema_definition).map_err(|error| { ErrorMessage::new( StatusCode::UNPROCESSABLE_ENTITY, - format!("Error decoding Datadog sketch: {:?}", error), + format!("Error decoding Datadog sketch: {error:?}"), ) })?; @@ -218,7 +218,7 @@ fn decode_datadog_series_v2( |error| { ErrorMessage::new( StatusCode::UNPROCESSABLE_ENTITY, - format!("Error decoding Datadog sketch: {:?}", error), + format!("Error decoding Datadog sketch: {error:?}"), ) }, )?; @@ -358,7 +358,7 @@ fn decode_datadog_series_v1( let metrics: DatadogSeriesRequest = serde_json::from_slice(&body).map_err(|error| { ErrorMessage::new( StatusCode::BAD_REQUEST, - format!("Error parsing JSON: {:?}", error), + format!("Error parsing JSON: {error:?}"), ) })?; diff --git a/src/sources/datadog_agent/mod.rs b/src/sources/datadog_agent/mod.rs index b19ed71895985..c87161a260ad3 100644 --- a/src/sources/datadog_agent/mod.rs +++ b/src/sources/datadog_agent/mod.rs @@ -416,7 +416,7 @@ impl DatadogAgentSource { encoding => { return Err(ErrorMessage::new( StatusCode::UNSUPPORTED_MEDIA_TYPE, - format!("Unsupported encoding {}", encoding), + format!("Unsupported encoding {encoding}"), )) } } @@ -477,7 +477,7 @@ fn handle_decode_error(encoding: &str, error: impl std::error::Error) -> ErrorMe }); ErrorMessage::new( StatusCode::UNPROCESSABLE_ENTITY, - format!("Failed decompressing payload with {} decoder.", encoding), + format!("Failed decompressing payload with {encoding} decoder."), ) } diff --git a/src/sources/datadog_agent/tests.rs b/src/sources/datadog_agent/tests.rs index bf36400fcce35..c14dcf07b0266 100644 --- a/src/sources/datadog_agent/tests.rs +++ b/src/sources/datadog_agent/tests.rs @@ -188,7 +188,7 @@ async fn source( async fn send_with_path(address: SocketAddr, body: &str, headers: HeaderMap, path: &str) -> u16 { reqwest::Client::new() - .post(&format!("http://{}{}", address, path)) + .post(&format!("http://{address}{path}")) .headers(headers) .body(body.to_owned()) .send() diff --git a/src/sources/datadog_agent/traces.rs b/src/sources/datadog_agent/traces.rs index 3fb55de8508e6..d5893f7864dba 100644 --- a/src/sources/datadog_agent/traces.rs +++ b/src/sources/datadog_agent/traces.rs @@ -70,7 +70,7 @@ fn build_trace_filter( .map_err(|error| { ErrorMessage::new( StatusCode::UNPROCESSABLE_ENTITY, - format!("Error decoding Datadog traces: {:?}", error), + format!("Error decoding Datadog traces: {error:?}"), ) }) }); diff --git a/src/sources/demo_logs.rs b/src/sources/demo_logs.rs index b50adc60c1c4f..21618384385b2 100644 --- a/src/sources/demo_logs.rs +++ b/src/sources/demo_logs.rs @@ -160,7 +160,7 @@ impl OutputFormat { let line = lines.choose(&mut 
rand::thread_rng()).unwrap(); if sequence { - format!("{} {}", n, line) + format!("{n} {line}") } else { line.into() } diff --git a/src/sources/dnstap/mod.rs b/src/sources/dnstap/mod.rs index 4f76d48b4371b..f70222ef97eb5 100644 --- a/src/sources/dnstap/mod.rs +++ b/src/sources/dnstap/mod.rs @@ -304,7 +304,7 @@ impl FrameHandler for DnstapFrameHandler { match parse_dnstap_data(&self.schema, &mut log_event, frame) { Err(err) => { emit!(DnstapParseError { - error: format!("Dnstap protobuf decode error {:?}.", err) + error: format!("Dnstap protobuf decode error {err:?}.") }); None } @@ -478,7 +478,7 @@ mod integration_tests { break; } Err(e) => { - println!("Error: {}", e); + println!("Error: {e}"); break; } } @@ -664,7 +664,7 @@ mod integration_tests { dnstap_exec(vec![ "nslookup", "-type=A", - format!("-port={}", port).as_str(), + format!("-port={port}").as_str(), "h1.example.com", "localhost", ]) diff --git a/src/sources/dnstap/parser.rs b/src/sources/dnstap/parser.rs index 3872e5491d704..e45679dbe7116 100644 --- a/src/sources/dnstap/parser.rs +++ b/src/sources/dnstap/parser.rs @@ -165,7 +165,7 @@ impl<'a> DnstapParser<'a> { } } else { emit!(DnstapParseWarning { - error: format!("Unknown dnstap data type: {}", dnstap_data_type_id) + error: format!("Unknown dnstap data type: {dnstap_data_type_id}") }); need_raw_data = true; } @@ -889,8 +889,7 @@ fn to_socket_family_name(socket_family: i32) -> Result<&'static str> { Ok("INET6") } else { Err(Error::from(format!( - "Unknown socket family: {}", - socket_family + "Unknown socket family: {socket_family}" ))) } } @@ -910,8 +909,7 @@ fn to_socket_protocol_name(socket_protocol: i32) -> Result<&'static str> { Ok("DNSCryptTCP") } else { Err(Error::from(format!( - "Unknown socket protocol: {}", - socket_protocol + "Unknown socket protocol: {socket_protocol}" ))) } } @@ -939,7 +937,7 @@ fn to_dnstap_message_type(type_id: i32) -> String { 12 => String::from("ToolResponse"), 13 => String::from("UpdateQuery"), 14 => String::from("UpdateResponse"), - _ => format!("Unknown dnstap message type: {}", type_id), + _ => format!("Unknown dnstap message type: {type_id}"), } } diff --git a/src/sources/docker_logs/mod.rs b/src/sources/docker_logs/mod.rs index fc2c8e8d785c0..7b67c2d85fd12 100644 --- a/src/sources/docker_logs/mod.rs +++ b/src/sources/docker_logs/mod.rs @@ -966,7 +966,7 @@ impl ContainerLogInfo { fn log_since(&self) -> i64 { self.last_log .as_ref() - .map(|&(ref d, _)| d.timestamp()) + .map(|(d, _)| d.timestamp()) .unwrap_or_else(|| self.created.timestamp()) - 1 } diff --git a/src/sources/docker_logs/tests.rs b/src/sources/docker_logs/tests.rs index 0f8cd0981c069..d771684fc192c 100644 --- a/src/sources/docker_logs/tests.rs +++ b/src/sources/docker_logs/tests.rs @@ -99,7 +99,7 @@ mod integration_tests { vec![ "sh", "-c", - format!("echo before; i=0; while [ $i -le 50 ]; do sleep 0.1; echo {}; i=$((i+1)); done", log).as_str(), + format!("echo before; i=0; while [ $i -le 50 ]; do sleep 0.1; echo {log}; i=$((i+1)); done").as_str(), ], docker, false @@ -177,7 +177,7 @@ mod integration_tests { .for_each(|item| async move { let info = item.unwrap(); if let Some(error) = info.error { - panic!("{:?}", error); + panic!("{error:?}"); } }) .await @@ -253,7 +253,7 @@ mod integration_tests { for _ in 0..n { if let Err(error) = container_run(&id, docker).await { container_remove(&id, docker).await; - panic!("Container failed to start with error: {:?}", error); + panic!("Container failed to start with error: {error:?}"); } } id @@ -273,7 +273,7 @@ mod integration_tests 
{ let id = eternal_container(name, label, log, &docker).await; if let Err(error) = container_start(&id, &docker).await { container_remove(&id, &docker).await; - panic!("Container start failed with error: {:?}", error); + panic!("Container start failed with error: {error:?}"); } // Wait for before message @@ -440,7 +440,7 @@ mod integration_tests { assert_eq!(log[CONTAINER], id.into()); assert!(log.get(CREATED_AT).is_some()); assert_eq!(log[IMAGE], "busybox".into()); - assert!(log.get(format!("label.{}", label).as_str()).is_some()); + assert!(log.get(format!("label.{label}").as_str()).is_some()); assert_eq!(events[0].as_log()[&NAME], name.into()); assert_eq!( events[0].as_log()[log_schema().source_type_key()], @@ -642,7 +642,7 @@ mod integration_tests { assert_eq!(log[CONTAINER], id.into()); assert!(log.get(CREATED_AT).is_some()); assert_eq!(log[IMAGE], "busybox".into()); - assert!(log.get(format!("label.{}", label).as_str()).is_some()); + assert!(log.get(format!("label.{label}").as_str()).is_some()); assert_eq!(events[0].as_log()[&NAME], name.into()); assert_eq!( events[0].as_log()[log_schema().source_type_key()], @@ -866,14 +866,14 @@ mod integration_tests { let command = emitted_messages .into_iter() - .map(|message| format!("echo {:?}", message)) + .map(|message| format!("echo {message:?}")) .collect::<Vec<_>>() .join(" && "); let id = cmd_container(name, None, vec!["sh", "-c", &command], &docker, false).await; if let Err(error) = container_run(&id, &docker).await { container_remove(&id, &docker).await; - panic!("Container failed to start with error: {:?}", error); + panic!("Container failed to start with error: {error:?}"); } let events = collect_n(out, expected_messages.len()).await; container_remove(&id, &docker).await; @@ -937,14 +937,14 @@ mod integration_tests { let command = emitted_messages .into_iter() - .map(|message| format!("echo {:?}", message)) + .map(|message| format!("echo {message:?}")) .collect::<Vec<_>>() .join(" && "); let id = cmd_container(name, None, vec!["sh", "-c", &command], &docker, false).await; if let Err(error) = container_run(&id, &docker).await { container_remove(&id, &docker).await; - panic!("Container failed to start with error: {:?}", error); + panic!("Container failed to start with error: {error:?}"); } let events = collect_n(out, expected_messages.len()).await; container_remove(&id, &docker).await; diff --git a/src/sources/exec/mod.rs b/src/sources/exec/mod.rs index 5af3f79930ffe..db0032605c5fc 100644 --- a/src/sources/exec/mod.rs +++ b/src/sources/exec/mod.rs @@ -685,7 +685,7 @@ fn spawn_reader_thread( sender: Sender<((SmallVec<[Event; 1]>, usize), &'static str)>, ) { // Start the green background thread for collecting - let _ = Box::pin(tokio::spawn(async move { + drop(Box::pin(tokio::spawn(async move { debug!("Start capturing {} command output.", origin); let mut stream = FramedRead::new(reader, decoder); @@ -710,7 +710,7 @@ fn spawn_reader_thread( } debug!("Finished capturing {} command output.", origin); - })); + }))); } #[cfg(test)] @@ -910,8 +910,8 @@ mod tests { expected_command.args(vec!["arg1".to_owned(), "arg2".to_owned()]); // Unfortunately the current_dir is not included in the formatted string - let expected_command_string = format!("{:?}", expected_command); - let command_string = format!("{:?}", command); + let expected_command_string = format!("{expected_command:?}"); + let command_string = format!("{command:?}"); assert_eq!(expected_command_string, command_string); } diff --git a/src/sources/file.rs b/src/sources/file.rs index
bdfab5db4ec19..5ddcf8a285c9a 100644 --- a/src/sources/file.rs +++ b/src/sources/file.rs @@ -1112,8 +1112,8 @@ mod tests { sleep_500_millis().await; // The files must be observed at their original lengths before writing to them for i in 0..n { - writeln!(&mut file1, "hello {}", i).unwrap(); - writeln!(&mut file2, "goodbye {}", i).unwrap(); + writeln!(&mut file1, "hello {i}").unwrap(); + writeln!(&mut file2, "goodbye {i}").unwrap(); } sleep_500_millis().await; @@ -1126,14 +1126,14 @@ mod tests { for event in received { let line = event.as_log()[log_schema().message_key()].to_string_lossy(); if line.starts_with("hello") { - assert_eq!(line, format!("hello {}", hello_i)); + assert_eq!(line, format!("hello {hello_i}")); assert_eq!( event.as_log()["file"].to_string_lossy(), path1.to_str().unwrap() ); hello_i += 1; } else { - assert_eq!(line, format!("goodbye {}", goodbye_i)); + assert_eq!(line, format!("goodbye {goodbye_i}")); assert_eq!( event.as_log()["file"].to_string_lossy(), path2.to_str().unwrap() @@ -1191,18 +1191,18 @@ mod tests { sleep_500_millis().await; // The files must be observed at its original length before writing to it for i in 0..n { - writeln!(&mut file, "pretrunc {}", i).unwrap(); + writeln!(&mut file, "pretrunc {i}").unwrap(); } sleep_500_millis().await; // The writes must be observed before truncating file.set_len(0).unwrap(); - file.seek(std::io::SeekFrom::Start(0)).unwrap(); + file.rewind().unwrap(); sleep_500_millis().await; // The truncate must be observed before writing again for i in 0..n { - writeln!(&mut file, "posttrunc {}", i).unwrap(); + writeln!(&mut file, "posttrunc {i}").unwrap(); } sleep_500_millis().await; @@ -1221,9 +1221,9 @@ mod tests { let line = event.as_log()[log_schema().message_key()].to_string_lossy(); if pre_trunc { - assert_eq!(line, format!("pretrunc {}", i)); + assert_eq!(line, format!("pretrunc {i}")); } else { - assert_eq!(line, format!("posttrunc {}", i)); + assert_eq!(line, format!("posttrunc {i}")); } i += 1; @@ -1252,7 +1252,7 @@ mod tests { sleep_500_millis().await; // The files must be observed at its original length before writing to it for i in 0..n { - writeln!(&mut file, "prerot {}", i).unwrap(); + writeln!(&mut file, "prerot {i}").unwrap(); } sleep_500_millis().await; // The writes must be observed before rotating @@ -1263,7 +1263,7 @@ mod tests { sleep_500_millis().await; // The rotation must be observed before writing again for i in 0..n { - writeln!(&mut file, "postrot {}", i).unwrap(); + writeln!(&mut file, "postrot {i}").unwrap(); } sleep_500_millis().await; @@ -1282,9 +1282,9 @@ mod tests { let line = event.as_log()[log_schema().message_key()].to_string_lossy(); if pre_rot { - assert_eq!(line, format!("prerot {}", i)); + assert_eq!(line, format!("prerot {i}")); } else { - assert_eq!(line, format!("postrot {}", i)); + assert_eq!(line, format!("postrot {i}")); } i += 1; @@ -1319,10 +1319,10 @@ mod tests { sleep_500_millis().await; // The files must be observed at their original lengths before writing to them for i in 0..n { - writeln!(&mut file1, "1 {}", i).unwrap(); - writeln!(&mut file2, "2 {}", i).unwrap(); - writeln!(&mut file3, "3 {}", i).unwrap(); - writeln!(&mut file4, "4 {}", i).unwrap(); + writeln!(&mut file1, "1 {i}").unwrap(); + writeln!(&mut file2, "2 {i}").unwrap(); + writeln!(&mut file3, "3 {i}").unwrap(); + writeln!(&mut file4, "4 {i}").unwrap(); } sleep_500_millis().await; @@ -1704,7 +1704,7 @@ mod tests { writeln!(&mut file, "this is too long").unwrap(); writeln!(&mut file, "11 eleven11").unwrap(); let 
super_long = "This line is super long and will take up more space than BufReader's internal buffer, just to make sure that everything works properly when multiple read calls are involved".repeat(10000); - writeln!(&mut file, "{}", super_long).unwrap(); + writeln!(&mut file, "{super_long}").unwrap(); writeln!(&mut file, "exactly 10").unwrap(); writeln!(&mut file, "it can end on a line that's too long").unwrap(); @@ -2155,7 +2155,7 @@ mod tests { sleep_500_millis().await; // The files must be observed at their original lengths before writing to them for i in 0..n { - writeln!(&mut file, "{}", i).unwrap(); + writeln!(&mut file, "{i}").unwrap(); } std::mem::drop(file); diff --git a/src/sources/fluent/message.rs b/src/sources/fluent/message.rs index db45e6c4e0ea2..f3e01e256e354 100644 --- a/src/sources/fluent/message.rs +++ b/src/sources/fluent/message.rs @@ -96,8 +96,7 @@ impl<'de> serde::de::Deserialize<'de> for FluentEventTime { if tag != 0 { return Err(serde::de::Error::custom(format!( - "expected extension type 0 for fluent timestamp, got {}", - tag + "expected extension type 0 for fluent timestamp, got {tag}" ))); } diff --git a/src/sources/fluent/mod.rs b/src/sources/fluent/mod.rs index 3653e47139857..e0e8e2d7c9a7d 100644 --- a/src/sources/fluent/mod.rs +++ b/src/sources/fluent/mod.rs @@ -256,13 +256,13 @@ pub enum DecodeError { impl std::fmt::Display for DecodeError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - DecodeError::IO(err) => write!(f, "{}", err), - DecodeError::Decode(err) => write!(f, "{}", err), + DecodeError::IO(err) => write!(f, "{err}"), + DecodeError::Decode(err) => write!(f, "{err}"), DecodeError::UnknownCompression(compression) => { - write!(f, "unknown compression: {}", compression) + write!(f, "unknown compression: {compression}") } DecodeError::UnexpectedValue(value) => { - write!(f, "unexpected msgpack value, ignoring: {}", value) + write!(f, "unexpected msgpack value, ignoring: {value}") } } } @@ -533,7 +533,7 @@ impl TcpSourceAcker for FluentAcker { let mut acks = String::new(); for chunk in self.chunks { let ack = match ack { - TcpSourceAck::Ack => format!(r#"{{"ack": "{}"}}"#, chunk), + TcpSourceAck::Ack => format!(r#"{{"ack": "{chunk}"}}"#), _ => String::from("{}"), }; acks.push_str(&ack); @@ -1048,7 +1048,7 @@ mod integration_tests { fn make_file(name: &str, content: &str) -> tempfile::TempDir { let dir = tempfile::tempdir().unwrap(); let mut file = File::create(dir.path().join(name)).unwrap(); - write!(&mut file, "{}", content).unwrap(); + write!(&mut file, "{content}").unwrap(); dir } @@ -1102,7 +1102,7 @@ mod integration_tests { .run(async move { wait_for_tcp(test_address).await; reqwest::Client::new() - .post(&format!("http://{}/", test_address)) + .post(&format!("http://{test_address}/")) .header("content-type", "application/json") .body(body.to_string()) .send() @@ -1183,7 +1183,7 @@ mod integration_tests { .run(async move { wait_for_tcp(test_address).await; reqwest::Client::new() - .post(&format!("http://{}/", test_address)) + .post(&format!("http://{test_address}/")) .header("content-type", "application/json") .body(body.to_string()) .send() diff --git a/src/sources/heroku_logs.rs b/src/sources/heroku_logs.rs index 95fd20cc0348d..c8e1f074b381e 100644 --- a/src/sources/heroku_logs.rs +++ b/src/sources/heroku_logs.rs @@ -291,7 +291,7 @@ fn get_header<'a>(header_map: &'a HeaderMap, name: &str) -> Result<&'a str, Erro fn header_error_message(name: &str, msg: &str) -> ErrorMessage { ErrorMessage::new( 
StatusCode::BAD_REQUEST, - format!("Invalid request header {:?}: {:?}", name, msg), + format!("Invalid request header {name:?}: {msg:?}"), ) } @@ -458,7 +458,7 @@ mod tests { query: &str, ) -> u16 { let len = body.lines().count(); - let mut req = reqwest::Client::new().post(format!("http://{}/events?{}", address, query)); + let mut req = reqwest::Client::new().post(format!("http://{address}/events?{query}")); if let Some(auth) = auth { req = req.basic_auth(auth.username, Some(auth.password.inner())); } diff --git a/src/sources/host_metrics/cgroups.rs b/src/sources/host_metrics/cgroups.rs index 40636cbc3f788..34956e76da5c8 100644 --- a/src/sources/host_metrics/cgroups.rs +++ b/src/sources/host_metrics/cgroups.rs @@ -505,8 +505,8 @@ mod tests { base.d("memory"); base.d("unified"); for subdir in SUBDIRS { - base.group(&format!("unified/{}", subdir), CPU_STAT, Some("")); - base.group(&format!("memory/{}", subdir), MEMORY_STAT, None); + base.group(&format!("unified/{subdir}"), CPU_STAT, Some("")); + base.group(&format!("memory/{subdir}"), MEMORY_STAT, None); } base.test().await; } @@ -524,11 +524,11 @@ mod tests { base.d("unified"); for subdir in SUBDIRS { base.group( - &format!("unified/{}", subdir), + &format!("unified/{subdir}"), if subdir == "." { NONE } else { CPU_STAT }, Some(""), ); - base.group(&format!("memory/{}", subdir), MEMORY_STAT, None); + base.group(&format!("memory/{subdir}"), MEMORY_STAT, None); } base.test().await; } @@ -541,8 +541,8 @@ mod tests { base.d("cpu"); base.d("memory"); for subdir in SUBDIRS { - base.group(&format!("cpu/{}", subdir), CPU_STAT, None); - base.group(&format!("memory/{}", subdir), MEMORY_STAT, None); + base.group(&format!("cpu/{subdir}"), CPU_STAT, None); + base.group(&format!("memory/{subdir}"), MEMORY_STAT, None); } } diff --git a/src/sources/host_metrics/disk.rs b/src/sources/host_metrics/disk.rs index 5e23e2f03bad8..c41830ac1fa66 100644 --- a/src/sources/host_metrics/disk.rs +++ b/src/sources/host_metrics/disk.rs @@ -103,12 +103,7 @@ mod tests { "disk_written_bytes_total", "disk_writes_completed_total", ] { - assert_eq!( - count_name(&metrics, name), - metrics.len() / 4, - "name={}", - name - ); + assert_eq!(count_name(&metrics, name), metrics.len() / 4, "name={name}"); } // They should all have a "device" tag diff --git a/src/sources/host_metrics/filesystem.rs b/src/sources/host_metrics/filesystem.rs index 9be9ae78dbc8b..2664e74d9d58a 100644 --- a/src/sources/host_metrics/filesystem.rs +++ b/src/sources/host_metrics/filesystem.rs @@ -173,12 +173,7 @@ mod tests { "filesystem_used_bytes", "filesystem_used_ratio", ] { - assert_eq!( - count_name(&metrics, name), - metrics.len() / 4, - "name={}", - name - ); + assert_eq!(count_name(&metrics, name), metrics.len() / 4, "name={name}"); } // They should all have "filesystem" and "mountpoint" tags diff --git a/src/sources/host_metrics/mod.rs b/src/sources/host_metrics/mod.rs index 9d356ee8d0d1b..ab9065691d56f 100644 --- a/src/sources/host_metrics/mod.rs +++ b/src/sources/host_metrics/mod.rs @@ -721,8 +721,7 @@ pub(self) mod tests { assert!( all_metrics_count > some_metrics.len(), - "collector={:?}", - collector + "collector={collector:?}" ); } } @@ -856,7 +855,7 @@ pub(self) mod tests { // Pick an arbitrary key value if let Some(key) = keys.into_iter().next() { let key_prefix = &key[..key.len() - 1].to_string(); - let key_prefix_pattern = PatternWrapper::try_from(format!("{}*", key_prefix)).unwrap(); + let key_prefix_pattern = PatternWrapper::try_from(format!("{key_prefix}*")).unwrap(); let key_pattern = 
PatternWrapper::try_from(key.clone()).unwrap(); let filter = FilterList { diff --git a/src/sources/http_client/tests.rs b/src/sources/http_client/tests.rs index 8676dc5678cd4..642238aaf4d24 100644 --- a/src/sources/http_client/tests.rs +++ b/src/sources/http_client/tests.rs @@ -47,7 +47,7 @@ async fn bytes_decoding() { tokio::spawn(warp::serve(dummy_endpoint).run(in_addr)); run_compliance(HttpClientConfig { - endpoint: format!("http://{}/endpoint", in_addr), + endpoint: format!("http://{in_addr}/endpoint"), interval: INTERVAL, query: HashMap::new(), decoding: default_decoding(), @@ -75,7 +75,7 @@ async fn json_decoding_newline_delimited() { wait_for_tcp(in_addr).await; run_compliance(HttpClientConfig { - endpoint: format!("http://{}/endpoint", in_addr), + endpoint: format!("http://{in_addr}/endpoint"), interval: INTERVAL, query: HashMap::new(), decoding: DeserializerConfig::Json, @@ -105,7 +105,7 @@ async fn json_decoding_character_delimited() { wait_for_tcp(in_addr).await; run_compliance(HttpClientConfig { - endpoint: format!("http://{}/endpoint", in_addr), + endpoint: format!("http://{in_addr}/endpoint"), interval: INTERVAL, query: HashMap::new(), decoding: DeserializerConfig::Json, @@ -131,13 +131,13 @@ async fn request_query_applied() { let dummy_endpoint = warp::path!("endpoint") .and(warp::query::raw()) - .map(|query| format!(r#"{{"data" : "{}"}}"#, query)); + .map(|query| format!(r#"{{"data" : "{query}"}}"#)); tokio::spawn(warp::serve(dummy_endpoint).run(in_addr)); wait_for_tcp(in_addr).await; let events = run_compliance(HttpClientConfig { - endpoint: format!("http://{}/endpoint?key1=val1", in_addr), + endpoint: format!("http://{in_addr}/endpoint?key1=val1"), interval: INTERVAL, query: HashMap::from([ ("key1".to_string(), vec!["val2".to_string()]), @@ -205,7 +205,7 @@ async fn headers_applied() { wait_for_tcp(in_addr).await; run_compliance(HttpClientConfig { - endpoint: format!("http://{}/endpoint", in_addr), + endpoint: format!("http://{in_addr}/endpoint"), interval: INTERVAL, query: HashMap::new(), decoding: default_decoding(), @@ -236,7 +236,7 @@ async fn accept_header_override() { wait_for_tcp(in_addr).await; run_compliance(HttpClientConfig { - endpoint: format!("http://{}/endpoint", in_addr), + endpoint: format!("http://{in_addr}/endpoint"), interval: INTERVAL, query: HashMap::new(), decoding: DeserializerConfig::Bytes, diff --git a/src/sources/http_server.rs b/src/sources/http_server.rs index f26ea4cdd9107..5cb74a0d9b0de 100644 --- a/src/sources/http_server.rs +++ b/src/sources/http_server.rs @@ -447,7 +447,7 @@ impl HttpSource for SimpleHttpSource { // handling is needed here return Err(ErrorMessage::new( StatusCode::BAD_REQUEST, - format!("Failed decoding body: {}", error), + format!("Failed decoding body: {error}"), )); } } @@ -554,7 +554,7 @@ mod tests { async fn send(address: SocketAddr, body: &str) -> u16 { reqwest::Client::new() - .post(&format!("http://{}/", address)) + .post(&format!("http://{address}/")) .body(body.to_owned()) .send() .await @@ -565,7 +565,7 @@ mod tests { async fn send_with_headers(address: SocketAddr, body: &str, headers: HeaderMap) -> u16 { reqwest::Client::new() - .post(&format!("http://{}/", address)) + .post(&format!("http://{address}/")) .headers(headers) .body(body.to_owned()) .send() @@ -577,7 +577,7 @@ mod tests { async fn send_with_query(address: SocketAddr, body: &str, query: &str) -> u16 { reqwest::Client::new() - .post(&format!("http://{}?{}", address, query)) + .post(&format!("http://{address}?{query}")) .body(body.to_owned()) .send() 
.await @@ -588,7 +588,7 @@ mod tests { async fn send_with_path(address: SocketAddr, body: &str, path: &str) -> u16 { reqwest::Client::new() - .post(&format!("http://{}{}", address, path)) + .post(&format!("http://{address}{path}")) .body(body.to_owned()) .send() .await @@ -601,7 +601,7 @@ let method = Method::from_bytes(method.to_owned().as_bytes()).unwrap(); format!("method: {}", method.as_str()); reqwest::Client::new() - .request(method, &format!("http://{}{}", address, path)) + .request(method, &format!("http://{address}{path}")) .body(body.to_owned()) .send() .await @@ -612,7 +612,7 @@ mod tests { async fn send_bytes(address: SocketAddr, body: Vec<u8>, headers: HeaderMap) -> u16 { reqwest::Client::new() - .post(&format!("http://{}/", address)) + .post(&format!("http://{address}/")) .headers(headers) .body(body) .send() diff --git a/src/sources/journald.rs b/src/sources/journald.rs index 845885880b2c4..6adb9f39b98f0 100644 --- a/src/sources/journald.rs +++ b/src/sources/journald.rs @@ -643,7 +643,7 @@ impl StartJournalctl { } if let Some(cursor) = checkpoint { - command.arg(format!("--after-cursor={}", cursor)); + command.arg(format!("--after-cursor={cursor}")); } else if self.since_now { command.arg("--since=now"); } else { @@ -779,7 +779,7 @@ fn fixup_unit(unit: &str) -> String { if unit.contains('.') { unit.into() } else { - format!("{}.service", unit) + format!("{unit}.service") } } @@ -928,7 +928,7 @@ impl Checkpointer { async fn set(&mut self, token: &str) -> Result<(), io::Error> { self.file.seek(SeekFrom::Start(0)).await?; - self.file.write_all(format!("{}\n", token).as_bytes()).await + self.file.write_all(format!("{token}\n").as_bytes()).await } async fn get(&mut self) -> Result<Option<String>, io::Error> { @@ -1023,7 +1023,7 @@ mod checkpointer_tests { assert_eq!(checkpointer.get().await.unwrap().unwrap(), "first test"); let contents = read_to_string(filename.clone()) .await - .unwrap_or_else(|_| panic!("Failed to read: {:?}", filename)); + .unwrap_or_else(|_| panic!("Failed to read: {filename:?}")); assert!(contents.starts_with("first test\n")); checkpointer @@ -1033,7 +1033,7 @@ assert_eq!(checkpointer.get().await.unwrap().unwrap(), "second"); let contents = read_to_string(filename.clone()) .await - .unwrap_or_else(|_| panic!("Failed to read: {:?}", filename)); + .unwrap_or_else(|_| panic!("Failed to read: {filename:?}")); assert!(contents.starts_with("second\n")); } } @@ -1400,7 +1400,7 @@ mod tests { let since_now = false; let command = create_command(&path, journal_dir, current_boot_only, since_now, cursor); - let cmd_line = format!("{:?}", command); + let cmd_line = format!("{command:?}"); assert!(!cmd_line.contains("--directory=")); assert!(!cmd_line.contains("--boot")); assert!(cmd_line.contains("--since=2000-01-01")); @@ -1409,7 +1409,7 @@ let journal_dir = None; let command = create_command(&path, journal_dir, current_boot_only, since_now, cursor); - let cmd_line = format!("{:?}", command); + let cmd_line = format!("{command:?}"); assert!(cmd_line.contains("--since=now")); let journal_dir = Some(PathBuf::from("/tmp/journal-dir")); @@ -1417,7 +1417,7 @@ let cursor = Some("2021-01-01"); let command = create_command(&path, journal_dir, current_boot_only, since_now, cursor); - let cmd_line = format!("{:?}", command); + let cmd_line = format!("{command:?}"); assert!(cmd_line.contains("--directory=/tmp/journal-dir")); assert!(cmd_line.contains("--boot")); assert!(cmd_line.contains("--after-cursor=")); diff --git
a/src/sources/kafka.rs b/src/sources/kafka.rs index ce2d232d43dfa..ef675010e555d 100644 --- a/src/sources/kafka.rs +++ b/src/sources/kafka.rs @@ -902,8 +902,8 @@ mod integration_test { let producer: FutureProducer = client_config(None); for i in 0..count { - let text = format!("{} {:03}", TEXT, i); - let key = format!("{} {}", KEY, i); + let text = format!("{TEXT} {i:03}"); + let key = format!("{KEY} {i}"); let record = FutureRecord::to(&topic) .payload(&text) .key(&key) @@ -914,7 +914,7 @@ mod integration_test { })); if let Err(error) = producer.send(record, Timeout::Never).await { - panic!("Cannot send event to Kafka: {:?}", error); + panic!("Cannot send event to Kafka: {error:?}"); } } @@ -998,12 +998,9 @@ mod integration_test { if let LogNamespace::Legacy = log_namespace { assert_eq!( event.as_log()[log_schema().message_key()], - format!("{} {:03}", TEXT, i).into() - ); - assert_eq!( - event.as_log()["message_key"], - format!("{} {}", KEY, i).into() + format!("{TEXT} {i:03}").into() ); + assert_eq!(event.as_log()["message_key"], format!("{KEY} {i}").into()); assert_eq!( event.as_log()[log_schema().source_type_key()], "kafka".into() @@ -1032,11 +1029,11 @@ mod integration_test { assert_eq!( event.as_log().value(), - &vrl::value!(format!("{} {:03}", TEXT, i)) + &vrl::value!(format!("{TEXT} {i:03}")) ); assert_eq!( meta.get(path!("kafka", "message_key")).unwrap(), - &vrl::value!(format!("{} {}", KEY, i)) + &vrl::value!(format!("{KEY} {i}")) ); assert_eq!( @@ -1216,14 +1213,14 @@ mod integration_test { Offset::Offset(offset) => { assert!((offset as isize - events1.len() as isize).abs() <= 1) } - o => panic!("Invalid offset for partition 0 {:?}", o), + o => panic!("Invalid offset for partition 0 {o:?}"), } match fetch_tpl_offset(&group_id, &topic, 1) { Offset::Offset(offset) => { assert!((offset as isize - events2.len() as isize).abs() <= 1) } - o => panic!("Invalid offset for partition 0 {:?}", o), + o => panic!("Invalid offset for partition 0 {o:?}"), } let mut all_events = events1 diff --git a/src/sources/kubernetes_logs/mod.rs b/src/sources/kubernetes_logs/mod.rs index 195b85afa2379..c2061eb7f1c61 100644 --- a/src/sources/kubernetes_logs/mod.rs +++ b/src/sources/kubernetes_logs/mod.rs @@ -532,8 +532,7 @@ impl Source { { std::env::var(SELF_NODE_NAME_ENV_KEY).map_err(|_| { format!( - "self_node_name config value or {} env var is not set", - SELF_NODE_NAME_ENV_KEY + "self_node_name config value or {SELF_NODE_NAME_ENV_KEY} env var is not set" ) })? } else { @@ -983,7 +982,7 @@ fn prepare_field_selector(config: &Config, self_node_name: &str) -> crate::Resul ?self_node_name ); - let field_selector = format!("spec.nodeName={}", self_node_name); + let field_selector = format!("spec.nodeName={self_node_name}"); if config.extra_field_selector.is_empty() { return Ok(field_selector); @@ -997,7 +996,7 @@ fn prepare_field_selector(config: &Config, self_node_name: &str) -> crate::Resul // This function constructs the selector for a node to annotate entries with a node metadata. 
fn prepare_node_selector(self_node_name: &str) -> crate::Result { - Ok(format!("metadata.name={}", self_node_name)) + Ok(format!("metadata.name={self_node_name}")) } // This function constructs the effective label selector to use, based on @@ -1009,7 +1008,7 @@ fn prepare_label_selector(selector: &str) -> String { return BUILT_IN.to_string(); } - format!("{},{}", BUILT_IN, selector) + format!("{BUILT_IN},{selector}") } #[cfg(test)] diff --git a/src/sources/kubernetes_logs/parser/docker.rs b/src/sources/kubernetes_logs/parser/docker.rs index e6a7891d9540b..c6ac58cccdd61 100644 --- a/src/sources/kubernetes_logs/parser/docker.rs +++ b/src/sources/kubernetes_logs/parser/docker.rs @@ -344,7 +344,7 @@ pub mod tests { let mut output = OutputBuffer::default(); parser.transform(&mut output, input.into()); - assert!(output.is_empty(), "Expected no events: {:?}", output); + assert!(output.is_empty(), "Expected no events: {output:?}"); } } @@ -360,7 +360,7 @@ pub mod tests { let mut output = OutputBuffer::default(); parser.transform(&mut output, input.into()); - assert!(output.is_empty(), "Expected no events: {:?}", output); + assert!(output.is_empty(), "Expected no events: {output:?}"); } } } diff --git a/src/sources/kubernetes_logs/parser/mod.rs b/src/sources/kubernetes_logs/parser/mod.rs index dc5920fdfac76..58206c7dc9a46 100644 --- a/src/sources/kubernetes_logs/parser/mod.rs +++ b/src/sources/kubernetes_logs/parser/mod.rs @@ -134,7 +134,7 @@ mod tests { let mut output = OutputBuffer::default(); parser.transform(&mut output, input.into()); - assert!(output.is_empty(), "Expected no events: {:?}", output); + assert!(output.is_empty(), "Expected no events: {output:?}"); } } @@ -162,7 +162,7 @@ mod tests { let mut output = OutputBuffer::default(); parser.transform(&mut output, input.into()); - assert!(output.is_empty(), "Expected no events: {:?}", output); + assert!(output.is_empty(), "Expected no events: {output:?}"); } } } diff --git a/src/sources/nats.rs b/src/sources/nats.rs index 4086333b96d46..0e06816db35e1 100644 --- a/src/sources/nats.rs +++ b/src/sources/nats.rs @@ -404,8 +404,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -436,8 +435,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -468,8 +466,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { .. })), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } @@ -499,8 +496,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -530,8 +526,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { .. 
})), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } @@ -562,8 +557,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -594,8 +588,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Config { .. })), - "publish_and_check failed, expected BuildError::Config, got: {:?}", - r + "publish_and_check failed, expected BuildError::Config, got: {r:?}" ); } @@ -627,8 +620,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -654,8 +646,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { .. })), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } @@ -689,8 +680,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -722,8 +712,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { .. })), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } @@ -759,8 +748,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( r.is_ok(), - "publish_and_check failed, expected Ok(()), got: {:?}", - r + "publish_and_check failed, expected Ok(()), got: {r:?}" ); } @@ -796,8 +784,7 @@ mod integration_tests { let r = publish_and_check(conf).await; assert!( matches!(r, Err(BuildError::Connect { .. 
})), - "publish_and_check failed, expected BuildError::Connect, got: {:?}", - r + "publish_and_check failed, expected BuildError::Connect, got: {r:?}" ); } } diff --git a/src/sources/nginx_metrics/mod.rs b/src/sources/nginx_metrics/mod.rs index 1d123b1cae67a..1a68a00132e18 100644 --- a/src/sources/nginx_metrics/mod.rs +++ b/src/sources/nginx_metrics/mod.rs @@ -186,7 +186,7 @@ impl NginxMetrics { let uri: Uri = endpoint.parse().context(HostInvalidUriSnafu)?; Ok(match (uri.host().unwrap_or(""), uri.port()) { (host, None) => host.to_owned(), - (host, Some(port)) => format!("{}:{}", host, port), + (host, Some(port)) => format!("{host}:{port}"), }) } diff --git a/src/sources/opentelemetry/http.rs b/src/sources/opentelemetry/http.rs index e2d5dede3edfd..e010f73246892 100644 --- a/src/sources/opentelemetry/http.rs +++ b/src/sources/opentelemetry/http.rs @@ -93,7 +93,7 @@ fn decode_body( let request = ExportLogsServiceRequest::decode(body).map_err(|error| { ErrorMessage::new( StatusCode::BAD_REQUEST, - format!("Could not decode request: {}", error), + format!("Could not decode request: {error}"), ) })?; @@ -164,7 +164,7 @@ async fn handle_rejection(err: Rejection) -> Result { - metric.replace_tag(format!("exported_{}", tag), old_instance); + metric.replace_tag(format!("exported_{tag}"), old_instance); metric.replace_tag(tag.clone(), instance.clone()); } (true, Some(_)) => {} @@ -258,7 +258,7 @@ impl HttpClientContext for PrometheusScrapeContext { { match (honor_label, metric.tag_value(tag)) { (false, Some(old_endpoint)) => { - metric.replace_tag(format!("exported_{}", tag), old_endpoint); + metric.replace_tag(format!("exported_{tag}"), old_endpoint); metric.replace_tag(tag.clone(), endpoint.clone()); } (true, Some(_)) => {} @@ -339,7 +339,7 @@ mod test { wait_for_tcp(in_addr).await; let config = PrometheusScrapeConfig { - endpoints: vec![format!("http://{}/metrics", in_addr)], + endpoints: vec![format!("http://{in_addr}/metrics")], interval: Duration::from_secs(1), instance_tag: Some("instance".to_string()), endpoint_tag: Some("endpoint".to_string()), @@ -372,7 +372,7 @@ mod test { wait_for_tcp(in_addr).await; let config = PrometheusScrapeConfig { - endpoints: vec![format!("http://{}/metrics", in_addr)], + endpoints: vec![format!("http://{in_addr}/metrics")], interval: Duration::from_secs(1), instance_tag: Some("instance".to_string()), endpoint_tag: Some("endpoint".to_string()), @@ -423,7 +423,7 @@ mod test { wait_for_tcp(in_addr).await; let config = PrometheusScrapeConfig { - endpoints: vec![format!("http://{}/metrics", in_addr)], + endpoints: vec![format!("http://{in_addr}/metrics")], interval: Duration::from_secs(1), instance_tag: Some("instance".to_string()), endpoint_tag: Some("endpoint".to_string()), @@ -488,7 +488,7 @@ mod test { wait_for_tcp(in_addr).await; let config = PrometheusScrapeConfig { - endpoints: vec![format!("http://{}/metrics", in_addr)], + endpoints: vec![format!("http://{in_addr}/metrics")], interval: Duration::from_secs(1), instance_tag: Some("instance".to_string()), endpoint_tag: Some("endpoint".to_string()), @@ -533,9 +533,8 @@ mod test { let dummy_endpoint = warp::path!("metrics").and(warp::query::raw()).map(|query| { format!( r#" - promhttp_metric_handler_requests_total{{query="{}"}} 100 1612411516789 - "#, - query + promhttp_metric_handler_requests_total{{query="{query}"}} 100 1612411516789 + "# ) }); @@ -543,7 +542,7 @@ mod test { wait_for_tcp(in_addr).await; let config = PrometheusScrapeConfig { - endpoints: vec![format!("http://{}/metrics?key1=val1", in_addr)], + 
endpoints: vec![format!("http://{in_addr}/metrics?key1=val1")], interval: Duration::from_secs(1), instance_tag: Some("instance".to_string()), endpoint_tag: Some("endpoint".to_string()), @@ -652,7 +651,7 @@ mod test { config.add_source( "in", PrometheusScrapeConfig { - endpoints: vec![format!("http://{}", in_addr)], + endpoints: vec![format!("http://{in_addr}")], instance_tag: None, endpoint_tag: None, honor_labels: false, @@ -683,7 +682,7 @@ mod test { sleep(Duration::from_secs(1)).await; let response = Client::new() - .get(format!("http://{}/metrics", out_addr).parse().unwrap()) + .get(format!("http://{out_addr}/metrics").parse().unwrap()) .await .unwrap(); @@ -768,7 +767,7 @@ mod integration_tests { metrics .iter() .find(|metric| metric.name() == name) - .unwrap_or_else(|| panic!("Missing metric {:?}", name)) + .unwrap_or_else(|| panic!("Missing metric {name:?}")) }; // Sample some well-known metrics diff --git a/src/sources/redis/mod.rs b/src/sources/redis/mod.rs index 6a3761f169450..37adf019587da 100644 --- a/src/sources/redis/mod.rs +++ b/src/sources/redis/mod.rs @@ -83,9 +83,7 @@ impl From<&redis::ConnectionInfo> for ConnectionInfo { fn from(redis_conn_info: &redis::ConnectionInfo) -> Self { let (protocol, endpoint) = match &redis_conn_info.addr { redis::ConnectionAddr::Tcp(host, port) - | redis::ConnectionAddr::TcpTls { host, port, .. } => { - ("tcp", format!("{}:{}", host, port)) - } + | redis::ConnectionAddr::TcpTls { host, port, .. } => ("tcp", format!("{host}:{port}")), redis::ConnectionAddr::Unix(path) => ("uds", path.to_string_lossy().to_string()), }; diff --git a/src/sources/socket/mod.rs b/src/sources/socket/mod.rs index 94080ff963b9d..4b6981eb96fa9 100644 --- a/src/sources/socket/mod.rs +++ b/src/sources/socket/mod.rs @@ -823,7 +823,7 @@ mod test { fn send_lines_udp(addr: SocketAddr, lines: impl IntoIterator<Item = String>) -> SocketAddr { let bind = next_addr(); let socket = UdpSocket::bind(bind) - .map_err(|error| panic!("{:}", error)) + .map_err(|error| panic!("{error:}")) .ok() .unwrap(); @@ -831,7 +831,7 @@ assert_eq!( socket .send_to(line.as_bytes(), addr) - .map_err(|error| panic!("{:}", error)) + .map_err(|error| panic!("{error:}")) .ok() .unwrap(), line.as_bytes().len() @@ -1261,10 +1261,9 @@ mod test { fn parses_unix_config(mode: &str) -> SocketConfig { toml::from_str::<SocketConfig>(&format!( r#" - mode = "{}" + mode = "{mode}" path = "/does/not/exist" - "#, - mode + "# )) .unwrap() } @@ -1273,11 +1272,10 @@ mod test { fn parses_unix_config_file_mode(mode: &str) -> SocketConfig { toml::from_str::<SocketConfig>(&format!( r#" - mode = "{}" + mode = "{mode}" path = "/does/not/exist" socket_file_mode = 0o777 - "#, - mode + "# )) .unwrap() } diff --git a/src/sources/splunk_hec/mod.rs b/src/sources/splunk_hec/mod.rs index 75e29f03f8eec..904503c414a13 100644 --- a/src/sources/splunk_hec/mod.rs +++ b/src/sources/splunk_hec/mod.rs @@ -1272,7 +1272,7 @@ mod tests { ) -> (VectorSink, Healthcheck) { HecLogsSinkConfig { default_token: TOKEN.to_owned().into(), - endpoint: format!("http://{}", address), + endpoint: format!("http://{address}"), host_key: "host".to_owned(), indexed_fields: vec![], index: None, @@ -1357,8 +1357,8 @@ mod tests { opts: &SendWithOpts<'_>, ) -> RequestBuilder { let mut b = reqwest::Client::new() - .post(format!("http://{}/{}", address, api)) - .header("Authorization", format!("Splunk {}", token)); + .post(format!("http://{address}/{api}")) + .header("Authorization", format!("Splunk {token}")); b = match opts.channel { Some(c) => match c { @@ -1451,7 +1451,7 @@ mod tests { .await;
let messages = (0..n) - .map(|i| format!("multiple_simple_text_event_{}", i)) + .map(|i| format!("multiple_simple_text_event_{i}")) .collect::<Vec<_>>(); let events = channel_n(messages.clone(), sink, source).await; @@ -1498,7 +1498,7 @@ .await; let messages = (0..n) - .map(|i| format!("multiple_simple_json_event{}", i)) + .map(|i| format!("multiple_simple_json_event{i}")) .collect::<Vec<_>>(); let events = channel_n(messages.clone(), sink, source).await; @@ -1736,7 +1736,7 @@ mod tests { let (_source, address) = source(None).await; let res = reqwest::Client::new() - .get(&format!("http://{}/services/collector/health", address)) + .get(&format!("http://{address}/services/collector/health")) .header("Authorization", format!("Splunk {}", "invalid token")) .send() .await @@ -1750,7 +1750,7 @@ mod tests { let (_source, address) = source(None).await; let res = reqwest::Client::new() - .get(&format!("http://{}/services/collector/health", address)) + .get(&format!("http://{address}/services/collector/health")) .send() .await .unwrap(); @@ -1955,7 +1955,7 @@ mod tests { "http://{}/{}", address, "services/collector/event" )) - .header("Authorization", format!("Splunk {}", TOKEN)) + .header("Authorization", format!("Splunk {TOKEN}")) .body::<&[u8]>(message); assert_eq!(200, b.send().await.unwrap().status().as_u16()); diff --git a/src/sources/statsd/parser.rs b/src/sources/statsd/parser.rs index a21a6e128dd53..8e0de98131486 100644 --- a/src/sources/statsd/parser.rs +++ b/src/sources/statsd/parser.rs @@ -193,7 +193,7 @@ pub enum ParseError { impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Statsd parse error: {:?}", self) + write!(f, "Statsd parse error: {self:?}") } } diff --git a/src/sources/syslog.rs b/src/sources/syslog.rs index e5a0c9f68af9d..2ea9b83fc039d 100644 --- a/src/sources/syslog.rs +++ b/src/sources/syslog.rs @@ -917,7 +917,7 @@ mod test { #[test] fn syslog_ng_default_network() { let msg = "i am foobar"; - let raw = format!(r#"<13>Feb 13 20:07:26 74794bfb6795 root[8539]: {}"#, msg); + let raw = format!(r#"<13>Feb 13 20:07:26 74794bfb6795 root[8539]: {msg}"#); let event = event_from_bytes("host", None, raw.into(), LogNamespace::Legacy).unwrap(); let mut expected = Event::Log(LogEvent::from(msg)); @@ -948,8 +948,7 @@ mod test { fn rsyslog_omfwd_tcp_default() { let msg = "start"; let raw = format!( - r#"<190>Feb 13 21:31:56 74794bfb6795 liblogging-stdlog: [origin software="rsyslogd" swVersion="8.24.0" x-pid="8979" x-info="http://www.rsyslog.com"] {}"#, - msg + r#"<190>Feb 13 21:31:56 74794bfb6795 liblogging-stdlog: [origin software="rsyslogd" swVersion="8.24.0" x-pid="8979" x-info="http://www.rsyslog.com"] {msg}"# ); let event = event_from_bytes("host", None, raw.into(), LogNamespace::Legacy).unwrap(); @@ -984,8 +983,7 @@ mod test { fn rsyslog_omfwd_tcp_forward_format() { let msg = "start"; let raw = format!( - r#"<190>2019-02-13T21:53:30.605850+00:00 74794bfb6795 liblogging-stdlog: [origin software="rsyslogd" swVersion="8.24.0" x-pid="9043" x-info="http://www.rsyslog.com"] {}"#, - msg + r#"<190>2019-02-13T21:53:30.605850+00:00 74794bfb6795 liblogging-stdlog: [origin software="rsyslogd" swVersion="8.24.0" x-pid="9043" x-info="http://www.rsyslog.com"] {msg}"# ); let mut expected = Event::Log(LogEvent::from(msg)); @@ -1264,7 +1262,7 @@ mod test { //"secfrac" can contain up to 6 digits, but TCP sinks uses `AutoSi` Self { - msgid: format!("test{}", id), + msgid: format!("test{id}"), severity: Severity::LOG_INFO, facility:
Facility::LOG_USER, version: 1, @@ -1388,7 +1386,7 @@ mod test { x => { #[allow(clippy::print_stdout)] { - println!("converting severity str, got {}", x); + println!("converting severity str, got {x}"); } None } @@ -1440,7 +1438,7 @@ mod test { .filter(|m| !m.is_empty()) //syslog_rfc5424 ignores empty maps, tested separately .take(amount) .enumerate() - .map(|(i, map)| (format!("id{}", i), map)) + .map(|(i, map)| (format!("id{i}"), map)) .collect() } diff --git a/src/sources/util/framestream.rs b/src/sources/util/framestream.rs index 314f01069822d..f8ce2ddced2c6 100644 --- a/src/sources/util/framestream.rs +++ b/src/sources/util/framestream.rs @@ -422,8 +422,7 @@ pub fn build_framestream_unix_source( if let Some(socket_permission) = frame_handler.socket_file_mode() { if !(448..=511).contains(&socket_permission) { return Err(format!( - "Invalid Socket permission {:#o}. Must between 0o700 and 0o777.", - socket_permission + "Invalid Socket permission {socket_permission:#o}. Must between 0o700 and 0o777." ) .into()); } @@ -1116,7 +1115,7 @@ mod test { for i in 0..total_events { join_handles.push(spawn_event_handling_tasks( - Bytes::from(format!("event_{}", i)), + Bytes::from(format!("event_{i}")), MockFrameHandler::new("test_content".to_string(), true, extra_routine.clone()), out.clone(), None, diff --git a/src/sources/util/grpc/decompression.rs b/src/sources/util/grpc/decompression.rs index bf23e1761b179..7437a044e2d15 100644 --- a/src/sources/util/grpc/decompression.rs +++ b/src/sources/util/grpc/decompression.rs @@ -42,8 +42,7 @@ impl CompressionScheme { .map(|s| { s.to_str().map(|s| s.to_string()).map_err(|_| { Status::unimplemented(format!( - "`{}` contains non-visible characters and is not a valid encoding", - GRPC_ENCODING_HEADER + "`{GRPC_ENCODING_HEADER}` contains non-visible characters and is not a valid encoding" )) }) }) @@ -53,8 +52,7 @@ impl CompressionScheme { Some(scheme) => match scheme.as_str() { "gzip" => Ok(Some(CompressionScheme::Gzip)), other => Err(Status::unimplemented(format!( - "compression scheme `{}` is not supported", - other + "compression scheme `{other}` is not supported" ))), }, }) diff --git a/src/sources/util/http/auth.rs b/src/sources/util/http/auth.rs index a75788cf42c04..1ac000f763c05 100644 --- a/src/sources/util/http/auth.rs +++ b/src/sources/util/http/auth.rs @@ -41,7 +41,7 @@ impl TryFrom<Option<&HeaderValue>> for HttpSourceAuth { Some(value) => { let token = value .to_str() .map_err(|error| format!("Failed stringify HeaderValue: {error:?}"))?
.to_owned(); Ok(HttpSourceAuth { token: Some(token) }) } diff --git a/src/sources/util/http/encoding.rs b/src/sources/util/http/encoding.rs index 9fc19117b711a..b2679b9cd925c 100644 --- a/src/sources/util/http/encoding.rs +++ b/src/sources/util/http/encoding.rs @@ -34,7 +34,7 @@ pub fn decode(header: &Option<String>, mut body: Bytes) -> Result<Bytes, ErrorMe encoding => { return Err(ErrorMessage::new( StatusCode::UNSUPPORTED_MEDIA_TYPE, - format!("Unsupported encoding {}", encoding), + format!("Unsupported encoding {encoding}"), )) } } @@ -51,6 +51,6 @@ fn handle_decode_error(encoding: &str, error: impl std::error::Error) -> ErrorMe }); ErrorMessage::new( StatusCode::UNPROCESSABLE_ENTITY, - format!("Failed decompressing payload with {} decoder.", encoding), + format!("Failed decompressing payload with {encoding} decoder."), ) } diff --git a/src/sources/util/net/mod.rs b/src/sources/util/net/mod.rs index db008c5d52322..27fd760663d03 100644 --- a/src/sources/util/net/mod.rs +++ b/src/sources/util/net/mod.rs @@ -81,7 +81,7 @@ impl fmt::Display for SocketListenAddr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Self::SocketAddr(ref addr) => addr.fmt(f), - Self::SystemdFd(offset) => write!(f, "systemd socket #{}", offset), + Self::SystemdFd(offset) => write!(f, "systemd socket #{offset}"), } } } @@ -134,7 +134,7 @@ impl From<SocketListenAddr> for String { if fd == 0 { "systemd".to_owned() } else { - format!("systemd#{}", fd) + format!("systemd#{fd}") } } } diff --git a/src/sources/vector/mod.rs b/src/sources/vector/mod.rs index 21d6b1d4f9fc8..30f119ec1ece5 100644 --- a/src/sources/vector/mod.rs +++ b/src/sources/vector/mod.rs @@ -281,7 +281,7 @@ mod tests { }; async fn run_test(vector_source_config_str: &str, addr: SocketAddr) { - let config = format!(r#"address = "{}""#, addr); + let config = format!(r#"address = "{addr}""#); let source: VectorConfig = toml::from_str(&config).unwrap(); let (tx, rx) = SourceSender::new_test(); @@ -317,7 +317,7 @@ mod tests { let addr = test_util::next_addr(); assert_source_compliance(&SOURCE_TAGS, async { - let config = format!(r#"address = "{}""#, addr); + let config = format!(r#"address = "{addr}""#); run_test(&config, addr).await; }) .await; @@ -329,9 +329,8 @@ mod tests { assert_source_compliance(&SOURCE_TAGS, async { let config = format!( - r#"address = "{}" - compression=true"#, - addr + r#"address = "{addr}" + compression=true"# ); run_test(&config, addr).await; }) diff --git a/src/tap/cmd.rs b/src/tap/cmd.rs index 1b7c0a999b281..89d08e7ffdaf6 100644 --- a/src/tap/cmd.rs +++ b/src/tap/cmd.rs @@ -28,7 +28,7 @@ pub(crate) async fn cmd(opts: &super::Opts, mut signal_rx: SignalRx) -> exitcode // features; the config is available even if `api` is disabled. let mut url = opts.url.clone().unwrap_or_else(|| { let addr = config::api::default_address().unwrap(); - Url::parse(&format!("http://{}/graphql", addr)) + Url::parse(&format!("http://{addr}/graphql")) .expect("Couldn't parse default API URL.
Please report this.") }); @@ -93,7 +93,7 @@ async fn run( Err(e) => { #[allow(clippy::print_stderr)] { - eprintln!("[tap] Couldn't connect to Vector API via WebSockets: {}", e); + eprintln!("[tap] Couldn't connect to Vector API via WebSockets: {e}"); } return exitcode::UNAVAILABLE; } diff --git a/src/test_util/components.rs b/src/test_util/components.rs index d36702ca11a0b..ff0ba5dcec0be 100644 --- a/src/test_util/components.rs +++ b/src/test_util/components.rs @@ -1,5 +1,6 @@ #![allow(clippy::print_stdout)] // tests #![allow(clippy::print_stderr)] // tests +#![allow(ungated_async_fn_track_caller)] // See https://github.com/rust-lang/rust/issues/87417 for more information #![deny(missing_docs)] //! This is a framework for testing components for their compliance with @@ -185,7 +186,7 @@ impl ComponentTester { event_test_util::debug_print_events(); metrics.sort_by(|a, b| a.name().cmp(b.name())); for metric in &metrics { - println!("{}", metric); + println!("{metric}"); } } @@ -224,10 +225,8 @@ impl ComponentTester { .collect::>(); let partial = partial_matches.join(""); - self.errors.push(format!( - " - Missing metric `{}{}`{}", - name, tag_suffix, partial - )); + self.errors + .push(format!(" - Missing metric `{name}{tag_suffix}`{partial}")); } } } @@ -235,7 +234,7 @@ impl ComponentTester { fn emitted_all_events(&mut self, names: &[&str]) { for name in names { if let Err(err_msg) = event_test_util::contains_name_once(name) { - self.errors.push(format!(" - {}", err_msg)); + self.errors.push(format!(" - {err_msg}")); } } } diff --git a/src/test_util/metrics.rs b/src/test_util/metrics.rs index 9a3bdca726b71..cb2cd3d85b608 100644 --- a/src/test_util/metrics.rs +++ b/src/test_util/metrics.rs @@ -162,29 +162,23 @@ macro_rules! series { pub fn assert_counter(metrics: &SplitMetrics, series: MetricSeries, expected: f64) { let actual_counter = read_counter_value(metrics, series.clone()); - assert!( - actual_counter.is_some(), - "counter '{}' was not found", - series - ); + assert!(actual_counter.is_some(), "counter '{series}' was not found"); let actual_counter_value = actual_counter.expect("counter must be valid"); assert_eq!( actual_counter_value, expected, - "expected {} for '{}', got {} instead", - expected, series, actual_counter_value + "expected {expected} for '{series}', got {actual_counter_value} instead" ); } pub fn assert_gauge(metrics: &SplitMetrics, series: MetricSeries, expected: f64) { let actual_gauge = read_gauge_value(metrics, series.clone()); - assert!(actual_gauge.is_some(), "gauge '{}' was not found", series); + assert!(actual_gauge.is_some(), "gauge '{series}' was not found"); let actual_gauge_value = actual_gauge.expect("gauge must be valid"); assert_eq!( actual_gauge_value, expected, - "expected {} for '{}', got {} instead", - expected, series, actual_gauge_value + "expected {expected} for '{series}', got {actual_gauge_value} instead" ); } @@ -196,7 +190,7 @@ pub fn assert_distribution( expected_bounds: &[(f64, u32)], ) { let samples = read_distribution_samples(metrics, series.clone()); - assert!(samples.is_some(), "distribution '{}' was not found", series); + assert!(samples.is_some(), "distribution '{series}' was not found"); let samples = samples.expect("distribution must be valid"); @@ -216,13 +210,11 @@ pub fn assert_distribution( assert_eq!( actual_sum, expected_sum, - "expected sum of '{}' to equal {}, got {} instead", - series, expected_sum, actual_sum + "expected sum of '{series}' to equal {expected_sum}, got {actual_sum} instead" ); assert_eq!( actual_count, 
expected_count, - "expected count of '{}' to equal {}, got {} instead", - series, expected_count, actual_count + "expected count of '{series}' to equal {expected_count}, got {actual_count} instead" ); for (i, (bound, count)) in expected_bounds.iter().enumerate() { @@ -236,7 +228,7 @@ pub fn assert_distribution( pub fn assert_set(metrics: &SplitMetrics, series: MetricSeries, expected_values: &[&str]) { let actual_values = read_set_values(metrics, series.clone()); - assert!(actual_values.is_some(), "set '{}' was not found", series); + assert!(actual_values.is_some(), "set '{series}' was not found"); let actual_values = actual_values.expect("set must be valid"); let expected_values = expected_values diff --git a/src/test_util/mock/sources/backpressure.rs b/src/test_util/mock/sources/backpressure.rs index e76600f539a24..8f7aeee3f5e65 100644 --- a/src/test_util/mock/sources/backpressure.rs +++ b/src/test_util/mock/sources/backpressure.rs @@ -43,7 +43,7 @@ impl SourceConfig for BackpressureSourceConfig { for i in 0.. { let _result = cx .out - .send_event(Event::Log(LogEvent::from(format!("event-{}", i)))) + .send_event(Event::Log(LogEvent::from(format!("event-{i}")))) .await; counter.fetch_add(1, Ordering::AcqRel); diff --git a/src/test_util/mod.rs b/src/test_util/mod.rs index 7c45c0f572544..6a2f147e57464 100644 --- a/src/test_util/mod.rs +++ b/src/test_util/mod.rs @@ -94,7 +94,7 @@ where let cfg = toml::to_string(&T::generate_config()).unwrap(); toml::from_str::(&cfg) - .unwrap_or_else(|e| panic!("Invalid config generated from string:\n\n{}\n'{}'", e, cfg)); + .unwrap_or_else(|e| panic!("Invalid config generated from string:\n\n{e}\n'{cfg}'")); } pub fn open_fixture(path: impl AsRef) -> crate::Result { diff --git a/src/top/cmd.rs b/src/top/cmd.rs index 0c7821d800ada..7c894a43abe64 100644 --- a/src/top/cmd.rs +++ b/src/top/cmd.rs @@ -32,7 +32,7 @@ pub async fn cmd(opts: &super::Opts) -> exitcode::ExitCode { // features; the config is available even if `api` is disabled let url = opts.url.clone().unwrap_or_else(|| { let addr = config::api::default_address().unwrap(); - Url::parse(&format!("http://{}/graphql", addr)) + Url::parse(&format!("http://{addr}/graphql")) .expect("Couldn't parse default API URL. 
Please report this.") }); diff --git a/src/top/dashboard.rs b/src/top/dashboard.rs index 2b3edb888dbf9..c1014de1967d2 100644 --- a/src/top/dashboard.rs +++ b/src/top/dashboard.rs @@ -71,7 +71,7 @@ impl HumanFormatter for i64 { 0 => "--".into(), n => match NumberPrefix::decimal(*n as f64) { NumberPrefix::Standalone(n) => n.to_string(), - NumberPrefix::Prefixed(p, n) => format!("{:.2} {}", n, p), + NumberPrefix::Prefixed(p, n) => format!("{n:.2} {p}"), }, } } @@ -83,7 +83,7 @@ impl HumanFormatter for i64 { 0 => "--".into(), n => match NumberPrefix::binary(*n as f64) { NumberPrefix::Standalone(n) => n.to_string(), - NumberPrefix::Prefixed(p, n) => format!("{:.2} {}B", n, p), + NumberPrefix::Prefixed(p, n) => format!("{n:.2} {p}B"), }, } } diff --git a/src/topology/builder.rs b/src/topology/builder.rs index 422dd641df21d..ce0fb57366541 100644 --- a/src/topology/builder.rs +++ b/src/topology/builder.rs @@ -95,7 +95,7 @@ pub(self) async fn load_enrichment_tables<'a>( let mut table = match table.inner.build(&config.global).await { Ok(table) => table, Err(error) => { - errors.push(format!("Enrichment Table \"{}\": {}", name, error)); + errors.push(format!("Enrichment Table \"{name}\": {error}")); continue; } }; @@ -272,7 +272,7 @@ pub async fn build_pieces( }; let server = match source.inner.build(context).await { Err(error) => { - errors.push(format!("Source \"{}\": {}", key, error)); + errors.push(format!("Source \"{key}\": {error}")); continue; } Ok(server) => server, @@ -390,7 +390,7 @@ pub async fn build_pieces( .await { Err(error) => { - errors.push(format!("Transform \"{}\": {}", key, error)); + errors.push(format!("Transform \"{key}\": {error}")); continue; } Ok(transform) => transform, @@ -454,7 +454,7 @@ pub async fn build_pieces( .await; match buffer { Err(error) => { - errors.push(format!("Sink \"{}\": {}", key, error)); + errors.push(format!("Sink \"{key}\": {error}")); continue; } Ok((tx, rx)) => (tx, Arc::new(Mutex::new(Some(rx.into_stream())))), @@ -470,7 +470,7 @@ pub async fn build_pieces( let (sink, healthcheck) = match sink.inner.build(cx).await { Err(error) => { - errors.push(format!("Sink \"{}\": {}", key, error)); + errors.push(format!("Sink \"{key}\": {error}")); continue; } Ok(built) => built, diff --git a/src/topology/schema.rs b/src/topology/schema.rs index 9f4dff0e1cb03..4547289ba0b73 100644 --- a/src/topology/schema.rs +++ b/src/topology/schema.rs @@ -240,7 +240,7 @@ pub(super) fn validate_sink_expectations( .errors() .iter() .cloned() - .map(|err| format!("schema error in component {}: {}", key, err)) + .map(|err| format!("schema error in component {key}: {err}")) .collect(), ); } diff --git a/src/topology/test/end_to_end.rs b/src/topology/test/end_to_end.rs index 33022aa0c250c..0925567a9f795 100644 --- a/src/topology/test/end_to_end.rs +++ b/src/topology/test/end_to_end.rs @@ -64,7 +64,7 @@ pub fn http_client( let sender = tokio::spawn(async move { let result = reqwest::Client::new() - .post(&format!("http://{}/", address)) + .post(&format!("http://{address}/")) .body(body) .send() .await @@ -94,8 +94,6 @@ inputs = ["in"] encoding.codec = "json" uri = "http://{address2}/" "#, - address1 = address1, - address2 = address2, ), Format::Toml, ) diff --git a/src/trace.rs b/src/trace.rs index 8fc9f5c497013..ee79e2a721c46 100644 --- a/src/trace.rs +++ b/src/trace.rs @@ -371,6 +371,6 @@ impl tracing::field::Visit for SpanFields { } fn record_debug(&mut self, field: &tracing_core::Field, value: &dyn std::fmt::Debug) { - self.record(field, format!("{:?}", value)); + 
diff --git a/src/transforms/aws_ec2_metadata.rs b/src/transforms/aws_ec2_metadata.rs
index c45948e24cc0c..fc5501c9fc3b6 100644
--- a/src/transforms/aws_ec2_metadata.rs
+++ b/src/transforms/aws_ec2_metadata.rs
@@ -485,10 +485,8 @@ impl MetadataClient {
         let mac = String::from_utf8_lossy(&mac[..]);
 
         if self.fields.contains(SUBNET_ID_KEY) {
-            let subnet_path = format!(
-                "/latest/meta-data/network/interfaces/macs/{}/subnet-id",
-                mac
-            );
+            let subnet_path =
+                format!("/latest/meta-data/network/interfaces/macs/{mac}/subnet-id");
 
             let subnet_path = subnet_path.parse().context(ParsePathSnafu {
                 value: subnet_path.clone(),
@@ -501,7 +499,7 @@ impl MetadataClient {
 
         if self.fields.contains(VPC_ID_KEY) {
             let vpc_path =
-                format!("/latest/meta-data/network/interfaces/macs/{}/vpc-id", mac);
+                format!("/latest/meta-data/network/interfaces/macs/{mac}/vpc-id");
 
             let vpc_path = vpc_path.parse().context(ParsePathSnafu {
                 value: vpc_path.clone(),
@@ -538,7 +536,7 @@ impl MetadataClient {
         }
 
         for tag in self.tags.clone() {
-            let tag_path = format!("/latest/meta-data/tags/instance/{}", tag);
+            let tag_path = format!("/latest/meta-data/tags/instance/{tag}");
 
             let tag_path = tag_path.parse().context(ParsePathSnafu {
                 value: tag_path.clone(),
@@ -855,7 +853,7 @@ mod integration_tests {
         let _server = tokio::spawn(server);
 
         let config = Ec2Metadata {
-            endpoint: format!("http://{}", addr),
+            endpoint: format!("http://{addr}"),
             refresh_timeout_secs: Duration::from_secs(1),
             ..Default::default()
         };
@@ -886,7 +884,7 @@ mod integration_tests {
         let _server = tokio::spawn(server);
 
        let config = Ec2Metadata {
-            endpoint: format!("http://{}", addr),
+            endpoint: format!("http://{addr}"),
             refresh_timeout_secs: Duration::from_secs(1),
             required: false,
             ..Default::default()
@@ -965,10 +963,10 @@ mod integration_tests {
         let log = LogEvent::default();
         let mut expected_log = log.clone();
-        expected_log.insert(format!("\"{}\"", PUBLIC_IPV4_KEY).as_str(), "192.0.2.54");
-        expected_log.insert(format!("\"{}\"", REGION_KEY).as_str(), "us-east-1");
+        expected_log.insert(format!("\"{PUBLIC_IPV4_KEY}\"").as_str(), "192.0.2.54");
+        expected_log.insert(format!("\"{REGION_KEY}\"").as_str(), "us-east-1");
         expected_log.insert(
-            format!("\"{}\"", TAGS_KEY).as_str(),
+            format!("\"{TAGS_KEY}\"").as_str(),
             BTreeMap::from([
                 ("Name".to_string(), Value::from("test-instance")),
                 ("Test".to_string(), Value::from("test-tag")),
diff --git a/src/transforms/lua/v1/mod.rs b/src/transforms/lua/v1/mod.rs
index 73a165970c44e..ec13921e3e003 100644
--- a/src/transforms/lua/v1/mod.rs
+++ b/src/transforms/lua/v1/mod.rs
@@ -100,7 +100,7 @@ impl Lua {
         let additional_paths = search_dirs
             .iter()
-            .map(|d| format!("{}/?.lua", d))
+            .map(|d| format!("{d}/?.lua"))
             .collect::<Vec<_>>()
             .join(";");
@@ -112,7 +112,7 @@ impl Lua {
         let current_paths = package
             .get::<_, String>("path")
             .unwrap_or_else(|_| ";".to_string());
-        let paths = format!("{};{}", additional_paths, current_paths);
+        let paths = format!("{additional_paths};{current_paths}");
         package.set("path", paths).context(InvalidLuaSnafu)?;
     }
diff --git a/src/transforms/lua/v2/mod.rs b/src/transforms/lua/v2/mod.rs
index e2515c2bb6ea1..453b4c73c194d 100644
--- a/src/transforms/lua/v2/mod.rs
+++ b/src/transforms/lua/v2/mod.rs
@@ -232,7 +232,7 @@ impl Lua {
         let current_paths = package
             .get::<_, String>("path")
             .unwrap_or_else(|_| ";".to_string());
-        let paths = format!("{};{}", additional_paths, current_paths);
+        let paths = format!("{additional_paths};{current_paths}");
         package.set("path", paths)?;
     }
 
@@ -968,8 +968,7 @@ mod tests {
         "#,
             |tx, out| async move {
                 let n: usize = 10;
-                let events =
-                    (0..n).map(|i| Event::Log(LogEvent::from(format!("program me {}", i))));
+                let events = (0..n).map(|i| Event::Log(LogEvent::from(format!("program me {i}"))));
                 for event in events {
                     tx.send(event).await.unwrap();
                     assert!(out.lock().await.recv().await.is_some());
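
For context on the two `package.path` hunks above: the Lua transforms prepend their configured search directories to Lua's default module path, which is a semicolon-separated list of `?.lua` patterns. A rough sketch of that string assembly, with hypothetical directories (the real values come from the transform's `search_dirs` setting):

    // Each directory becomes a `?.lua` pattern; entries are joined with
    // semicolons to match Lua's package.path syntax.
    let search_dirs = ["/etc/vector/lua", "/opt/lua"];
    let additional_paths = search_dirs
        .iter()
        .map(|d| format!("{d}/?.lua"))
        .collect::<Vec<_>>()
        .join(";");
    let current_paths = ";".to_string(); // the fallback used above
    let paths = format!("{additional_paths};{current_paths}");
    assert_eq!(paths, "/etc/vector/lua/?.lua;/opt/lua/?.lua;;");
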
diff --git a/src/transforms/reduce/merge_strategy.rs b/src/transforms/reduce/merge_strategy.rs
index 7cb954fa91e78..92a1a5a4033de 100644
--- a/src/transforms/reduce/merge_strategy.rs
+++ b/src/transforms/reduce/merge_strategy.rs
@@ -327,7 +327,7 @@ impl ReduceValueMerger for TimestampWindowMerger {
     }
 
     fn insert_into(self: Box<Self>, k: String, v: &mut LogEvent) -> Result<(), String> {
-        v.insert(format!("{}_end", k).as_str(), Value::Timestamp(self.latest));
+        v.insert(format!("{k}_end").as_str(), Value::Timestamp(self.latest));
         v.insert(k.as_str(), Value::Timestamp(self.started));
         Ok(())
     }
diff --git a/src/transforms/remap.rs b/src/transforms/remap.rs
index 8026b92e59340..5a8725b713f54 100644
--- a/src/transforms/remap.rs
+++ b/src/transforms/remap.rs
@@ -462,16 +462,16 @@ where
             },
             Event::Metric(ref mut metric) => {
                 let m = log_schema().metadata_key();
-                metric.replace_tag(format!("{}.dropped.reason", m), reason.into());
+                metric.replace_tag(format!("{m}.dropped.reason"), reason.into());
                 metric.replace_tag(
-                    format!("{}.dropped.component_id", m),
+                    format!("{m}.dropped.component_id"),
                     self.component_key
                         .as_ref()
                         .map(ToString::to_string)
                         .unwrap_or_else(String::new),
                 );
-                metric.replace_tag(format!("{}.dropped.component_type", m), "remap".into());
-                metric.replace_tag(format!("{}.dropped.component_kind", m), "transform".into());
+                metric.replace_tag(format!("{m}.dropped.component_type"), "remap".into());
+                metric.replace_tag(format!("{m}.dropped.component_kind"), "transform".into());
             }
             Event::Trace(ref mut trace) => {
                 trace.insert(
@@ -1490,7 +1490,7 @@ mod tests {
         match (buf.pop(), err_buf.pop()) {
             (Some(good), None) => Ok(good),
             (None, Some(bad)) => Err(bad),
-            (a, b) => panic!("expected output xor error output, got {:?} and {:?}", a, b),
+            (a, b) => panic!("expected output xor error output, got {a:?} and {b:?}"),
         }
     }
diff --git a/src/transforms/sample.rs b/src/transforms/sample.rs
index d68c3a28cfced..f7c03e29a8586 100644
--- a/src/transforms/sample.rs
+++ b/src/transforms/sample.rs
@@ -151,7 +151,7 @@ mod tests {
     fn condition_contains(key: &str, needle: &str) -> Condition {
         let vrl_config = VrlConfig {
-            source: format!(r#"contains!(."{}", "{}")"#, key, needle),
+            source: format!(r#"contains!(."{key}", "{needle}")"#),
             runtime: Default::default(),
         };
diff --git a/src/transforms/tag_cardinality_limit/tag_value_set.rs b/src/transforms/tag_cardinality_limit/tag_value_set.rs
index 599a021df110b..1f5f26aeb0fe3 100644
--- a/src/transforms/tag_cardinality_limit/tag_value_set.rs
+++ b/src/transforms/tag_cardinality_limit/tag_value_set.rs
@@ -19,7 +19,7 @@ enum TagValueSetStorage {
 impl fmt::Debug for TagValueSetStorage {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            TagValueSetStorage::Set(set) => write!(f, "Set({:?})", set),
+            TagValueSetStorage::Set(set) => write!(f, "Set({set:?})"),
             TagValueSetStorage::Bloom(_) => write!(f, "Bloom"),
         }
     }
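
One subtlety in the `sample.rs` hunk above: capture also works inside raw format strings, since only the braces are significant to `format!`. The raw string avoids escaping the embedded quotes of the generated VRL expression. A brief sketch with invented values:

    // `{key}` and `{needle}` are captured from the enclosing scope even
    // though the format string is a raw string literal.
    let key = "message";
    let needle = "error";
    let source = format!(r#"contains!(."{key}", "{needle}")"#);
    assert_eq!(source, r#"contains!(."message", "error")"#);
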
diff --git a/src/unit_test.rs b/src/unit_test.rs
index 59f3a6c7251bc..5dbd7530d1594 100644
--- a/src/unit_test.rs
+++ b/src/unit_test.rs
@@ -111,12 +111,12 @@ pub async fn cmd(opts: &Opts, signal_handler: &mut signal::SignalHandler) -> exi
     for (test_name, fails) in aggregated_test_errors {
         #[allow(clippy::print_stdout)]
         {
-            println!("\ntest {}:\n", test_name);
+            println!("\ntest {test_name}:\n");
         }
         for fail in fails {
             #[allow(clippy::print_stdout)]
             {
-                println!("{}\n", fail);
+                println!("{fail}\n");
             }
         }
     }
diff --git a/src/validate.rs b/src/validate.rs
index 8d80b3e26ce3c..590424e1faa30 100644
--- a/src/validate.rs
+++ b/src/validate.rs
@@ -227,17 +227,17 @@ async fn validate_healthchecks(
                     .healthcheck()
                     .enabled
                 {
-                    fmt.success(format!("Health check \"{}\"", id));
+                    fmt.success(format!("Health check \"{id}\""));
                 } else {
-                    fmt.warning(format!("Health check disabled for \"{}\"", id));
+                    fmt.warning(format!("Health check disabled for \"{id}\""));
                     validated &= !opts.deny_warnings;
                 }
             }
-            Ok(Err(e)) => failed(format!("Health check for \"{}\" failed: {}", id, e)),
+            Ok(Err(e)) => failed(format!("Health check for \"{id}\" failed: {e}")),
             Err(error) if error.is_cancelled() => {
-                failed(format!("Health check for \"{}\" was cancelled", id))
+                failed(format!("Health check for \"{id}\" was cancelled"))
             }
-            Err(_) => failed(format!("Health check for \"{}\" panicked", id)),
+            Err(_) => failed(format!("Health check for \"{id}\" panicked")),
         }
     }
diff --git a/vdev/src/app.rs b/vdev/src/app.rs
index 9f5f9b3f6029a..832c91a80e424 100644
--- a/vdev/src/app.rs
+++ b/vdev/src/app.rs
@@ -106,10 +106,7 @@ impl CommandExt for Command {
         let result = self.output();
         progress_bar.finish_and_clear();
 
-        let output = match result {
-            Ok(output) => output,
-            Err(_) => bail!("could not run command"),
-        };
+        let Ok(output) = result else { bail!("could not run command") };
 
         if output.status.success() {
             Ok(())