chore(deps): Update Rust to 1.67.1 #16212

Closed · wants to merge 3 commits
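This bumps the Rust toolchain to 1.67.1 and updates the code the newer toolchain now lints on. Nearly every hunk below rewrites positional format!/println!/write!/panic! arguments as inline (captured) format arguments, presumably to satisfy clippy's uninlined_format_args lint, which appears to have become warn-by-default around this release. A minimal before/after sketch of the pattern, taken from build.rs:

// before: the value is passed positionally
println!("cargo:rerun-if-env-changed={}", env_var);

// after: the identifier is captured straight from the surrounding scope
// (inline format arguments were stabilized in Rust 1.58)
println!("cargo:rerun-if-env-changed={env_var}");

The formatted output is identical in both forms; only the source changes.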
12 changes: 5 additions & 7 deletions build.rs
@@ -20,7 +20,7 @@ impl TrackedEnv {

pub fn emit_rerun_stanzas(&self) {
for env_var in &self.tracked {
println!("cargo:rerun-if-env-changed={}", env_var);
println!("cargo:rerun-if-env-changed={env_var}");
}
}
}
@@ -33,9 +33,9 @@ enum ConstantValue {
impl ConstantValue {
pub fn as_parts(&self) -> (&'static str, String) {
match &self {
- ConstantValue::Required(value) => ("&str", format!("\"{}\"", value)),
+ ConstantValue::Required(value) => ("&str", format!("\"{value}\"")),
ConstantValue::Optional(value) => match value {
- Some(value) => ("Option<&str>", format!("Some(\"{}\")", value)),
+ Some(value) => ("Option<&str>", format!("Some(\"{value}\")")),
None => ("Option<&str>", "None".to_string()),
},
}
@@ -79,10 +79,8 @@ impl BuildConstants {

for (name, desc, value) in self.values {
let (const_type, const_val) = value.as_parts();
- let full = format!(
-     "#[doc=r#\"{}\"#]\npub const {}: {} = {};\n",
-     desc, name, const_type, const_val
- );
+ let full =
+     format!("#[doc=r#\"{desc}\"#]\npub const {name}: {const_type} = {const_val};\n");
output_file.write_all(full.as_ref())?;
}

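For reference, the string assembled above is written into a generated source file, so the inlined-arguments form has to produce byte-identical output. A hypothetical emitted entry (the constant name, doc text, and value here are made up for illustration) would look like:

#[doc=r#"Illustrative build-time constant"#]
pub const EXAMPLE_CONSTANT: &str = "example-value";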
21 changes: 9 additions & 12 deletions lib/codecs/src/decoding/format/gelf.rs
@@ -84,11 +84,9 @@ impl GelfDeserializer {

// GELF spec defines the version as 1.1 which has not changed since 2013
if parsed.version != GELF_VERSION {
- return Err(format!(
-     "{} does not match GELF spec version ({})",
-     VERSION, GELF_VERSION
- )
- .into());
+ return Err(
+     format!("{VERSION} does not match GELF spec version ({GELF_VERSION})").into(),
+ );
}

log.insert(VERSION, parsed.version.to_string());
@@ -140,16 +138,15 @@ impl GelfDeserializer {
// per GELF spec, Additional field names must be prefixed with an underscore
if !key.starts_with('_') {
return Err(format!(
"'{}' field is invalid. \
Additional field names must be prefixed with an underscore.",
key
"'{key}' field is invalid. \
Additional field names must be prefixed with an underscore."
)
.into());
}
// per GELF spec, Additional field names must be characters dashes or dots
if !VALID_FIELD_REGEX.is_match(key) {
return Err(format!("'{}' field contains invalid characters. Field names may \
contain only letters, numbers, underscores, dashes and dots.", key).into());
return Err(format!("'{key}' field contains invalid characters. Field names may \
contain only letters, numbers, underscores, dashes and dots.").into());
}

// per GELF spec, Additional field values must be either strings or numbers
@@ -165,8 +162,8 @@ impl GelfDeserializer {
serde_json::Value::Array(_) => "array",
serde_json::Value::Object(_) => "object",
};
return Err(format!("The value type for field {} is an invalid type ({}). Additional field values \
should be either strings or numbers.", key, type_).into());
return Err(format!("The value type for field {key} is an invalid type ({type_}). Additional field values \
should be either strings or numbers.").into());
}
}
}
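One caveat worth keeping in mind with these rewrites: inline capture only works for bare identifiers such as key and type_ above; field accesses, indexing, and method calls still have to be passed as explicit arguments. A small illustrative sketch (the names are invented for the example):

let parsed = ("host", 42);
let field = parsed.0;
// a plain identifier can be captured; an expression cannot
let msg = format!("field '{field}' has value {value}", value = parsed.1);
// format!("field '{parsed.0}'") would not compile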
2 changes: 1 addition & 1 deletion lib/codecs/src/decoding/format/json.rs
@@ -79,7 +79,7 @@ impl Deserializer for JsonDeserializer {
}

let json: serde_json::Value = serde_json::from_slice(&bytes)
.map_err(|error| format!("Error parsing JSON: {:?}", error))?;
.map_err(|error| format!("Error parsing JSON: {error:?}"))?;

// If the root is an Array, split it into multiple events
let mut events = match json {
2 changes: 1 addition & 1 deletion lib/codecs/src/decoding/format/native_json.rs
@@ -57,7 +57,7 @@ impl Deserializer for NativeJsonDeserializer {
}

let json: serde_json::Value = serde_json::from_slice(&bytes)
.map_err(|error| format!("Error parsing JSON: {:?}", error))?;
.map_err(|error| format!("Error parsing JSON: {error:?}"))?;

let events = match json {
serde_json::Value::Array(values) => values
2 changes: 1 addition & 1 deletion lib/codecs/src/decoding/framing/octet_counting.rs
@@ -395,7 +395,7 @@ mod tests {
buffer.put(&b"defghijklmnopqrstuvwxyzand here we are"[..]);
let result = decoder.decode(&mut buffer);

println!("{:?}", result);
println!("{result:?}");
assert!(result.is_err());
assert_eq!(b"and here we are"[..], buffer);
}
4 changes: 2 additions & 2 deletions lib/codecs/src/decoding/mod.rs
@@ -44,8 +44,8 @@ pub enum Error {
impl std::fmt::Display for Error {
fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
- Self::FramingError(error) => write!(formatter, "FramingError({})", error),
- Self::ParsingError(error) => write!(formatter, "ParsingError({})", error),
+ Self::FramingError(error) => write!(formatter, "FramingError({error})"),
+ Self::ParsingError(error) => write!(formatter, "ParsingError({error})"),
}
}
}
2 changes: 1 addition & 1 deletion lib/codecs/src/encoding/format/avro.rs
@@ -23,7 +23,7 @@ impl AvroSerializerConfig {
/// Build the `AvroSerializer` from this configuration.
pub fn build(&self) -> Result<AvroSerializer, BuildError> {
let schema = apache_avro::Schema::parse_str(&self.avro.schema)
.map_err(|error| format!("Failed building Avro serializer: {}", error))?;
.map_err(|error| format!("Failed building Avro serializer: {error}"))?;
Ok(AvroSerializer { schema })
}

4 changes: 2 additions & 2 deletions lib/codecs/src/encoding/mod.rs
@@ -37,8 +37,8 @@ pub enum Error {
impl std::fmt::Display for Error {
fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
- Self::FramingError(error) => write!(formatter, "FramingError({})", error),
- Self::SerializingError(error) => write!(formatter, "SerializingError({})", error),
+ Self::FramingError(error) => write!(formatter, "FramingError({error})"),
+ Self::SerializingError(error) => write!(formatter, "SerializingError({error})"),
}
}
}
2 changes: 1 addition & 1 deletion lib/codecs/tests/native.rs
@@ -236,7 +236,7 @@ fn rebuild_fixtures(proto: &str, deserializer: &dyn Deserializer, serializer: &m
.into_iter()
.collect();
let mut out = File::create(&new_path).unwrap_or_else(|error| {
panic!("Could not create rebuilt file {:?}: {:?}", new_path, error)
panic!("Could not create rebuilt file {new_path:?}: {error:?}")
});
out.write_all(&buf).expect("Could not write rebuilt data");
out.flush().expect("Could not write rebuilt data");
4 changes: 2 additions & 2 deletions lib/datadog/grok/src/filters/array.rs
@@ -95,12 +95,12 @@ pub fn parse<'a>(
delimiter: Option<&'a str>,
) -> Result<Vec<Value>, String> {
let result = parse_array(brackets, delimiter)(input)
.map_err(|_| format!("could not parse '{}' as array", input))
.map_err(|_| format!("could not parse '{input}' as array"))
.and_then(|(rest, result)| {
rest.trim()
.is_empty()
.then_some(result)
- .ok_or_else(|| format!("could not parse '{}' as array", input))
+ .ok_or_else(|| format!("could not parse '{input}' as array"))
})?;

Ok(result)
2 changes: 1 addition & 1 deletion lib/datadog/grok/src/filters/keyvalue.rs
@@ -175,7 +175,7 @@ fn parse<'a>(
quotes,
value_re,
)
.map_err(|_| format!("could not parse '{}' as 'keyvalue'", input))?;
.map_err(|_| format!("could not parse '{input}' as 'keyvalue'"))?;

if rest.trim().is_empty() {
Ok(result)
12 changes: 6 additions & 6 deletions lib/datadog/grok/src/grok.rs
@@ -180,13 +180,13 @@ impl Grok {

if let Some(definition) = m.at(DEFINITION_INDEX) {
self.insert_definition(raw_pattern, definition);
name = format!("{}={}", name, definition);
name = format!("{name}={definition}");
}

// Since a pattern with a given name can show up more than once, we need to
// loop through the number of matches found and apply the transformations
// on each of them.
- for _ in 0..named_regex.matches(&format!("%{{{}}}", name)).count() {
+ for _ in 0..named_regex.matches(&format!("%{{{name}}}")).count() {
// Check if we have a definition for the raw pattern key and fail quickly
// if not.
let pattern_definition = match self.definitions.get(raw_pattern) {
@@ -200,7 +200,7 @@
// engine understands and uses a named group.

let replacement = if with_alias_only && m.at(ALIAS_INDEX).is_none() {
format!("(?:{})", pattern_definition)
format!("(?:{pattern_definition})")
} else {
// If an alias is specified by the user use that one to
// match the name<index> conversion, otherwise just use
Expand All @@ -210,16 +210,16 @@ impl Grok {
Some(a) => String::from(a),
None => name.clone(),
},
format!("name{}", index),
format!("name{index}"),
);

format!("(?<name{}>{})", index, pattern_definition)
format!("(?<name{index}>{pattern_definition})")
};

// Finally, look for the original %{...} style pattern and
// replace it with our replacement (only the first occurrence
// since we are iterating one by one).
- named_regex = named_regex.replacen(&format!("%{{{}}}", name), &replacement, 1);
+ named_regex = named_regex.replacen(&format!("%{{{name}}}"), &replacement, 1);

index += 1;
}
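The grok replacements above are easier to read once you recall that doubled braces are literal braces inside a format string, so %{{{name}}} expands to a literal %{...} wrapper around the captured name. A quick sketch of that expansion (the values are illustrative):

let name = "WORD";
let index = 3;
let pattern_definition = r"\w+";
// {{ and }} are literal braces; {name} is the capture
assert_eq!(format!("%{{{name}}}"), "%{WORD}");
assert_eq!(format!("(?<name{index}>{pattern_definition})"), r"(?<name3>\w+)");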
8 changes: 4 additions & 4 deletions lib/datadog/grok/src/matchers/date.rs
@@ -77,7 +77,7 @@ pub fn convert_time_format(format: &str) -> std::result::Result<String, String>
time_format.push_str("%:z");
}
}
_ => return Err(format!("invalid date format '{}'", format)),
_ => return Err(format!("invalid date format '{format}'")),
}
} else if c == '\''
// quoted literal
@@ -125,9 +125,9 @@ fn parse_offset(tz: &str) -> Result<FixedOffset, String> {
}
let offset_format = if tz.contains(':') { "%:z" } else { "%z" };
// apparently the easiest way to parse tz offset is parsing the complete datetime
let date_str = format!("2020-04-12 22:10:57 {}", tz);
let date_str = format!("2020-04-12 22:10:57 {tz}");
let datetime =
- DateTime::parse_from_str(&date_str, &format!("%Y-%m-%d %H:%M:%S {}", offset_format))
+ DateTime::parse_from_str(&date_str, &format!("%Y-%m-%d %H:%M:%S {offset_format}"))
.map_err(|e| e.to_string())?;
Ok(datetime.timezone())
}
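As the comment in parse_offset notes, chrono does not expose a parser for a bare UTC offset, so the helper wraps the offset in a dummy timestamp and parses the whole string. A self-contained sketch of the same trick (the function name and dummy date are illustrative):

use chrono::{DateTime, FixedOffset};

fn offset_only(tz: &str) -> Result<FixedOffset, String> {
    // %:z expects "+02:00", %z expects "+0200"
    let offset_format = if tz.contains(':') { "%:z" } else { "%z" };
    let datetime = DateTime::parse_from_str(
        &format!("2020-04-12 22:10:57 {tz}"),
        &format!("%Y-%m-%d %H:%M:%S {offset_format}"),
    )
    .map_err(|e| e.to_string())?;
    // the parsed datetime's zone is the offset we wanted
    Ok(datetime.timezone())
}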
@@ -203,7 +203,7 @@ pub fn time_format_to_regex(
}
with_tz = true;
}
_ => return Err(format!("invalid date format '{}'", format)),
_ => return Err(format!("invalid date format '{format}'")),
}
} else if c == '\'' {
// quoted literal
12 changes: 6 additions & 6 deletions lib/datadog/grok/src/parse_grok_pattern.rs
@@ -16,7 +16,7 @@ pub fn parse_grok_pattern(input: &str) -> Result<GrokPattern, String> {
.parse(input, lexer)
.map_err(|e| match e {
ParseError::User { error } => error.to_string(),
_ => format!("invalid grok pattern: {}", input),
_ => format!("invalid grok pattern: {input}"),
})
}

@@ -41,7 +41,7 @@
fn parse_grok_filter() {
let input = r#"%{date:e-http.status.abc[".\""]:integer("a. df",.123,1.23e-32, true, null, 123e-5)}"#;
let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
panic!("Problem parsing grok: {:?}", error);
panic!("Problem parsing grok: {error:?}");
});
assert_eq!(parsed.match_fn.name, "date");
let destination = parsed.destination.unwrap();
@@ -72,7 +72,7 @@
fn empty_field() {
let input = r#"%{data:}"#;
let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
panic!("Problem parsing grok: {:?}", error);
panic!("Problem parsing grok: {error:?}");
});
assert_eq!(parsed.destination, None);
}
@@ -81,7 +81,7 @@
fn escaped_quotes() {
let input = r#"%{data:field:filter("escaped \"quotes\"")}"#;
let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
panic!("Problem parsing grok: {:?}", error);
panic!("Problem parsing grok: {error:?}");
});
assert_eq!(
parsed.destination,
@@ -99,7 +99,7 @@
fn empty_field_with_filter() {
let input = r#"%{data::json}"#;
let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
panic!("Problem parsing grok: {:?}", error);
panic!("Problem parsing grok: {error:?}");
});
assert_eq!(
parsed.destination,
@@ -126,7 +126,7 @@
fn escaped_new_line() {
let input = r#"%{data::array("\\n")}"#;
let parsed = parse_grok_pattern(input).unwrap_or_else(|error| {
panic!("Problem parsing grok: {:?}", error);
panic!("Problem parsing grok: {error:?}");
});
assert_eq!(
parsed.destination,
2 changes: 1 addition & 1 deletion lib/datadog/grok/src/parse_grok_rules.rs
@@ -304,7 +304,7 @@ fn resolve_grok_pattern(
resolves_match_function(grok_alias.clone(), pattern, context)?;

if let Some(grok_alias) = &grok_alias {
context.append_regex(&format!(":{}", grok_alias));
context.append_regex(&format!(":{grok_alias}"));
}
context.append_regex("}");
}
12 changes: 6 additions & 6 deletions lib/datadog/search-syntax/src/node.rs
@@ -42,9 +42,9 @@ pub enum ComparisonValue {
impl std::fmt::Display for ComparisonValue {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
- Self::String(s) => write!(f, "{}", s),
- Self::Integer(num) => write!(f, "{}", num),
- Self::Float(num) => write!(f, "{}", num),
+ Self::String(s) => write!(f, "{s}"),
+ Self::Integer(num) => write!(f, "{num}"),
+ Self::Float(num) => write!(f, "{num}"),
Self::Unbounded => write!(f, "*"),
}
}
@@ -181,8 +181,8 @@ impl QueryNode {
match self {
QueryNode::MatchAllDocs => String::from("*:*"),
QueryNode::MatchNoDocs => String::from("-*:*"),
- QueryNode::AttributeExists { attr } => format!("_exists_:{}", attr),
- QueryNode::AttributeMissing { attr } => format!("_missing_:{}", attr),
+ QueryNode::AttributeExists { attr } => format!("_exists_:{attr}"),
+ QueryNode::AttributeMissing { attr } => format!("_missing_:{attr}"),
QueryNode::AttributeRange {
attr,
lower,
@@ -339,7 +339,7 @@ impl QueryNode {
if attr == DEFAULT_FIELD {
String::new()
} else {
format!("{}:", attr)
format!("{attr}:")
}
}
}