Commit 5f6e07e

chore(deps): Upgrade to Rust 1.50.0 (#6428)
* chore(deps): Upgrade Rust to 1.50.0

* Replace deprecated compare_and_swap

  I believe this is the equivalent compare_exchange based on the table in the docs:

      Original  Success  Failure
      Relaxed   Relaxed  Relaxed
      Acquire   Acquire  Acquire
      Release   Release  Relaxed
      AcqRel    AcqRel   Acquire
      SeqCst    SeqCst   SeqCst

  https://doc.rust-lang.org/std/sync/atomic/struct.AtomicUsize.html#migrating-to-compare_exchange-and-compare_exchange_weak

  Also resolve all clippy errors.

Signed-off-by: Jesse Szwedko <[email protected]>
1 parent: fafbdb2
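To make that mapping concrete, here is a minimal sketch of the migration described above (illustrative only; the flag atomic and the SeqCst orderings are example choices, not code taken from this commit's diff):

    use std::sync::atomic::{AtomicUsize, Ordering};

    fn main() {
        let flag = AtomicUsize::new(0);

        // Deprecated since Rust 1.50:
        //     let swapped = flag.compare_and_swap(0, 1, Ordering::SeqCst) == 0;
        //
        // compare_exchange takes separate success/failure orderings; per the
        // table above, SeqCst maps to (SeqCst, SeqCst). It returns a Result,
        // so success is checked with is_ok() instead of comparing the old value.
        let swapped = flag
            .compare_exchange(0, 1, Ordering::SeqCst, Ordering::SeqCst)
            .is_ok();

        assert!(swapped);
    }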

39 files changed (+238, -222 lines)

lib/prometheus-parser/src/lib.rs

Lines changed: 12 additions & 6 deletions
@@ -605,7 +605,8 @@ mod test {
         assert!(matches!(
             error,
             ParserError::WithLine {
-                kind: ErrorKind::ExpectedChar { expected: ',', .. }, ..
+                kind: ErrorKind::ExpectedChar { expected: ',', .. },
+                ..
             }
         ));

@@ -614,7 +615,8 @@ mod test {
         assert!(matches!(
             error,
             ParserError::WithLine {
-                kind: ErrorKind::InvalidMetricKind { .. }, ..
+                kind: ErrorKind::InvalidMetricKind { .. },
+                ..
             }
         ));

@@ -623,7 +625,8 @@ mod test {
         assert!(matches!(
             error,
             ParserError::WithLine {
-                kind: ErrorKind::ExpectedSpace { .. }, ..
+                kind: ErrorKind::ExpectedSpace { .. },
+                ..
             }
         ));

@@ -632,7 +635,8 @@ mod test {
         assert!(matches!(
             error,
             ParserError::WithLine {
-                kind: ErrorKind::ExpectedChar { expected: '"', .. }, ..
+                kind: ErrorKind::ExpectedChar { expected: '"', .. },
+                ..
             }
         ));

@@ -641,7 +645,8 @@ mod test {
         assert!(matches!(
             error,
             ParserError::WithLine {
-                kind: ErrorKind::ExpectedChar { expected: '"', .. }, ..
+                kind: ErrorKind::ExpectedChar { expected: '"', .. },
+                ..
             }
         ));

@@ -650,7 +655,8 @@ mod test {
         assert!(matches!(
             error,
             ParserError::WithLine {
-                kind: ErrorKind::ParseFloatError { .. }, ..
+                kind: ErrorKind::ParseFloatError { .. },
+                ..
             }
         ));
     }

lib/prometheus-parser/src/line.rs

Lines changed: 1 addition & 4 deletions
@@ -620,10 +620,7 @@ mod test {

         let input = wrap(r#"{ a="b" ,, c="d" }"#);
         let error = Metric::parse_labels(&input).unwrap_err().into();
-        assert!(matches!(
-            error,
-            ErrorKind::ParseNameError { .. }
-        ));
+        assert!(matches!(error, ErrorKind::ParseNameError { .. }));
     }

     #[test]

lib/shared/src/conversion.rs

Lines changed: 5 additions & 5 deletions
@@ -184,11 +184,11 @@ fn parse_bool(s: &str) -> Result<bool, Error> {

 /// Does the format specifier have a time zone option?
 fn format_has_zone(fmt: &str) -> bool {
-    fmt.find("%Z").is_some()
-        || fmt.find("%z").is_some()
-        || fmt.find("%:z").is_some()
-        || fmt.find("%#z").is_some()
-        || fmt.find("%+").is_some()
+    fmt.contains("%Z")
+        || fmt.contains("%z")
+        || fmt.contains("%:z")
+        || fmt.contains("%#z")
+        || fmt.contains("%+")
 }

 /// Convert a timestamp with a non-UTC time zone into UTC

lib/vrl/cli/src/cmd.rs

Lines changed: 6 additions & 7 deletions
@@ -73,14 +73,13 @@ fn run(opts: &Opts) -> Result<(), Error> {
     }
 }

-#[cfg(feature = "repl")]
 fn repl(objects: Vec<Value>) -> Result<(), Error> {
-    repl::run(objects)
-}
-
-#[cfg(not(feature = "repl"))]
-fn repl(_: Vec<Value>) -> Result<(), Error> {
-    Err(Error::ReplFeature)
+    if cfg!(feature = "repl") {
+        repl::run(objects);
+        Ok(())
+    } else {
+        Err(Error::ReplFeature)
+    }
 }

 fn execute(object: &mut impl Target, source: String) -> Result<Value, Error> {

lib/vrl/cli/src/lib.rs

Lines changed: 0 additions & 1 deletion
@@ -18,7 +18,6 @@ pub enum Error {
     #[error("json error")]
     Json(#[from] serde_json::Error),

-    #[cfg(not(feature = "repl"))]
     #[error("repl feature disabled, program input required")]
     ReplFeature,
 }

lib/vrl/cli/src/repl.rs

Lines changed: 1 addition & 4 deletions
@@ -1,4 +1,3 @@
-use crate::Error;
 use indoc::indoc;
 use lazy_static::lazy_static;
 use prettytable::{format, Cell, Row, Table};
@@ -20,7 +19,7 @@ lazy_static! {
 const DOCS_URL: &str = "https://vector.dev/docs/reference/vrl";
 const ERRORS_URL_ROOT: &str = "https://errors.vrl.dev";

-pub(crate) fn run(mut objects: Vec<Value>) -> Result<(), Error> {
+pub(crate) fn run(mut objects: Vec<Value>) {
     let mut index = 0;
     let func_docs_regex = Regex::new(r"^help\sdocs\s(\w{1,})$").unwrap();
     let error_docs_regex = Regex::new(r"^help\serror\s(\w{1,})$").unwrap();
@@ -92,8 +91,6 @@ pub(crate) fn run(mut objects: Vec<Value>) -> Result<(), Error> {
             }
         }
     }
-
-    Ok(())
 }

 fn resolve(

lib/vrl/parser/src/lex.rs

Lines changed: 43 additions & 40 deletions
@@ -5,7 +5,8 @@ use std::iter::Peekable;
 use std::str::CharIndices;

 pub type Tok<'input> = Token<&'input str>;
-pub type Spanned<'input, Loc> = Result<(Loc, Tok<'input>, Loc), Error>;
+pub type SpannedResult<'input, Loc> = Result<Spanned<'input, Loc>, Error>;
+pub type Spanned<'input, Loc> = (Loc, Tok<'input>, Loc);

 #[derive(thiserror::Error, Clone, Debug, PartialEq)]
 pub enum Error {
@@ -447,7 +448,7 @@ impl StringLiteral<&str> {
 // -----------------------------------------------------------------------------

 impl<'input> Iterator for Lexer<'input> {
-    type Item = Spanned<'input, usize>;
+    type Item = SpannedResult<'input, usize>;

     fn next(&mut self) -> Option<Self::Item> {
         use Token::*;
@@ -461,7 +462,7 @@ impl<'input> Iterator for Lexer<'input> {
             // represent a physical character, instead it is a boundary marker.
             if self.query_start(start) {
                 // dbg!("LQuery"); // NOTE: uncomment this for debugging
-                return self.token2(start, start + 1, LQuery);
+                return Some(Ok(self.token2(start, start + 1, LQuery)));
             }

             // Check if we need to emit a `RQuery` token.
@@ -470,7 +471,7 @@ impl<'input> Iterator for Lexer<'input> {
             // represent a physical character, instead it is a boundary marker.
             if let Some(pos) = self.query_end(start) {
                 // dbg!("RQuery"); // NOTE: uncomment this for debugging
-                return self.token2(pos, pos + 1, RQuery);
+                return Some(Ok(self.token2(pos, pos + 1, RQuery)));
             }

             // Advance the internal iterator and emit the next token, or loop
@@ -479,26 +480,28 @@ impl<'input> Iterator for Lexer<'input> {
             let result = match ch {
                 '"' => Some(self.string_literal(start)),

-                ';' => self.token(start, SemiColon),
-                '\n' => self.token(start, Newline),
-                '\\' => self.token(start, Escape),
+                ';' => Some(Ok(self.token(start, SemiColon))),
+                '\n' => Some(Ok(self.token(start, Newline))),
+                '\\' => Some(Ok(self.token(start, Escape))),

-                '(' => self.open(start, LParen),
-                '[' => self.open(start, LBracket),
-                '{' => self.open(start, LBrace),
-                '}' => self.close(start, RBrace),
-                ']' => self.close(start, RBracket),
-                ')' => self.close(start, RParen),
+                '(' => Some(Ok(self.open(start, LParen))),
+                '[' => Some(Ok(self.open(start, LBracket))),
+                '{' => Some(Ok(self.open(start, LBrace))),
+                '}' => Some(Ok(self.close(start, RBrace))),
+                ']' => Some(Ok(self.close(start, RBracket))),
+                ')' => Some(Ok(self.close(start, RParen))),

-                '.' => self.token(start, Dot),
-                ':' => self.token(start, Colon),
-                ',' => self.token(start, Comma),
+                '.' => Some(Ok(self.token(start, Dot))),
+                ':' => Some(Ok(self.token(start, Colon))),
+                ',' => Some(Ok(self.token(start, Comma))),

-                '_' if self.test_peek(char::is_alphabetic) => Some(self.internal_test(start)),
-                '_' => self.token(start, Underscore),
+                '_' if self.test_peek(char::is_alphabetic) => {
+                    Some(Ok(self.internal_test(start)))
+                }
+                '_' => Some(Ok(self.token(start, Underscore))),

                 '!' if self.test_peek(|ch| ch == '!' || !is_operator(ch)) => {
-                    self.token(start, Bang)
+                    Some(Ok(self.token(start, Bang)))
                 }

                 '#' => {
@@ -510,14 +513,14 @@ impl<'input> Iterator for Lexer<'input> {
                 's' if self.test_peek(|ch| ch == '\'') => Some(self.raw_string_literal(start)),
                 't' if self.test_peek(|ch| ch == '\'') => Some(self.timestamp_literal(start)),

-                ch if is_ident_start(ch) => Some(self.identifier_or_function_call(start)),
+                ch if is_ident_start(ch) => Some(Ok(self.identifier_or_function_call(start))),
                 ch if is_digit(ch) || (ch == '-' && self.test_peek(is_digit)) => {
                     Some(self.numeric_literal(start))
                 }
-                ch if is_operator(ch) => Some(self.operator(start)),
+                ch if is_operator(ch) => Some(Ok(self.operator(start))),
                 ch if ch.is_whitespace() => continue,

-                ch => self.token(start, InvalidToken(ch)),
+                ch => Some(Ok(self.token(start, InvalidToken(ch)))),
             };

             // dbg!(&result); // NOTE: uncomment this for debugging
@@ -529,7 +532,7 @@ impl<'input> Iterator for Lexer<'input> {
             // queries.
             } else if let Some(end) = self.rquery_indices.pop() {
                 // dbg!("RQuery"); // NOTE: uncomment this for debugging
-                return self.token2(end, end + 1, RQuery);
+                return Some(Ok(self.token2(end, end + 1, RQuery)));
             }

             return None;
@@ -542,7 +545,7 @@ impl<'input> Iterator for Lexer<'input> {
 // -----------------------------------------------------------------------------

 impl<'input> Lexer<'input> {
-    fn open(&mut self, start: usize, token: Token<&'input str>) -> Option<Spanned<'input, usize>> {
+    fn open(&mut self, start: usize, token: Token<&'input str>) -> Spanned<'input, usize> {
         match &token {
             Token::LParen => self.open_parens += 1,
             Token::LBracket => self.open_brackets += 1,
@@ -553,7 +556,7 @@ impl<'input> Lexer<'input> {
         self.token(start, token)
     }

-    fn close(&mut self, start: usize, token: Token<&'input str>) -> Option<Spanned<'input, usize>> {
+    fn close(&mut self, start: usize, token: Token<&'input str>) -> Spanned<'input, usize> {
         match &token {
             Token::RParen => self.open_parens = self.open_parens.saturating_sub(1),
             Token::RBracket => self.open_brackets = self.open_brackets.saturating_sub(1),
@@ -564,7 +567,7 @@ impl<'input> Lexer<'input> {
         self.token(start, token)
     }

-    fn token(&mut self, start: usize, token: Token<&'input str>) -> Option<Spanned<'input, usize>> {
+    fn token(&mut self, start: usize, token: Token<&'input str>) -> Spanned<'input, usize> {
         let end = self.next_index();
         self.token2(start, end, token)
     }
@@ -574,8 +577,8 @@ impl<'input> Lexer<'input> {
         start: usize,
         end: usize,
         token: Token<&'input str>,
-    ) -> Option<Spanned<'input, usize>> {
-        Some(Ok((start, token, end)))
+    ) -> Spanned<'input, usize> {
+        (start, token, end)
     }

     fn query_end(&mut self, start: usize) -> Option<usize> {
@@ -640,7 +643,7 @@ impl<'input> Lexer<'input> {
         let mut end = 0;
         while let Some((pos, ch)) = chars.next() {
             let take_until_end =
-                |result: Spanned<'input, usize>,
+                |result: SpannedResult<'input, usize>,
                  last_char: &mut Option<char>,
                  end: &mut usize,
                  chars: &mut Peekable<CharIndices<'input>>| {
@@ -735,7 +738,7 @@ impl<'input> Lexer<'input> {
         while let Some((pos, ch)) = chars.peek() {
             let pos = *pos;

-            let literal_check = |result: Spanned<'input, usize>, chars: &mut Peekable<CharIndices<'input>>| match result {
+            let literal_check = |result: SpannedResult<'input, usize>, chars: &mut Peekable<CharIndices<'input>>| match result {
                 Err(_) => Err(()),
                 Ok((_, _, new)) => {
                     #[allow(clippy::while_let_on_iterator)]
@@ -854,7 +857,7 @@ impl<'input> Lexer<'input> {
         true
     }

-    fn string_literal(&mut self, start: usize) -> Spanned<'input, usize> {
+    fn string_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
         let content_start = self.next_index();

         loop {
@@ -876,19 +879,19 @@ impl<'input> Lexer<'input> {
         Err(Error::StringLiteral { start })
     }

-    fn regex_literal(&mut self, start: usize) -> Spanned<'input, usize> {
+    fn regex_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
         self.quoted_literal(start, Token::RegexLiteral)
     }

-    fn raw_string_literal(&mut self, start: usize) -> Spanned<'input, usize> {
+    fn raw_string_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
         self.quoted_literal(start, |c| Token::StringLiteral(StringLiteral::Raw(c)))
     }

-    fn timestamp_literal(&mut self, start: usize) -> Spanned<'input, usize> {
+    fn timestamp_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
         self.quoted_literal(start, Token::TimestampLiteral)
     }

-    fn numeric_literal(&mut self, start: usize) -> Spanned<'input, usize> {
+    fn numeric_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
         let (end, int) = self.take_while(start, |ch| is_digit(ch) || ch == '_');

         match self.peek() {
@@ -928,7 +931,7 @@ impl<'input> Lexer<'input> {
             Token::ident(ident)
         };

-        Ok((start, token, end))
+        (start, token, end)
     }

     fn operator(&mut self, start: usize) -> Spanned<'input, usize> {
@@ -941,21 +944,21 @@ impl<'input> Lexer<'input> {
             op => Token::Operator(op),
         };

-        Ok((start, token, end))
+        (start, token, end)
     }

     fn internal_test(&mut self, start: usize) -> Spanned<'input, usize> {
         self.bump();
         let (end, test) = self.take_while(start, char::is_alphabetic);

-        Ok((start, Token::InternalTest(test), end))
+        (start, Token::InternalTest(test), end)
     }

     fn quoted_literal(
         &mut self,
         start: usize,
         tok: impl Fn(&'input str) -> Tok<'input>,
-    ) -> Spanned<'input, usize> {
+    ) -> SpannedResult<'input, usize> {
         self.bump();
         let content_start = self.next_index();

@@ -1122,7 +1125,7 @@ mod test {
     use super::*;
     use crate::lex::Token::*;

-    fn lexer(input: &str) -> impl Iterator<Item = Spanned<'_, usize>> + '_ {
+    fn lexer(input: &str) -> impl Iterator<Item = SpannedResult<'_, usize>> + '_ {
         let mut lexer = Lexer::new(input);
         Box::new(std::iter::from_fn(move || Some(lexer.next()?)))
     }

lib/vrl/stdlib/src/parse_aws_vpc_flow_log.rs

Lines changed: 1 addition & 0 deletions
@@ -130,6 +130,7 @@ impl Expression for ParseAwsVpcFlowLogFn {

 type ParseResult<T> = std::result::Result<T, String>;

+#[allow(clippy::unnecessary_wraps)] // match other parse methods
 fn identity<'a>(_key: &'a str, value: &'a str) -> ParseResult<&'a str> {
     Ok(value)
 }
