From 3702db0c3cb94cca1193e573e8b3859becf07482 Mon Sep 17 00:00:00 2001
From: f01dab1e
Date: Thu, 12 Oct 2023 14:37:56 +0000
Subject: [PATCH] update tests

---
 compiler/noirc_frontend/src/lexer/lexer.rs | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs
index 9545c4b84ee..1bb54742622 100644
--- a/compiler/noirc_frontend/src/lexer/lexer.rs
+++ b/compiler/noirc_frontend/src/lexer/lexer.rs
@@ -551,7 +551,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
         for token in expected.into_iter() {
-            let got = lexer.next_token().unwrap();
+            let got = lexer.next().unwrap().unwrap();
             assert_eq!(got, token);
         }
     }
@@ -657,7 +657,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
         for token in expected.into_iter() {
-            let got = lexer.next_token().unwrap();
+            let got = lexer.next().unwrap().unwrap();
             assert_eq!(got, token);
         }
     }
@@ -681,7 +681,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
         for token in expected.into_iter() {
-            let got = lexer.next_token().unwrap();
+            let got = lexer.next().unwrap().unwrap();
             assert_eq!(got, token);
         }
     }
@@ -711,7 +711,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
         for token in expected.into_iter() {
-            let first_lexer_output = lexer.next_token().unwrap();
+            let first_lexer_output = lexer.next().unwrap().unwrap();
             assert_eq!(first_lexer_output, token);
         }
     }
@@ -733,7 +733,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
         for token in expected.into_iter() {
-            let first_lexer_output = lexer.next_token().unwrap();
+            let first_lexer_output = lexer.next().unwrap().unwrap();
             assert_eq!(first_lexer_output, token);
         }
     }
@@ -755,7 +755,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
         for token in expected.into_iter() {
-            let first_lexer_output = lexer.next_token().unwrap();
+            let first_lexer_output = lexer.next().unwrap().unwrap();
             assert_eq!(first_lexer_output, token);
         }
     }
@@ -772,7 +772,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
         for token in expected.into_iter() {
-            let got = lexer.next_token().unwrap();
+            let got = lexer.next().unwrap().unwrap();
             assert_eq!(got, token);
         }
     }
@@ -785,7 +785,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
        for token in expected.into_iter() {
-            let got = lexer.next_token().unwrap();
+            let got = lexer.next().unwrap().unwrap();
             assert_eq!(got, token);
         }
     }
@@ -824,7 +824,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
         for spanned_token in expected.into_iter() {
-            let got = lexer.next_token().unwrap();
+            let got = lexer.next().unwrap().unwrap();
             assert_eq!(got.to_span(), spanned_token.to_span());
             assert_eq!(got, spanned_token);
         }
@@ -895,7 +895,7 @@ mod tests {
         let mut lexer = Lexer::new(input);
 
         for token in expected.into_iter() {
-            let got = lexer.next_token().unwrap();
+            let got = lexer.next().unwrap().unwrap();
             assert_eq!(got, token);
         }
     }
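
Note (not part of the patch): the change from lexer.next_token().unwrap() to lexer.next().unwrap().unwrap() drives the tests through the lexer's Iterator implementation, where each lexed token comes back wrapped in an Option around a Result, so two unwraps are needed. The following is a minimal, self-contained sketch of that shape; the type definitions and the iterator termination logic here are illustrative assumptions, not the actual noirc_frontend code.

    // Toy stand-in for the real SpannedToken / lexer error types.
    #[derive(Debug, PartialEq)]
    struct SpannedToken(&'static str);

    #[derive(Debug)]
    struct LexerError;

    struct Lexer {
        tokens: Vec<&'static str>,
        pos: usize,
    }

    impl Lexer {
        // Direct API used by the old tests: returns Result<SpannedToken, LexerError>,
        // so a single unwrap yields the token.
        fn next_token(&mut self) -> Result<SpannedToken, LexerError> {
            let tok = self.tokens.get(self.pos).copied().unwrap_or("EOF");
            self.pos += 1;
            Ok(SpannedToken(tok))
        }
    }

    impl Iterator for Lexer {
        // Each item is itself a Result, so next() yields Option<Result<..>>.
        type Item = Result<SpannedToken, LexerError>;

        fn next(&mut self) -> Option<Self::Item> {
            if self.pos > self.tokens.len() {
                None
            } else {
                Some(self.next_token())
            }
        }
    }

    fn main() {
        let mut lexer = Lexer { tokens: vec!["fn", "main"], pos: 0 };
        // Old style: one unwrap (Result).
        // let got = lexer.next_token().unwrap();
        // New style: two unwraps (first the Option from the iterator, then the Result).
        let got = lexer.next().unwrap().unwrap();
        assert_eq!(got, SpannedToken("fn"));
    }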