@@ -970,15 +970,15 @@ impl<'a> Parser<'a> {
             t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                 if self.peek_token().token == Token::Period {
                     let mut id_parts: Vec<Ident> = vec![match t {
-                        Token::Word(w) => w.to_ident(next_token.span),
+                        Token::Word(w) => w.into_ident(next_token.span),
                         Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                         _ => unreachable!(), // We matched above
                     }];

                     while self.consume_token(&Token::Period) {
                         let next_token = self.next_token();
                         match next_token.token {
-                            Token::Word(w) => id_parts.push(w.to_ident(next_token.span)),
+                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                             Token::SingleQuotedString(s) => {
                                 // SQLite has single-quoted identifiers
                                 id_parts.push(Ident::with_quote('\'', s))
@@ -1108,7 +1108,7 @@ impl<'a> Parser<'a> {
                 if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
             {
                 Ok(Some(Expr::Function(Function {
-                    name: ObjectName(vec![w.to_ident(w_span)]),
+                    name: ObjectName(vec![w.clone().into_ident(w_span)]),
                     uses_odbc_syntax: false,
                     parameters: FunctionArguments::None,
                     args: FunctionArguments::None,
@@ -1123,7 +1123,7 @@ impl<'a> Parser<'a> {
             | Keyword::CURRENT_DATE
             | Keyword::LOCALTIME
             | Keyword::LOCALTIMESTAMP => {
-                Ok(Some(self.parse_time_functions(ObjectName(vec![w.to_ident(w_span)]))?))
+                Ok(Some(self.parse_time_functions(ObjectName(vec![w.clone().into_ident(w_span)]))?))
             }
             Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
             Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
@@ -1148,7 +1148,7 @@ impl<'a> Parser<'a> {
             Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
             Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
             Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
-                Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
+                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
             }
             Keyword::SUBSTRING => Ok(Some(self.parse_substring_expr()?)),
             Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
@@ -1167,7 +1167,7 @@ impl<'a> Parser<'a> {
                 let query = self.parse_query()?;
                 self.expect_token(&Token::RParen)?;
                 Ok(Some(Expr::Function(Function {
-                    name: ObjectName(vec![w.to_ident(w_span)]),
+                    name: ObjectName(vec![w.clone().into_ident(w_span)]),
                     uses_odbc_syntax: false,
                     parameters: FunctionArguments::None,
                     args: FunctionArguments::Subquery(query),
@@ -1203,11 +1203,12 @@ impl<'a> Parser<'a> {
         w_span: Span,
     ) -> Result<Expr, ParserError> {
         match self.peek_token().token {
-            Token::Period => {
-                self.parse_compound_field_access(Expr::Identifier(w.to_ident(w_span)), vec![])
-            }
+            Token::Period => self.parse_compound_field_access(
+                Expr::Identifier(w.clone().into_ident(w_span)),
+                vec![],
+            ),
             Token::LParen => {
-                let id_parts = vec![w.to_ident(w_span)];
+                let id_parts = vec![w.clone().into_ident(w_span)];
                 if let Some(expr) = self.parse_outer_join_expr(&id_parts) {
                     Ok(expr)
                 } else {
@@ -1220,7 +1221,7 @@ impl<'a> Parser<'a> {
             }
             Token::LBracket if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect | ClickHouseDialect | BigQueryDialect) =>
             {
-                let ident = Expr::Identifier(w.to_ident(w_span));
+                let ident = Expr::Identifier(w.clone().into_ident(w_span));
                 let mut fields = vec![];
                 self.parse_multi_dim_subscript(&mut fields)?;
                 self.parse_compound_field_access(ident, fields)
@@ -1250,11 +1251,11 @@ impl<'a> Parser<'a> {
             Token::Arrow if self.dialect.supports_lambda_functions() => {
                 self.expect_token(&Token::Arrow)?;
                 Ok(Expr::Lambda(LambdaFunction {
-                    params: OneOrManyWithParens::One(w.to_ident(w_span)),
+                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
                     body: Box::new(self.parse_expr()?),
                 }))
             }
-            _ => Ok(Expr::Identifier(w.to_ident(w_span))),
+            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
         }
     }

@@ -1438,7 +1439,7 @@ impl<'a> Parser<'a> {
         } else {
             let tok = self.next_token();
             let key = match tok.token {
-                Token::Word(word) => word.to_ident(tok.span),
+                Token::Word(word) => word.into_ident(tok.span),
                 _ => {
                     return parser_err!(
                         format!("Expected identifier, found: {tok}"),
@@ -1490,7 +1491,7 @@ impl<'a> Parser<'a> {
             let next_token = self.next_token();
             match next_token.token {
                 Token::Word(w) => {
-                    let expr = Expr::Identifier(w.to_ident(next_token.span));
+                    let expr = Expr::Identifier(w.into_ident(next_token.span));
                     chain.push(AccessExpr::Dot(expr));
                     if self.peek_token().token == Token::LBracket {
                         if self.dialect.supports_partiql() {
@@ -1670,7 +1671,7 @@ impl<'a> Parser<'a> {
             while p.consume_token(&Token::Period) {
                 let tok = p.next_token();
                 let name = match tok.token {
-                    Token::Word(word) => word.to_ident(tok.span),
+                    Token::Word(word) => word.into_ident(tok.span),
                     _ => return p.expected("identifier", tok),
                 };
                 let func = match p.parse_function(ObjectName(vec![name]))? {
@@ -8252,7 +8253,7 @@ impl<'a> Parser<'a> {
                 // This because snowflake allows numbers as placeholders
                 let next_token = self.next_token();
                 let ident = match next_token.token {
-                    Token::Word(w) => Ok(w.to_ident(next_token.span)),
+                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
                     Token::Number(w, false) => Ok(Ident::new(w)),
                     _ => self.expected("placeholder", next_token),
                 }?;
@@ -8763,7 +8764,7 @@ impl<'a> Parser<'a> {
             // (For example, in `FROM t1 JOIN` the `JOIN` will always be parsed as a keyword,
             // not an alias.)
             Token::Word(w) if after_as || !reserved_kwds.contains(&w.keyword) => {
-                Ok(Some(w.to_ident(next_token.span)))
+                Ok(Some(w.into_ident(next_token.span)))
             }
             // MSSQL supports single-quoted strings as aliases for columns
             // We accept them as table aliases too, although MSSQL does not.
@@ -8930,7 +8931,7 @@ impl<'a> Parser<'a> {
         loop {
             match &self.peek_token_ref().token {
                 Token::Word(w) => {
-                    idents.push(w.to_ident(self.peek_token_ref().span));
+                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
                 }
                 Token::EOF | Token::Eq => break,
                 _ => {}
@@ -8985,7 +8986,7 @@ impl<'a> Parser<'a> {
         // expecting at least one word for identifier
         let next_token = self.next_token();
         match next_token.token {
-            Token::Word(w) => idents.push(w.to_ident(next_token.span)),
+            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
             Token::EOF => {
                 return Err(ParserError::ParserError(
                     "Empty input when parsing identifier".to_string(),
@@ -9005,7 +9006,7 @@ impl<'a> Parser<'a> {
             Token::Period => {
                 let next_token = self.next_token();
                 match next_token.token {
-                    Token::Word(w) => idents.push(w.to_ident(next_token.span)),
+                    Token::Word(w) => idents.push(w.into_ident(next_token.span)),
                     Token::EOF => {
                         return Err(ParserError::ParserError(
                             "Trailing period in identifier".to_string(),
@@ -9034,7 +9035,7 @@ impl<'a> Parser<'a> {
     pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
         let next_token = self.next_token();
         match next_token.token {
-            Token::Word(w) => Ok(w.to_ident(next_token.span)),
+            Token::Word(w) => Ok(w.into_ident(next_token.span)),
             Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
             Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
             _ => self.expected("identifier", next_token),
@@ -9054,9 +9055,10 @@ impl<'a> Parser<'a> {
     fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
         match self.peek_token().token {
             Token::Word(w) => {
+                let quote_style_is_none = w.quote_style.is_none();
                 let mut requires_whitespace = false;
-                let mut ident = w.to_ident(self.next_token().span);
-                if w.quote_style.is_none() {
+                let mut ident = w.into_ident(self.next_token().span);
+                if quote_style_is_none {
                     while matches!(self.peek_token_no_skip().token, Token::Minus) {
                         self.next_token();
                         ident.value.push('-');
@@ -13485,13 +13487,23 @@ impl<'a> Parser<'a> {
 }

 impl Word {
+    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
     pub fn to_ident(&self, span: Span) -> Ident {
         Ident {
             value: self.value.clone(),
             quote_style: self.quote_style,
             span,
         }
     }
+
+    /// Convert this word into an [`Ident`] identifier
+    pub fn into_ident(self, span: Span) -> Ident {
+        Ident {
+            value: self.value,
+            quote_style: self.quote_style,
+            span,
+        }
+    }
 }

 #[cfg(test)]
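Note on the API change above: the deprecated `Word::to_ident(&self, span)` borrows the `Word` and clones its `value`, while the new `Word::into_ident(self, span)` consumes the `Word` and moves the `String` out. Call sites that own the `Word` (most `next_token()` matches) switch to `into_ident` directly; call sites that only have a reference (for example via `peek_token_ref()`, or the borrowed `w` in the keyword-prefix arms) now call `w.clone().into_ident(..)` so the copy stays visible at the call site. A minimal sketch of both patterns, assuming a released `sqlparser` crate that includes this change; `Span::empty()`, `Keyword::NoKeyword`, and the public `Word`/`Ident` fields come from the crate itself, not from this diff:

```rust
use sqlparser::ast::Ident;
use sqlparser::keywords::Keyword;
use sqlparser::tokenizer::{Span, Word};

fn main() {
    // Owned `Word`: `into_ident` moves the `value` String into the `Ident`
    // instead of cloning it, which is the point of the new method.
    let owned = Word {
        value: "orders".to_string(),
        quote_style: None,
        keyword: Keyword::NoKeyword,
    };
    let ident: Ident = owned.into_ident(Span::empty());
    assert_eq!(ident.value, "orders");
    assert_eq!(ident.quote_style, None);

    // `Word` only available behind a reference (e.g. from `peek_token_ref()`):
    // clone explicitly before converting, mirroring the parser's new
    // `w.clone().into_ident(..)` call sites.
    let word = Word {
        value: "customers".to_string(),
        quote_style: None,
        keyword: Keyword::NoKeyword,
    };
    let borrowed: &Word = &word;
    let ident2 = borrowed.clone().into_ident(Span::empty());
    assert_eq!(ident2.value, "customers");
}
```

The explicit `clone()` keeps the cost of copying a `Word` visible where it happens, while the common owned-token paths avoid the allocation entirely.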