diff --git a/src/ast/mod.rs b/src/ast/mod.rs index 176d36545..3a448312b 100644 --- a/src/ast/mod.rs +++ b/src/ast/mod.rs @@ -2787,10 +2787,11 @@ impl fmt::Display for Declare { } /// Sql options of a `CREATE TABLE` statement. -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Default)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum CreateTableOptions { + #[default] None, /// Options specified using the `WITH` keyword. /// e.g. `WITH (description = "123")` @@ -2819,12 +2820,6 @@ pub enum CreateTableOptions { TableProperties(Vec), } -impl Default for CreateTableOptions { - fn default() -> Self { - Self::None - } -} - impl fmt::Display for CreateTableOptions { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { diff --git a/src/ast/spans.rs b/src/ast/spans.rs index 7d2a00095..f3120b78f 100644 --- a/src/ast/spans.rs +++ b/src/ast/spans.rs @@ -2371,7 +2371,7 @@ pub mod tests { #[test] fn test_join() { let dialect = &GenericDialect; - let mut test = SpanTest::new( + let test = SpanTest::new( dialect, "SELECT id, name FROM users LEFT JOIN companies ON users.company_id = companies.id", ); @@ -2396,7 +2396,7 @@ pub mod tests { #[test] pub fn test_union() { let dialect = &GenericDialect; - let mut test = SpanTest::new( + let test = SpanTest::new( dialect, "SELECT a FROM postgres.public.source UNION SELECT a FROM postgres.public.source", ); @@ -2413,7 +2413,7 @@ pub mod tests { #[test] pub fn test_subquery() { let dialect = &GenericDialect; - let mut test = SpanTest::new( + let test = SpanTest::new( dialect, "SELECT a FROM (SELECT a FROM postgres.public.source) AS b", ); @@ -2438,7 +2438,7 @@ pub mod tests { #[test] pub fn test_cte() { let dialect = &GenericDialect; - let mut test = SpanTest::new(dialect, "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), 
cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); + let test = SpanTest::new(dialect, "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); let query = test.0.parse_query().unwrap(); @@ -2450,7 +2450,7 @@ pub mod tests { #[test] pub fn test_snowflake_lateral_flatten() { let dialect = &SnowflakeDialect; - let mut test = SpanTest::new(dialect, "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED"); + let test = SpanTest::new(dialect, "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED"); let query = test.0.parse_select().unwrap(); @@ -2462,7 +2462,7 @@ pub mod tests { #[test] pub fn test_wildcard_from_cte() { let dialect = &GenericDialect; - let mut test = SpanTest::new( + let test = SpanTest::new( dialect, "WITH cte AS (SELECT a FROM postgres.public.source) SELECT cte.* FROM cte", ); @@ -2488,7 +2488,7 @@ pub mod tests { #[test] fn test_case_expr_span() { let dialect = &GenericDialect; - let mut test = SpanTest::new(dialect, "CASE 1 WHEN 2 THEN 3 ELSE 4 END"); + let test = SpanTest::new(dialect, "CASE 1 WHEN 2 THEN 3 ELSE 4 END"); let expr = test.0.parse_expr().unwrap(); let expr_span = expr.span(); assert_eq!( diff --git a/src/dialect/bigquery.rs b/src/dialect/bigquery.rs index 27fd3cca3..d1a61553e 100644 --- a/src/dialect/bigquery.rs +++ b/src/dialect/bigquery.rs @@ -46,7 +46,7 @@ const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[ pub struct BigQueryDialect; impl Dialect for BigQueryDialect { - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.parse_keyword(Keyword::BEGIN) { if parser.peek_keyword(Keyword::TRANSACTION) || parser.peek_token_ref().token == Token::SemiColon @@ -145,7 +145,7 @@ impl Dialect for 
BigQueryDialect { true } - fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_column_alias(&self, kw: &Keyword, _parser: &Parser) -> bool { !RESERVED_FOR_COLUMN_ALIAS.contains(kw) } diff --git a/src/dialect/mod.rs b/src/dialect/mod.rs index ef4e1cdde..3846eebc0 100644 --- a/src/dialect/mod.rs +++ b/src/dialect/mod.rs @@ -466,7 +466,7 @@ pub trait Dialect: Debug + Any { } /// Dialect-specific prefix parser override - fn parse_prefix(&self, _parser: &mut Parser) -> Option> { + fn parse_prefix(&self, _parser: &Parser) -> Option> { // return None to fall back to the default behavior None } @@ -615,7 +615,7 @@ pub trait Dialect: Debug + Any { /// If `None` is returned, falls back to the default behavior. fn parse_infix( &self, - _parser: &mut Parser, + _parser: &Parser, _expr: &Expr, _precedence: u8, ) -> Option> { @@ -778,7 +778,7 @@ pub trait Dialect: Debug + Any { /// This method is called to parse the next statement. /// /// If `None` is returned, falls back to the default behavior. - fn parse_statement(&self, _parser: &mut Parser) -> Option> { + fn parse_statement(&self, _parser: &Parser) -> Option> { // return None to fall back to the default behavior None } @@ -790,7 +790,7 @@ pub trait Dialect: Debug + Any { /// If `None` is returned, falls back to the default behavior. fn parse_column_option( &self, - _parser: &mut Parser, + _parser: &Parser, ) -> Result, ParserError>>, ParserError> { // return None to fall back to the default behavior Ok(None) @@ -1021,33 +1021,33 @@ pub trait Dialect: Debug + Any { /// Returns true if the specified keyword should be parsed as a column identifier. /// See [keywords::RESERVED_FOR_COLUMN_ALIAS] - fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_column_alias(&self, kw: &Keyword, _parser: &Parser) -> bool { !keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) } /// Returns true if the specified keyword should be parsed as a select item alias. 
/// When explicit is true, the keyword is preceded by an `AS` word. Parser is provided /// to enable looking ahead if needed. - fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &Parser) -> bool { explicit || self.is_column_alias(kw, parser) } /// Returns true if the specified keyword should be parsed as a table factor identifier. /// See [keywords::RESERVED_FOR_TABLE_FACTOR] - fn is_table_factor(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_table_factor(&self, kw: &Keyword, _parser: &Parser) -> bool { !keywords::RESERVED_FOR_TABLE_FACTOR.contains(kw) } /// Returns true if the specified keyword should be parsed as a table factor alias. /// See [keywords::RESERVED_FOR_TABLE_ALIAS] - fn is_table_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_table_alias(&self, kw: &Keyword, _parser: &Parser) -> bool { !keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw) } /// Returns true if the specified keyword should be parsed as a table factor alias. /// When explicit is true, the keyword is preceded by an `AS` word. Parser is provided /// to enable looking ahead if needed. 
- fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, parser: &Parser) -> bool { explicit || self.is_table_alias(kw, parser) } @@ -1400,14 +1400,14 @@ mod tests { fn parse_prefix( &self, - parser: &mut sqlparser::parser::Parser, + parser: &sqlparser::parser::Parser, ) -> Option> { self.0.parse_prefix(parser) } fn parse_infix( &self, - parser: &mut sqlparser::parser::Parser, + parser: &sqlparser::parser::Parser, expr: &Expr, precedence: u8, ) -> Option> { @@ -1423,7 +1423,7 @@ mod tests { fn parse_statement( &self, - parser: &mut sqlparser::parser::Parser, + parser: &sqlparser::parser::Parser, ) -> Option> { self.0.parse_statement(parser) } diff --git a/src/dialect/mssql.rs b/src/dialect/mssql.rs index e1902b389..d86519ac6 100644 --- a/src/dialect/mssql.rs +++ b/src/dialect/mssql.rs @@ -128,11 +128,11 @@ impl Dialect for MsSqlDialect { &[GranteesType::Public] } - fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_column_alias(&self, kw: &Keyword, _parser: &Parser) -> bool { !keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) && !RESERVED_FOR_COLUMN_ALIAS.contains(kw) } - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.peek_keyword(Keyword::IF) { Some(self.parse_if_stmt(parser)) } else if parser.parse_keywords(&[Keyword::CREATE, Keyword::TRIGGER]) { @@ -157,7 +157,7 @@ impl MsSqlDialect { /// [ ELSE /// { sql_statement | statement_block } ] /// ``` - fn parse_if_stmt(&self, parser: &mut Parser) -> Result { + fn parse_if_stmt(&self, parser: &Parser) -> Result { let if_token = parser.expect_keyword(Keyword::IF)?; let condition = parser.parse_expr()?; @@ -240,7 +240,7 @@ impl MsSqlDialect { /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql fn parse_create_trigger( &self, - parser: &mut Parser, + parser: &Parser, 
or_alter: bool, ) -> Result { let name = parser.parse_object_name(false)?; @@ -279,7 +279,7 @@ impl MsSqlDialect { /// Stops parsing when reaching EOF or the given keyword. fn parse_statement_list( &self, - parser: &mut Parser, + parser: &Parser, terminal_keyword: Option, ) -> Result, ParserError> { let mut stmts = Vec::new(); diff --git a/src/dialect/mysql.rs b/src/dialect/mysql.rs index 8d2a5ad4b..f8c96d127 100644 --- a/src/dialect/mysql.rs +++ b/src/dialect/mysql.rs @@ -86,7 +86,7 @@ impl Dialect for MySqlDialect { fn parse_infix( &self, - parser: &mut crate::parser::Parser, + parser: &crate::parser::Parser, expr: &crate::ast::Expr, _precedence: u8, ) -> Option> { @@ -102,7 +102,7 @@ impl Dialect for MySqlDialect { } } - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.parse_keywords(&[Keyword::LOCK, Keyword::TABLES]) { Some(parse_lock_tables(parser)) } else if parser.parse_keywords(&[Keyword::UNLOCK, Keyword::TABLES]) { @@ -134,7 +134,7 @@ impl Dialect for MySqlDialect { true } - fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool { + fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, _parser: &Parser) -> bool { explicit || (!keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw) && !RESERVED_FOR_TABLE_ALIAS_MYSQL.contains(kw)) @@ -171,13 +171,13 @@ impl Dialect for MySqlDialect { /// `LOCK TABLES` /// -fn parse_lock_tables(parser: &mut Parser) -> Result { +fn parse_lock_tables(parser: &Parser) -> Result { let tables = parser.parse_comma_separated(parse_lock_table)?; Ok(Statement::LockTables { tables }) } // tbl_name [[AS] alias] lock_type -fn parse_lock_table(parser: &mut Parser) -> Result { +fn parse_lock_table(parser: &Parser) -> Result { let table = parser.parse_identifier()?; let alias = parser.parse_optional_alias(&[Keyword::READ, Keyword::WRITE, Keyword::LOW_PRIORITY])?; @@ -191,7 +191,7 @@ fn parse_lock_table(parser: &mut Parser) -> 
Result { } // READ [LOCAL] | [LOW_PRIORITY] WRITE -fn parse_lock_tables_type(parser: &mut Parser) -> Result { +fn parse_lock_tables_type(parser: &Parser) -> Result { if parser.parse_keyword(Keyword::READ) { if parser.parse_keyword(Keyword::LOCAL) { Ok(LockTableType::Read { local: true }) @@ -211,6 +211,6 @@ fn parse_lock_tables_type(parser: &mut Parser) -> Result -fn parse_unlock_tables(_parser: &mut Parser) -> Result { +fn parse_unlock_tables(_parser: &Parser) -> Result { Ok(Statement::UnlockTables) } diff --git a/src/dialect/snowflake.rs b/src/dialect/snowflake.rs index 825fd45f0..0083d2f2a 100644 --- a/src/dialect/snowflake.rs +++ b/src/dialect/snowflake.rs @@ -209,7 +209,7 @@ impl Dialect for SnowflakeDialect { true } - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.parse_keyword(Keyword::BEGIN) { return Some(parser.parse_begin_exception_end()); } @@ -311,7 +311,7 @@ impl Dialect for SnowflakeDialect { fn parse_column_option( &self, - parser: &mut Parser, + parser: &Parser, ) -> Result, ParserError>>, ParserError> { parser.maybe_parse(|parser| { let with = parser.parse_keyword(Keyword::WITH); @@ -384,7 +384,7 @@ impl Dialect for SnowflakeDialect { true } - fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool { + fn is_column_alias(&self, kw: &Keyword, parser: &Parser) -> bool { match kw { // The following keywords can be considered an alias as long as // they are not followed by other tokens that may change their meaning @@ -430,7 +430,7 @@ impl Dialect for SnowflakeDialect { } } - fn is_table_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool { + fn is_table_alias(&self, kw: &Keyword, parser: &Parser) -> bool { match kw { // The following keywords can be considered an alias as long as // they are not followed by other tokens that may change their meaning @@ -514,7 +514,7 @@ impl Dialect for SnowflakeDialect { } } - fn is_table_factor(&self, kw: &Keyword, 
parser: &mut Parser) -> bool { + fn is_table_factor(&self, kw: &Keyword, parser: &Parser) -> bool { match kw { Keyword::LIMIT if peek_for_limit_options(parser) => false, // Table function @@ -584,7 +584,7 @@ fn peek_for_limit_options(parser: &Parser) -> bool { } } -fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result { +fn parse_file_staging_command(kw: Keyword, parser: &Parser) -> Result { let stage = parse_snowflake_stage_name(parser)?; let pattern = if parser.parse_keyword(Keyword::PATTERN) { parser.expect_token(&Token::Eq)?; @@ -606,7 +606,7 @@ fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result -fn parse_alter_session(parser: &mut Parser, set: bool) -> Result { +fn parse_alter_session(parser: &Parser, set: bool) -> Result { let session_options = parse_session_options(parser, set)?; Ok(Statement::AlterSession { set, @@ -629,7 +629,7 @@ pub fn parse_create_table( transient: bool, iceberg: bool, dynamic: bool, - parser: &mut Parser, + parser: &Parser, ) -> Result { let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = parser.parse_object_name(false)?; @@ -867,7 +867,7 @@ pub fn parse_create_table( pub fn parse_create_database( or_replace: bool, transient: bool, - parser: &mut Parser, + parser: &Parser, ) -> Result { let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = parser.parse_object_name(false)?; @@ -972,7 +972,7 @@ pub fn parse_create_database( } pub fn parse_storage_serialization_policy( - parser: &mut Parser, + parser: &Parser, ) -> Result { let next_token = parser.next_token(); match &next_token.token { @@ -988,7 +988,7 @@ pub fn parse_storage_serialization_policy( pub fn parse_create_stage( or_replace: bool, temporary: bool, - parser: &mut Parser, + parser: &Parser, ) -> Result { //[ IF NOT EXISTS ] let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); @@ -1047,7 +1047,7 @@ 
pub fn parse_create_stage( }) } -pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result { +pub fn parse_stage_name_identifier(parser: &Parser) -> Result { let mut ident = String::new(); while let Some(next_token) = parser.next_token_no_skip() { match &next_token.token { @@ -1074,7 +1074,7 @@ pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result Result { +pub fn parse_snowflake_stage_name(parser: &Parser) -> Result { match parser.next_token().token { Token::AtSign => { parser.prev_token(); @@ -1096,7 +1096,7 @@ pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result` /// and `COPY INTO ` which have different syntax. -pub fn parse_copy_into(parser: &mut Parser) -> Result { +pub fn parse_copy_into(parser: &Parser) -> Result { let kind = match parser.peek_token().token { // Indicates an internal stage Token::AtSign => CopyIntoSnowflakeKind::Location, @@ -1258,7 +1258,7 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result { } fn parse_select_items_for_data_load( - parser: &mut Parser, + parser: &Parser, ) -> Result>, ParserError> { let mut select_items: Vec = vec![]; loop { @@ -1279,9 +1279,7 @@ fn parse_select_items_for_data_load( Ok(Some(select_items)) } -fn parse_select_item_for_data_load( - parser: &mut Parser, -) -> Result { +fn parse_select_item_for_data_load(parser: &Parser) -> Result { let mut alias: Option = None; let mut file_col_num: i32 = 0; let mut element: Option = None; @@ -1348,7 +1346,7 @@ fn parse_select_item_for_data_load( }) } -fn parse_stage_params(parser: &mut Parser) -> Result { +fn parse_stage_params(parser: &Parser) -> Result { let (mut url, mut storage_integration, mut endpoint) = (None, None, None); let mut encryption: KeyValueOptions = KeyValueOptions { options: vec![], @@ -1414,10 +1412,7 @@ fn parse_stage_params(parser: &mut Parser) -> Result Result, ParserError> { +fn parse_session_options(parser: &Parser, set: bool) -> Result, ParserError> { let mut options: Vec = Vec::new(); let empty = String::new; 
loop { @@ -1460,7 +1455,7 @@ fn parse_session_options( /// [ (seed , increment) | START num INCREMENT num ] [ ORDER | NOORDER ] /// ``` /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table -fn parse_identity_property(parser: &mut Parser) -> Result { +fn parse_identity_property(parser: &Parser) -> Result { let parameters = if parser.consume_token(&Token::LParen) { let seed = parser.parse_number()?; parser.expect_token(&Token::Comma)?; @@ -1496,7 +1491,7 @@ fn parse_identity_property(parser: &mut Parser) -> Result Result { let policy_name = parser.parse_object_name(false)?; @@ -1522,7 +1517,7 @@ fn parse_column_policy_property( /// ( = '' [ , = '' , ... ] ) /// ``` /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table -fn parse_column_tags(parser: &mut Parser, with: bool) -> Result { +fn parse_column_tags(parser: &Parser, with: bool) -> Result { parser.expect_token(&Token::LParen)?; let tags = parser.parse_comma_separated(Parser::parse_tag)?; parser.expect_token(&Token::RParen)?; @@ -1532,7 +1527,7 @@ fn parse_column_tags(parser: &mut Parser, with: bool) -> Result -fn parse_show_objects(terse: bool, parser: &mut Parser) -> Result { +fn parse_show_objects(terse: bool, parser: &Parser) -> Result { let show_options = parser.parse_show_stmt_options()?; Ok(Statement::ShowObjects(ShowObjects { terse, diff --git a/src/dialect/sqlite.rs b/src/dialect/sqlite.rs index 64a8d532f..94cd7d336 100644 --- a/src/dialect/sqlite.rs +++ b/src/dialect/sqlite.rs @@ -65,7 +65,7 @@ impl Dialect for SQLiteDialect { self.is_identifier_start(ch) || ch.is_ascii_digit() } - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.parse_keyword(Keyword::REPLACE) { parser.prev_token(); Some(parser.parse_insert()) @@ -76,7 +76,7 @@ impl Dialect for SQLiteDialect { fn parse_infix( &self, - parser: &mut crate::parser::Parser, + parser: &crate::parser::Parser, expr: 
&crate::ast::Expr, _precedence: u8, ) -> Option> { diff --git a/src/parser/alter.rs b/src/parser/alter.rs index b3e3c99e6..ab88f0463 100644 --- a/src/parser/alter.rs +++ b/src/parser/alter.rs @@ -30,7 +30,7 @@ use crate::{ }; impl Parser<'_> { - pub fn parse_alter_role(&mut self) -> Result { + pub fn parse_alter_role(&self) -> Result { if dialect_of!(self is PostgreSqlDialect) { return self.parse_pg_alter_role(); } else if dialect_of!(self is MsSqlDialect) { @@ -53,7 +53,7 @@ impl Parser<'_> { /// ``` /// /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-alterpolicy.html) - pub fn parse_alter_policy(&mut self) -> Result { + pub fn parse_alter_policy(&self) -> Result { let name = self.parse_identifier()?; self.expect_keyword_is(Keyword::ON)?; let table_name = self.parse_object_name(false)?; @@ -110,7 +110,7 @@ impl Parser<'_> { /// /// ALTER CONNECTOR connector_name SET OWNER [USER|ROLE] user_or_role; /// ``` - pub fn parse_alter_connector(&mut self) -> Result { + pub fn parse_alter_connector(&self) -> Result { let name = self.parse_identifier()?; self.expect_keyword_is(Keyword::SET)?; @@ -147,7 +147,7 @@ impl Parser<'_> { /// ```sql /// ALTER USER [ IF EXISTS ] [ ] [ OPTIONS ] /// ``` - pub fn parse_alter_user(&mut self) -> Result { + pub fn parse_alter_user(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_identifier()?; let rename_to = if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) { @@ -314,7 +314,7 @@ impl Parser<'_> { })) } - fn parse_mfa_method(&mut self) -> Result { + fn parse_mfa_method(&self) -> Result { if self.parse_keyword(Keyword::PASSKEY) { Ok(MfaMethodKind::PassKey) } else if self.parse_keyword(Keyword::TOTP) { @@ -326,7 +326,7 @@ impl Parser<'_> { } } - fn parse_mssql_alter_role(&mut self) -> Result { + fn parse_mssql_alter_role(&self) -> Result { let role_name = self.parse_identifier()?; let operation = if self.parse_keywords(&[Keyword::ADD, Keyword::MEMBER]) { @@ 
-352,7 +352,7 @@ impl Parser<'_> { }) } - fn parse_pg_alter_role(&mut self) -> Result { + fn parse_pg_alter_role(&self) -> Result { let role_name = self.parse_identifier()?; // [ IN DATABASE _`database_name`_ ] @@ -436,7 +436,7 @@ impl Parser<'_> { }) } - fn parse_pg_role_option(&mut self) -> Result { + fn parse_pg_role_option(&self) -> Result { let option = match self.parse_one_of_keywords(&[ Keyword::BYPASSRLS, Keyword::NOBYPASSRLS, diff --git a/src/parser/mod.rs b/src/parser/mod.rs index 9a01e510b..26e961277 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -44,6 +44,7 @@ use crate::dialect::*; use crate::keywords::{Keyword, ALL_KEYWORDS}; use crate::tokenizer::*; use sqlparser::parser::ParserState::ColumnDefinition; +use core::cell::Cell; mod alter; @@ -331,9 +332,9 @@ pub struct Parser<'a> { /// The tokens tokens: Vec, /// The index of the first unprocessed token in [`Parser::tokens`]. - index: usize, + index: Cell, /// The current state of the parser. - state: ParserState, + state: Cell, /// The SQL dialect to use. 
dialect: &'a dyn Dialect, /// Additional options that allow you to mix & match behavior @@ -363,8 +364,8 @@ impl<'a> Parser<'a> { pub fn new(dialect: &'a dyn Dialect) -> Self { Self { tokens: vec![], - index: 0, - state: ParserState::Normal, + index: Cell::new(0), + state: Cell::new(ParserState::Normal), dialect, recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH), options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()), @@ -428,7 +429,7 @@ impl<'a> Parser<'a> { /// Reset this parser to parse the specified token stream pub fn with_tokens_with_locations(mut self, tokens: Vec) -> Self { self.tokens = tokens; - self.index = 0; + self.index = Cell::new(0); self } @@ -474,7 +475,7 @@ impl<'a> Parser<'a> { /// # Ok(()) /// # } /// ``` - pub fn parse_statements(&mut self) -> Result, ParserError> { + pub fn parse_statements(&self) -> Result, ParserError> { let mut stmts = Vec::new(); let mut expecting_statement_delimiter = false; loop { @@ -531,7 +532,7 @@ impl<'a> Parser<'a> { /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.), /// stopping before the statement separator, if any. - pub fn parse_statement(&mut self) -> Result { + pub fn parse_statement(&self) -> Result { let _guard = self.recursion_counter.try_decrease()?; // allow the dialect to override statement parsing @@ -669,7 +670,7 @@ impl<'a> Parser<'a> { /// Parse a `CASE` statement. /// /// See [Statement::Case] - pub fn parse_case_stmt(&mut self) -> Result { + pub fn parse_case_stmt(&self) -> Result { let case_token = self.expect_keyword(Keyword::CASE)?; let match_expr = if self.peek_keyword(Keyword::WHEN) { @@ -706,7 +707,7 @@ impl<'a> Parser<'a> { /// Parse an `IF` statement. 
/// /// See [Statement::If] - pub fn parse_if_stmt(&mut self) -> Result { + pub fn parse_if_stmt(&self) -> Result { self.expect_keyword_is(Keyword::IF)?; let if_block = self.parse_conditional_statement_block(&[ Keyword::ELSE, @@ -746,7 +747,7 @@ impl<'a> Parser<'a> { /// Parse a `WHILE` statement. /// /// See [Statement::While] - fn parse_while(&mut self) -> Result { + fn parse_while(&self) -> Result { self.expect_keyword_is(Keyword::WHILE)?; let while_block = self.parse_conditional_statement_block(&[Keyword::END])?; @@ -761,7 +762,7 @@ impl<'a> Parser<'a> { /// IF condition THEN statement1; statement2; /// ``` fn parse_conditional_statement_block( - &mut self, + &self, terminal_keywords: &[Keyword], ) -> Result { let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?; @@ -793,7 +794,7 @@ impl<'a> Parser<'a> { /// Parse a BEGIN/END block or a sequence of statements /// This could be inside of a conditional (IF, CASE, WHILE etc.) or an object body defined optionally BEGIN/END and one or more statements. pub(crate) fn parse_conditional_statements( - &mut self, + &self, terminal_keywords: &[Keyword], ) -> Result { let conditional_statements = if self.peek_keyword(Keyword::BEGIN) { @@ -817,7 +818,7 @@ impl<'a> Parser<'a> { /// Parse a `RAISE` statement. 
/// /// See [Statement::Raise] - pub fn parse_raise_stmt(&mut self) -> Result { + pub fn parse_raise_stmt(&self) -> Result { self.expect_keyword_is(Keyword::RAISE)?; let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) { @@ -830,7 +831,7 @@ impl<'a> Parser<'a> { Ok(Statement::Raise(RaiseStatement { value })) } - pub fn parse_comment(&mut self) -> Result { + pub fn parse_comment(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); self.expect_keyword_is(Keyword::ON)?; @@ -875,7 +876,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_flush(&mut self) -> Result { + pub fn parse_flush(&self) -> Result { let mut channel = None; let mut tables: Vec = vec![]; let mut read_lock = false; @@ -962,7 +963,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_msck(&mut self) -> Result { + pub fn parse_msck(&self) -> Result { let repair = self.parse_keyword(Keyword::REPAIR); self.expect_keyword_is(Keyword::TABLE)?; let table_name = self.parse_object_name(false)?; @@ -990,7 +991,7 @@ impl<'a> Parser<'a> { .into()) } - pub fn parse_truncate(&mut self) -> Result { + pub fn parse_truncate(&self) -> Result { let table = self.parse_keyword(Keyword::TABLE); let table_names = self @@ -1036,7 +1037,7 @@ impl<'a> Parser<'a> { .into()) } - fn parse_cascade_option(&mut self) -> Option { + fn parse_cascade_option(&self) -> Option { if self.parse_keyword(Keyword::CASCADE) { Some(CascadeOption::Cascade) } else if self.parse_keyword(Keyword::RESTRICT) { @@ -1047,7 +1048,7 @@ impl<'a> Parser<'a> { } pub fn parse_attach_duckdb_database_options( - &mut self, + &self, ) -> Result, ParserError> { if !self.consume_token(&Token::LParen) { return Ok(vec![]); @@ -1081,7 +1082,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_attach_duckdb_database(&mut self) -> Result { + pub fn parse_attach_duckdb_database(&self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, 
Keyword::EXISTS]); let database_path = self.parse_identifier()?; @@ -1101,7 +1102,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_detach_duckdb_database(&mut self) -> Result { + pub fn parse_detach_duckdb_database(&self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let database_alias = self.parse_identifier()?; @@ -1112,7 +1113,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_attach_database(&mut self) -> Result { + pub fn parse_attach_database(&self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let database_file_name = self.parse_expr()?; self.expect_keyword_is(Keyword::AS)?; @@ -1124,7 +1125,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_analyze(&mut self) -> Result { + pub fn parse_analyze(&self) -> Result { let has_table_keyword = self.parse_keyword(Keyword::TABLE); let table_name = self.parse_object_name(false)?; let mut for_columns = false; @@ -1183,8 +1184,8 @@ impl<'a> Parser<'a> { } /// Parse a new expression including wildcard & qualified wildcard. - pub fn parse_wildcard_expr(&mut self) -> Result { - let index = self.index; + pub fn parse_wildcard_expr(&self) -> Result { + let index = self.index.get(); let next_token = self.next_token(); match next_token.token { @@ -1224,21 +1225,21 @@ impl<'a> Parser<'a> { _ => (), }; - self.index = index; + self.index.set(index); self.parse_expr() } /// Parse a new expression. 
- pub fn parse_expr(&mut self) -> Result { + pub fn parse_expr(&self) -> Result { self.parse_subexpr(self.dialect.prec_unknown()) } pub fn parse_expr_with_alias_and_order_by( - &mut self, + &self, ) -> Result { let expr = self.parse_expr()?; - fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool { + fn validator(explicit: bool, kw: &Keyword, _parser: &Parser) -> bool { explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw) } let alias = self.parse_optional_alias_inner(None, validator)?; @@ -1253,7 +1254,7 @@ impl<'a> Parser<'a> { } /// Parse tokens until the precedence changes. - pub fn parse_subexpr(&mut self, precedence: u8) -> Result { + pub fn parse_subexpr(&self, precedence: u8) -> Result { let _guard = self.recursion_counter.try_decrease()?; debug!("parsing expr"); let mut expr = self.parse_prefix()?; @@ -1280,7 +1281,7 @@ impl<'a> Parser<'a> { Ok(expr) } - pub fn parse_assert(&mut self) -> Result { + pub fn parse_assert(&self) -> Result { let condition = self.parse_expr()?; let message = if self.parse_keyword(Keyword::AS) { Some(self.parse_expr()?) 
@@ -1291,24 +1292,24 @@ impl<'a> Parser<'a> { Ok(Statement::Assert { condition, message }) } - pub fn parse_savepoint(&mut self) -> Result { + pub fn parse_savepoint(&self) -> Result { let name = self.parse_identifier()?; Ok(Statement::Savepoint { name }) } - pub fn parse_release(&mut self) -> Result { + pub fn parse_release(&self) -> Result { let _ = self.parse_keyword(Keyword::SAVEPOINT); let name = self.parse_identifier()?; Ok(Statement::ReleaseSavepoint { name }) } - pub fn parse_listen(&mut self) -> Result { + pub fn parse_listen(&self) -> Result { let channel = self.parse_identifier()?; Ok(Statement::LISTEN { channel }) } - pub fn parse_unlisten(&mut self) -> Result { + pub fn parse_unlisten(&self) -> Result { let channel = if self.consume_token(&Token::Mul) { Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string()) } else { @@ -1323,7 +1324,7 @@ impl<'a> Parser<'a> { Ok(Statement::UNLISTEN { channel }) } - pub fn parse_notify(&mut self) -> Result { + pub fn parse_notify(&self) -> Result { let channel = self.parse_identifier()?; let payload = if self.consume_token(&Token::Comma) { Some(self.parse_literal_string()?) @@ -1334,7 +1335,7 @@ impl<'a> Parser<'a> { } /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable] - pub fn parse_rename(&mut self) -> Result { + pub fn parse_rename(&self) -> Result { if self.peek_keyword(Keyword::TABLE) { self.expect_keyword(Keyword::TABLE)?; let rename_tables = self.parse_comma_separated(|parser| { @@ -1353,7 +1354,7 @@ impl<'a> Parser<'a> { /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect. /// Returns `None if no match is found. 
fn parse_expr_prefix_by_reserved_word( - &mut self, + &self, w: &Word, w_span: Span, ) -> Result, ParserError> { @@ -1453,7 +1454,7 @@ impl<'a> Parser<'a> { let struct_expr = self.parse_struct_literal()?; Ok(Some(struct_expr)) } - Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => { + Keyword::PRIOR if matches!(self.state.get(), ParserState::ConnectBy) => { let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?; Ok(Some(Expr::Prior(Box::new(expr)))) } @@ -1476,7 +1477,7 @@ impl<'a> Parser<'a> { /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect. fn parse_expr_prefix_by_unreserved_word( - &mut self, + &self, w: &Word, w_span: Span, ) -> Result { @@ -1519,7 +1520,7 @@ impl<'a> Parser<'a> { } /// Parse an expression prefix. - pub fn parse_prefix(&mut self) -> Result { + pub fn parse_prefix(&self) -> Result { // allow the dialect to override prefix parsing if let Some(prefix) = self.dialect.parse_prefix(self) { return prefix; @@ -1744,7 +1745,7 @@ impl<'a> Parser<'a> { } } - fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result { + fn parse_geometric_type(&self, kind: GeometricTypeKind) -> Result { Ok(Expr::TypedString(TypedString { data_type: DataType::GeometricType(kind), value: self.parse_value()?, @@ -1759,7 +1760,7 @@ impl<'a> Parser<'a> { /// or bracket notation. /// For example: `a.b.c`, `a.b[1]`. pub fn parse_compound_expr( - &mut self, + &self, root: Expr, mut chain: Vec, ) -> Result { @@ -1981,7 +1982,7 @@ impl<'a> Parser<'a> { } /// Returns true if the next tokens indicate the outer join operator `(+)`. - fn peek_outer_join_operator(&mut self) -> bool { + fn peek_outer_join_operator(&self) -> bool { if !self.dialect.supports_outer_join_operator() { return false; } @@ -1994,12 +1995,12 @@ impl<'a> Parser<'a> { /// If the next tokens indicates the outer join operator `(+)`, consume /// the tokens and return true. 
- fn maybe_parse_outer_join_operator(&mut self) -> bool { + fn maybe_parse_outer_join_operator(&self) -> bool { self.dialect.supports_outer_join_operator() && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen]) } - pub fn parse_utility_options(&mut self) -> Result, ParserError> { + pub fn parse_utility_options(&self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; let options = self.parse_comma_separated(Self::parse_utility_option)?; self.expect_token(&Token::RParen)?; @@ -2007,7 +2008,7 @@ impl<'a> Parser<'a> { Ok(options) } - fn parse_utility_option(&mut self) -> Result { + fn parse_utility_option(&self) -> Result { let name = self.parse_identifier()?; let next_token = self.peek_token(); @@ -2022,7 +2023,7 @@ impl<'a> Parser<'a> { }) } - fn try_parse_expr_sub_query(&mut self) -> Result, ParserError> { + fn try_parse_expr_sub_query(&self) -> Result, ParserError> { if !self.peek_sub_query() { return Ok(None); } @@ -2030,7 +2031,7 @@ impl<'a> Parser<'a> { Ok(Some(Expr::Subquery(self.parse_query()?))) } - fn try_parse_lambda(&mut self) -> Result, ParserError> { + fn try_parse_lambda(&self) -> Result, ParserError> { if !self.dialect.supports_lambda_functions() { return Ok(None); } @@ -2052,7 +2053,7 @@ impl<'a> Parser<'a> { /// Scalar Function Calls /// Date, Time, and Timestamp Literals /// See - fn maybe_parse_odbc_body(&mut self) -> Result, ParserError> { + fn maybe_parse_odbc_body(&self) -> Result, ParserError> { // Attempt 1: Try to parse it as a function. if let Some(expr) = self.maybe_parse_odbc_fn_body()? 
{ return Ok(Some(expr)); @@ -2071,7 +2072,7 @@ impl<'a> Parser<'a> { /// /// [ODBC Date, Time, and Timestamp Literals]: /// https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/date-time-and-timestamp-literals?view=sql-server-2017 - fn maybe_parse_odbc_body_datetime(&mut self) -> Result, ParserError> { + fn maybe_parse_odbc_body_datetime(&self) -> Result, ParserError> { self.maybe_parse(|p| { let token = p.next_token().clone(); let word_string = token.token.to_string(); @@ -2098,7 +2099,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017 - fn maybe_parse_odbc_fn_body(&mut self) -> Result, ParserError> { + fn maybe_parse_odbc_fn_body(&self) -> Result, ParserError> { self.maybe_parse(|p| { p.expect_keyword(Keyword::FN)?; let fn_name = p.parse_object_name(false)?; @@ -2108,11 +2109,11 @@ impl<'a> Parser<'a> { }) } - pub fn parse_function(&mut self, name: ObjectName) -> Result { + pub fn parse_function(&self, name: ObjectName) -> Result { self.parse_function_call(name).map(Expr::Function) } - fn parse_function_call(&mut self, name: ObjectName) -> Result { + fn parse_function_call(&self, name: ObjectName) -> Result { self.expect_token(&Token::LParen)?; // Snowflake permits a subquery to be passed as an argument without @@ -2201,7 +2202,7 @@ impl<'a> Parser<'a> { } /// Optionally parses a null treatment clause. 
- fn parse_null_treatment(&mut self) -> Result, ParserError> { + fn parse_null_treatment(&self) -> Result, ParserError> { match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) { Some(keyword) => { self.expect_keyword_is(Keyword::NULLS)?; @@ -2216,7 +2217,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_time_functions(&mut self, name: ObjectName) -> Result { + pub fn parse_time_functions(&self, name: ObjectName) -> Result { let args = if self.consume_token(&Token::LParen) { FunctionArguments::List(self.parse_function_argument_list()?) } else { @@ -2234,7 +2235,7 @@ impl<'a> Parser<'a> { })) } - pub fn parse_window_frame_units(&mut self) -> Result { + pub fn parse_window_frame_units(&self) -> Result { let next_token = self.next_token(); match &next_token.token { Token::Word(w) => match w.keyword { @@ -2247,7 +2248,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_window_frame(&mut self) -> Result { + pub fn parse_window_frame(&self) -> Result { let units = self.parse_window_frame_units()?; let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) { let start_bound = self.parse_window_frame_bound()?; @@ -2265,7 +2266,7 @@ impl<'a> Parser<'a> { } /// Parse `CURRENT ROW` or `{ | UNBOUNDED } { PRECEDING | FOLLOWING }` - pub fn parse_window_frame_bound(&mut self) -> Result { + pub fn parse_window_frame_bound(&self) -> Result { if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) { Ok(WindowFrameBound::CurrentRow) } else { @@ -2288,7 +2289,7 @@ impl<'a> Parser<'a> { } /// Parse a group by expr. Group by expr can be one of group sets, roll up, cube, or simple expr. 
- fn parse_group_by_expr(&mut self) -> Result { + fn parse_group_by_expr(&self) -> Result { if self.dialect.supports_group_by_expr() { if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) { self.expect_token(&Token::LParen)?; @@ -2323,7 +2324,7 @@ impl<'a> Parser<'a> { /// If `lift_singleton` is true, then a singleton tuple is lifted to a tuple of length 1, otherwise it will fail. /// If `allow_empty` is true, then an empty tuple is allowed. fn parse_tuple( - &mut self, + &self, lift_singleton: bool, allow_empty: bool, ) -> Result, ParserError> { @@ -2353,7 +2354,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_case_expr(&mut self) -> Result { + pub fn parse_case_expr(&self) -> Result { let case_token = AttachedToken(self.get_current_token().clone()); let mut operand = None; if !self.parse_keyword(Keyword::WHEN) { @@ -2385,7 +2386,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_optional_cast_format(&mut self) -> Result, ParserError> { + pub fn parse_optional_cast_format(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::FORMAT) { let value = self.parse_value()?.value; match self.parse_optional_time_zone()? 
{ @@ -2397,7 +2398,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_time_zone(&mut self) -> Result, ParserError> { + pub fn parse_optional_time_zone(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) { self.parse_value().map(|v| Some(v.value)) } else { @@ -2406,7 +2407,7 @@ impl<'a> Parser<'a> { } /// mssql-like convert function - fn parse_mssql_convert(&mut self, is_try: bool) -> Result { + fn parse_mssql_convert(&self, is_try: bool) -> Result { self.expect_token(&Token::LParen)?; let data_type = self.parse_data_type()?; self.expect_token(&Token::Comma)?; @@ -2431,7 +2432,7 @@ impl<'a> Parser<'a> { /// - `CONVERT('héhé' USING utf8mb4)` (MySQL) /// - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL) /// - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first - pub fn parse_convert_expr(&mut self, is_try: bool) -> Result { + pub fn parse_convert_expr(&self, is_try: bool) -> Result { if self.dialect.convert_type_before_value() { return self.parse_mssql_convert(is_try); } @@ -2468,7 +2469,7 @@ impl<'a> Parser<'a> { } /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)` - pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result { + pub fn parse_cast_expr(&self, kind: CastKind) -> Result { self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; self.expect_keyword_is(Keyword::AS)?; @@ -2484,7 +2485,7 @@ impl<'a> Parser<'a> { } /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`. 
- pub fn parse_exists_expr(&mut self, negated: bool) -> Result { + pub fn parse_exists_expr(&self, negated: bool) -> Result { self.expect_token(&Token::LParen)?; let exists_node = Expr::Exists { negated, @@ -2494,7 +2495,7 @@ impl<'a> Parser<'a> { Ok(exists_node) } - pub fn parse_extract_expr(&mut self) -> Result { + pub fn parse_extract_expr(&self) -> Result { self.expect_token(&Token::LParen)?; let field = self.parse_date_time_field()?; @@ -2519,7 +2520,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result { + pub fn parse_ceil_floor_expr(&self, is_ceil: bool) -> Result { self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; // Parse `CEIL/FLOOR(expr)` @@ -2553,7 +2554,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_position_expr(&mut self, ident: Ident) -> Result { + pub fn parse_position_expr(&self, ident: Ident) -> Result { let between_prec = self.dialect.prec_value(Precedence::Between); let position_expr = self.maybe_parse(|p| { // PARSE SELECT POSITION('@' in field) @@ -2578,7 +2579,7 @@ impl<'a> Parser<'a> { } // { SUBSTRING | SUBSTR } ( [FROM 1] [FOR 3]) - pub fn parse_substring(&mut self) -> Result { + pub fn parse_substring(&self) -> Result { let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? 
{ Keyword::SUBSTR => true, Keyword::SUBSTRING => false, @@ -2610,7 +2611,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_overlay_expr(&mut self) -> Result { + pub fn parse_overlay_expr(&self) -> Result { // PARSE OVERLAY (EXPR PLACING EXPR FROM 1 [FOR 3]) self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; @@ -2637,7 +2638,7 @@ impl<'a> Parser<'a> { /// TRIM ('text') /// TRIM(, [, characters]) -- only Snowflake or BigQuery /// ``` - pub fn parse_trim_expr(&mut self) -> Result { + pub fn parse_trim_expr(&self) -> Result { self.expect_token(&Token::LParen)?; let mut trim_where = None; if let Token::Word(word) = self.peek_token().token { @@ -2678,7 +2679,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_trim_where(&mut self) -> Result { + pub fn parse_trim_where(&self) -> Result { let next_token = self.next_token(); match &next_token.token { Token::Word(w) => match w.keyword { @@ -2693,13 +2694,13 @@ impl<'a> Parser<'a> { /// Parses an array expression `[ex1, ex2, ..]` /// if `named` is `true`, came from an expression like `ARRAY[ex1, ex2]` - pub fn parse_array_expr(&mut self, named: bool) -> Result { + pub fn parse_array_expr(&self, named: bool) -> Result { let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?; self.expect_token(&Token::RBracket)?; Ok(Expr::Array(Array { elem: exprs, named })) } - pub fn parse_listagg_on_overflow(&mut self) -> Result, ParserError> { + pub fn parse_listagg_on_overflow(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) { if self.parse_keyword(Keyword::ERROR) { Ok(Some(ListAggOnOverflow::Error)) @@ -2737,7 +2738,7 @@ impl<'a> Parser<'a> { // operator, interval qualifiers, and the ceil/floor operations. // EXTRACT supports a wider set of date/time fields than interval qualifiers, // so this function may need to be split in two. 
- pub fn parse_date_time_field(&mut self) -> Result { + pub fn parse_date_time_field(&self) -> Result { let next_token = self.next_token(); match &next_token.token { Token::Word(w) => match w.keyword { @@ -2810,7 +2811,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_not(&mut self) -> Result { + pub fn parse_not(&self) -> Result { match self.peek_token().token { Token::Word(w) => match w.keyword { Keyword::EXISTS => { @@ -2841,7 +2842,7 @@ impl<'a> Parser<'a> { /// -- Function call using the ODBC syntax. /// { fn CONCAT('foo', 'bar') } /// ``` - fn parse_lbrace_expr(&mut self) -> Result { + fn parse_lbrace_expr(&self) -> Result { let token = self.expect_token(&Token::LBrace)?; if let Some(fn_expr) = self.maybe_parse_odbc_body()? { @@ -2862,7 +2863,7 @@ impl<'a> Parser<'a> { /// # Errors /// This method will raise an error if the column list is empty or with invalid identifiers, /// the match expression is not a literal string, or if the search modifier is not valid. - pub fn parse_match_against(&mut self) -> Result { + pub fn parse_match_against(&self) -> Result { let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?; self.expect_keyword_is(Keyword::AGAINST)?; @@ -2921,7 +2922,7 @@ impl<'a> Parser<'a> { /// ``` /// /// Note that we do not currently attempt to parse the quoted value. - pub fn parse_interval(&mut self) -> Result { + pub fn parse_interval(&self) -> Result { // The SQL standard allows an optional sign before the value string, but // it is not clear if any implementations support that syntax, so we // don't currently try to parse it. (The sign can instead be included @@ -2991,7 +2992,7 @@ impl<'a> Parser<'a> { /// Peek at the next token and determine if it is a temporal unit /// like `second`. 
- pub fn next_token_is_temporal_unit(&mut self) -> bool { + pub fn next_token_is_temporal_unit(&self) -> bool { if let Token::Word(word) = self.peek_token().token { matches!( word.keyword, @@ -3042,7 +3043,7 @@ impl<'a> Parser<'a> { /// -- typeless /// STRUCT( expr1 [AS field_name] [, ... ]) /// ``` - fn parse_struct_literal(&mut self) -> Result { + fn parse_struct_literal(&self) -> Result { // Parse the fields definition if exist `<[field_name] field_type, ...>` self.prev_token(); let (fields, trailing_bracket) = @@ -3076,7 +3077,7 @@ impl<'a> Parser<'a> { /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax - fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result { + fn parse_struct_field_expr(&self, typed_syntax: bool) -> Result { let expr = self.parse_expr()?; if self.parse_keyword(Keyword::AS) { if typed_syntax { @@ -3108,11 +3109,11 @@ impl<'a> Parser<'a> { /// STRUCT<[field_name] field_type> /// ``` fn parse_struct_type_def( - &mut self, + &self, mut elem_parser: F, ) -> Result<(Vec, MatchedTrailingBracket), ParserError> where - F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>, + F: FnMut(&Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>, { self.expect_keyword_is(Keyword::STRUCT)?; @@ -3139,7 +3140,7 @@ impl<'a> Parser<'a> { } /// Duckdb Struct Data Type - fn parse_duckdb_struct_type_def(&mut self) -> Result, ParserError> { + fn parse_duckdb_struct_type_def(&self) -> Result, ParserError> { self.expect_keyword_is(Keyword::STRUCT)?; self.expect_token(&Token::LParen)?; let struct_body = self.parse_comma_separated(|parser| { @@ -3165,9 +3166,7 @@ impl<'a> Parser<'a> { /// /// [struct]: 
https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple - fn parse_struct_field_def( - &mut self, - ) -> Result<(StructField, MatchedTrailingBracket), ParserError> { + fn parse_struct_field_def(&self) -> Result<(StructField, MatchedTrailingBracket), ParserError> { // Look beyond the next item to infer whether both field name // and type are specified. let is_anonymous_field = !matches!( @@ -3203,7 +3202,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [1]: https://duckdb.org/docs/sql/data_types/union.html - fn parse_union_type_def(&mut self) -> Result, ParserError> { + fn parse_union_type_def(&self) -> Result, ParserError> { self.expect_keyword_is(Keyword::UNION)?; self.expect_token(&Token::LParen)?; @@ -3230,7 +3229,7 @@ impl<'a> Parser<'a> { /// /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters - fn parse_dictionary(&mut self) -> Result { + fn parse_dictionary(&self) -> Result { self.expect_token(&Token::LBrace)?; let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?; @@ -3250,7 +3249,7 @@ impl<'a> Parser<'a> { /// /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters - fn parse_dictionary_field(&mut self) -> Result { + fn parse_dictionary_field(&self) -> Result { let key = self.parse_identifier()?; self.expect_token(&Token::Colon)?; @@ -3272,7 +3271,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps - fn parse_duckdb_map_literal(&mut self) -> Result { + fn parse_duckdb_map_literal(&self) -> Result { self.expect_token(&Token::LBrace)?; let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?; 
self.expect_token(&Token::RBrace)?; @@ -3288,7 +3287,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps - fn parse_duckdb_map_field(&mut self) -> Result { + fn parse_duckdb_map_field(&self) -> Result { let key = self.parse_expr()?; self.expect_token(&Token::Colon)?; @@ -3310,7 +3309,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map - fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> { + fn parse_click_house_map_def(&self) -> Result<(DataType, DataType), ParserError> { self.expect_keyword_is(Keyword::MAP)?; self.expect_token(&Token::LParen)?; let key_data_type = self.parse_data_type()?; @@ -3330,7 +3329,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple - fn parse_click_house_tuple_def(&mut self) -> Result, ParserError> { + fn parse_click_house_tuple_def(&self) -> Result, ParserError> { self.expect_keyword_is(Keyword::TUPLE)?; self.expect_token(&Token::LParen)?; let mut field_defs = vec![]; @@ -3351,7 +3350,7 @@ impl<'a> Parser<'a> { /// matched `trailing_bracket` argument). It returns whether there is a trailing /// left to be matched - (i.e. if '>>' was matched). 
fn expect_closing_angle_bracket( - &mut self, + &self, trailing_bracket: MatchedTrailingBracket, ) -> Result { let trailing_bracket = if !trailing_bracket.0 { @@ -3374,7 +3373,7 @@ impl<'a> Parser<'a> { } /// Parse an operator following an expression - pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result { + pub fn parse_infix(&self, expr: Expr, precedence: u8) -> Result { // allow the dialect to override infix parsing if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) { return infix; @@ -3755,7 +3754,7 @@ impl<'a> Parser<'a> { } /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO` - pub fn parse_escape_char(&mut self) -> Result, ParserError> { + pub fn parse_escape_char(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::ESCAPE) { Ok(Some(self.parse_value()?.into())) } else { @@ -3772,7 +3771,7 @@ impl<'a> Parser<'a> { /// * `[l:u:s]` /// /// Parser is right after `[` - fn parse_subscript_inner(&mut self) -> Result { + fn parse_subscript_inner(&self) -> Result { // at either `:(rest)` or `:(rest)]` let lower_bound = if self.consume_token(&Token::Colon) { None @@ -3838,7 +3837,7 @@ impl<'a> Parser<'a> { /// Parse a multi-dimension array accessing like `[1:3][1][1]` pub fn parse_multi_dim_subscript( - &mut self, + &self, chain: &mut Vec, ) -> Result<(), ParserError> { while self.consume_token(&Token::LBracket) { @@ -3850,13 +3849,13 @@ impl<'a> Parser<'a> { /// Parses an array subscript like `[1:3]` /// /// Parser is right after `[` - fn parse_subscript(&mut self, chain: &mut Vec) -> Result<(), ParserError> { + fn parse_subscript(&self, chain: &mut Vec) -> Result<(), ParserError> { let subscript = self.parse_subscript_inner()?; chain.push(AccessExpr::Subscript(subscript)); Ok(()) } - fn parse_json_path_object_key(&mut self) -> Result { + fn parse_json_path_object_key(&self) -> Result { let token = self.next_token(); match token.token { Token::Word(Word { @@ -3880,7 +3879,7 @@ impl<'a> Parser<'a> { } 
} - fn parse_json_access(&mut self, expr: Expr) -> Result { + fn parse_json_access(&self, expr: Expr) -> Result { let path = self.parse_json_path()?; Ok(Expr::JsonAccess { value: Box::new(expr), @@ -3888,7 +3887,7 @@ impl<'a> Parser<'a> { }) } - fn parse_json_path(&mut self) -> Result { + fn parse_json_path(&self) -> Result { let mut path = Vec::new(); loop { match self.next_token().token { @@ -3916,7 +3915,7 @@ impl<'a> Parser<'a> { } /// Parses the parens following the `[ NOT ] IN` operator. - pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result { + pub fn parse_in(&self, expr: Expr, negated: bool) -> Result { // BigQuery allows `IN UNNEST(array_expression)` // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators if self.parse_keyword(Keyword::UNNEST) { @@ -3951,7 +3950,7 @@ impl<'a> Parser<'a> { } /// Parses `BETWEEN AND `, assuming the `BETWEEN` keyword was already consumed. - pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result { + pub fn parse_between(&self, expr: Expr, negated: bool) -> Result { // Stop parsing subexpressions for and on tokens with // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc. let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?; @@ -3966,7 +3965,7 @@ impl<'a> Parser<'a> { } /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`. - pub fn parse_pg_cast(&mut self, expr: Expr) -> Result { + pub fn parse_pg_cast(&self, expr: Expr) -> Result { Ok(Expr::Cast { kind: CastKind::DoubleColon, expr: Box::new(expr), @@ -4032,7 +4031,7 @@ impl<'a> Parser<'a> { /// /// See [`Self::peek_token`] for an example. pub fn peek_tokens_with_location(&self) -> [TokenWithSpan; N] { - let mut index = self.index; + let mut index = self.index.get(); core::array::from_fn(|_| loop { let token = self.tokens.get(index); index += 1; @@ -4055,7 +4054,7 @@ impl<'a> Parser<'a> { /// /// See [`Self::peek_tokens`] for an example. 
pub fn peek_tokens_ref(&self) -> [&TokenWithSpan; N] { - let mut index = self.index; + let mut index = self.index.get(); core::array::from_fn(|_| loop { let token = self.tokens.get(index); index += 1; @@ -4077,7 +4076,7 @@ impl<'a> Parser<'a> { /// Return nth non-whitespace token that has not yet been processed pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan { - let mut index = self.index; + let mut index = self.index.get(); loop { index += 1; match self.tokens.get(index - 1) { @@ -4104,7 +4103,7 @@ impl<'a> Parser<'a> { /// Return nth token, possibly whitespace, that has not yet been processed. pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan { self.tokens - .get(self.index + n) + .get(self.index.get() + n) .cloned() .unwrap_or(TokenWithSpan { token: Token::EOF, @@ -4115,10 +4114,10 @@ impl<'a> Parser<'a> { /// Return true if the next tokens exactly `expected` /// /// Does not advance the current token. - fn peek_keywords(&mut self, expected: &[Keyword]) -> bool { - let index = self.index; + fn peek_keywords(&self, expected: &[Keyword]) -> bool { + let index = self.index.get(); let matched = self.parse_keywords(expected); - self.index = index; + self.index.set(index); matched } @@ -4126,7 +4125,7 @@ impl<'a> Parser<'a> { /// /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to /// avoid the copy. - pub fn next_token(&mut self) -> TokenWithSpan { + pub fn next_token(&self) -> TokenWithSpan { self.advance_token(); self.get_current_token().clone() } @@ -4136,22 +4135,22 @@ impl<'a> Parser<'a> { /// This can be used with APIs that expect an index, such as /// [`Self::token_at`] pub fn get_current_index(&self) -> usize { - self.index.saturating_sub(1) + self.index.get().saturating_sub(1) } /// Return the next unprocessed token, possibly whitespace. 
- pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> { - self.index += 1; - self.tokens.get(self.index - 1) + pub fn next_token_no_skip(&self) -> Option<&TokenWithSpan> { + self.index.set(self.index.get() + 1); + self.tokens.get(self.index.get() - 1) } /// Advances the current token to the next non-whitespace token /// /// See [`Self::get_current_token`] to get the current token after advancing - pub fn advance_token(&mut self) { + pub fn advance_token(&self) { loop { - self.index += 1; - match self.tokens.get(self.index - 1) { + self.index.set(self.index.get() + 1); + match self.tokens.get(self.index.get() - 1) { Some(TokenWithSpan { token: Token::Whitespace(_), span: _, @@ -4165,21 +4164,21 @@ impl<'a> Parser<'a> { /// /// Does not advance the current token. pub fn get_current_token(&self) -> &TokenWithSpan { - self.token_at(self.index.saturating_sub(1)) + self.token_at(self.index.get().saturating_sub(1)) } /// Returns a reference to the previous token /// /// Does not advance the current token. pub fn get_previous_token(&self) -> &TokenWithSpan { - self.token_at(self.index.saturating_sub(2)) + self.token_at(self.index.get().saturating_sub(2)) } /// Returns a reference to the next token /// /// Does not advance the current token. pub fn get_next_token(&self) -> &TokenWithSpan { - self.token_at(self.index) + self.token_at(self.index.get()) } /// Seek back the last one non-whitespace token. @@ -4188,14 +4187,14 @@ impl<'a> Parser<'a> { /// after `next_token()` indicates an EOF. /// // TODO rename to backup_token and deprecate prev_token? 
- pub fn prev_token(&mut self) { + pub fn prev_token(&self) { loop { - assert!(self.index > 0); - self.index -= 1; + assert!(self.index.get() > 0); + self.index.set(self.index.get() - 1); if let Some(TokenWithSpan { token: Token::Whitespace(_), span: _, - }) = self.tokens.get(self.index) + }) = self.tokens.get(self.index.get()) { continue; } @@ -4231,7 +4230,7 @@ impl<'a> Parser<'a> { /// If the current token is the `expected` keyword, consume it and returns /// true. Otherwise, no tokens are consumed and returns false. #[must_use] - pub fn parse_keyword(&mut self, expected: Keyword) -> bool { + pub fn parse_keyword(&self, expected: Keyword) -> bool { if self.peek_keyword(expected) { self.advance_token(); true @@ -4252,7 +4251,7 @@ impl<'a> Parser<'a> { /// Note that if the length of `tokens` is too long, this function will /// not be efficient as it does a loop on the tokens with `peek_nth_token` /// each time. - pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool { + pub fn parse_keyword_with_tokens(&self, expected: Keyword, tokens: &[Token]) -> bool { self.keyword_with_tokens(expected, tokens, true) } @@ -4260,11 +4259,11 @@ impl<'a> Parser<'a> { /// without consuming them. /// /// See [Self::parse_keyword_with_tokens] for details. - pub(crate) fn peek_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool { + pub(crate) fn peek_keyword_with_tokens(&self, expected: Keyword, tokens: &[Token]) -> bool { self.keyword_with_tokens(expected, tokens, false) } - fn keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token], consume: bool) -> bool { + fn keyword_with_tokens(&self, expected: Keyword, tokens: &[Token], consume: bool) -> bool { match &self.peek_token_ref().token { Token::Word(w) if expected == w.keyword => { for (idx, token) in tokens.iter().enumerate() { @@ -4289,13 +4288,13 @@ impl<'a> Parser<'a> { /// sequence, consume them and returns true. 
Otherwise, no tokens are /// consumed and returns false #[must_use] - pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool { - let index = self.index; + pub fn parse_keywords(&self, keywords: &[Keyword]) -> bool { + let index = self.index.get(); for &keyword in keywords { if !self.parse_keyword(keyword) { // println!("parse_keywords aborting .. did not find {:?}", keyword); // reset index and return immediately - self.index = index; + self.index.set(index); return false; } } @@ -4318,7 +4317,7 @@ impl<'a> Parser<'a> { /// and return the keyword that matches. Otherwise, no tokens are consumed /// and returns [`None`]. #[must_use] - pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option { + pub fn parse_one_of_keywords(&self, keywords: &[Keyword]) -> Option { match &self.peek_token_ref().token { Token::Word(w) => { keywords @@ -4335,7 +4334,7 @@ impl<'a> Parser<'a> { /// If the current token is one of the expected keywords, consume the token /// and return the keyword that matches. Otherwise, return an error. - pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result { + pub fn expect_one_of_keywords(&self, keywords: &[Keyword]) -> Result { if let Some(keyword) = self.parse_one_of_keywords(keywords) { Ok(keyword) } else { @@ -4351,7 +4350,7 @@ impl<'a> Parser<'a> { /// Otherwise, return an error. /// // todo deprecate in favor of expected_keyword_is - pub fn expect_keyword(&mut self, expected: Keyword) -> Result { + pub fn expect_keyword(&self, expected: Keyword) -> Result { if self.parse_keyword(expected) { Ok(self.get_current_token().clone()) } else { @@ -4364,7 +4363,7 @@ impl<'a> Parser<'a> { /// /// This differs from expect_keyword only in that the matched keyword /// token is not returned. 
- pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> { + pub fn expect_keyword_is(&self, expected: Keyword) -> Result<(), ParserError> { if self.parse_keyword(expected) { Ok(()) } else { @@ -4374,7 +4373,7 @@ impl<'a> Parser<'a> { /// If the current and subsequent tokens exactly match the `keywords` /// sequence, consume them and returns Ok. Otherwise, return an Error. - pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> { + pub fn expect_keywords(&self, expected: &[Keyword]) -> Result<(), ParserError> { for &kw in expected { self.expect_keyword_is(kw)?; } @@ -4385,7 +4384,7 @@ impl<'a> Parser<'a> { /// /// See [Self::advance_token] to consume the token unconditionally #[must_use] - pub fn consume_token(&mut self, expected: &Token) -> bool { + pub fn consume_token(&self, expected: &Token) -> bool { if self.peek_token_ref() == expected { self.advance_token(); true @@ -4398,11 +4397,11 @@ impl<'a> Parser<'a> { /// sequence, consume them and returns true. 
Otherwise, no tokens are /// consumed and returns false #[must_use] - pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool { - let index = self.index; + pub fn consume_tokens(&self, tokens: &[Token]) -> bool { + let index = self.index.get(); for token in tokens { if !self.consume_token(token) { - self.index = index; + self.index.set(index); return false; } } @@ -4410,7 +4409,7 @@ impl<'a> Parser<'a> { } /// Bail out if the current token is not an expected keyword, or consume it if it is - pub fn expect_token(&mut self, expected: &Token) -> Result { + pub fn expect_token(&self, expected: &Token) -> Result { if self.peek_token_ref() == expected { Ok(self.next_token()) } else { @@ -4431,7 +4430,7 @@ impl<'a> Parser<'a> { } /// Parse a comma-separated list of 1+ SelectItem - pub fn parse_projection(&mut self) -> Result, ParserError> { + pub fn parse_projection(&self) -> Result, ParserError> { // BigQuery and Snowflake allow trailing commas, but only in project lists // e.g. `SELECT 1, 2, FROM t` // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas @@ -4447,7 +4446,7 @@ impl<'a> Parser<'a> { ) } - pub fn parse_actions_list(&mut self) -> Result, ParserError> { + pub fn parse_actions_list(&self) -> Result, ParserError> { let mut values = vec![]; loop { values.push(self.parse_grant_permission()?); @@ -4471,7 +4470,7 @@ impl<'a> Parser<'a> { } /// Parse a list of [TableWithJoins] - fn parse_table_with_joins(&mut self) -> Result, ParserError> { + fn parse_table_with_joins(&self) -> Result, ParserError> { let trailing_commas = self.dialect.supports_from_trailing_commas(); self.parse_comma_separated_with_trailing_commas( @@ -4488,12 +4487,12 @@ impl<'a> Parser<'a> { /// /// Returns true if there is a next element fn is_parse_comma_separated_end_with_trailing_commas( - &mut self, + &self, trailing_commas: bool, is_reserved_keyword: &R, ) -> bool where - R: Fn(&Keyword, &mut Parser) -> bool, + R: Fn(&Keyword, &Parser) -> bool, { if 
!self.consume_token(&Token::Comma) { true @@ -4516,7 +4515,7 @@ impl<'a> Parser<'a> { /// Parse the comma of a comma-separated syntax element. /// Returns true if there is a next element - fn is_parse_comma_separated_end(&mut self) -> bool { + fn is_parse_comma_separated_end(&self) -> bool { self.is_parse_comma_separated_end_with_trailing_commas( self.options.trailing_commas, &Self::is_reserved_for_column_alias, @@ -4524,9 +4523,9 @@ impl<'a> Parser<'a> { } /// Parse a comma-separated list of 1+ items accepted by `F` - pub fn parse_comma_separated(&mut self, f: F) -> Result, ParserError> + pub fn parse_comma_separated(&self, f: F) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { self.parse_comma_separated_with_trailing_commas( f, @@ -4540,14 +4539,14 @@ impl<'a> Parser<'a> { /// keyword is a reserved keyword. /// Allows for control over trailing commas. fn parse_comma_separated_with_trailing_commas( - &mut self, + &self, mut f: F, trailing_commas: bool, is_reserved_keyword: R, ) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, - R: Fn(&Keyword, &mut Parser) -> bool, + F: FnMut(&Parser<'a>) -> Result, + R: Fn(&Keyword, &Parser) -> bool, { let mut values = vec![]; loop { @@ -4563,9 +4562,9 @@ impl<'a> Parser<'a> { } /// Parse a period-separated list of 1+ items accepted by `F` - fn parse_period_separated(&mut self, mut f: F) -> Result, ParserError> + fn parse_period_separated(&self, mut f: F) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { let mut values = vec![]; loop { @@ -4579,12 +4578,12 @@ impl<'a> Parser<'a> { /// Parse a keyword-separated list of 1+ items accepted by `F` pub fn parse_keyword_separated( - &mut self, + &self, keyword: Keyword, mut f: F, ) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { let mut values = vec![]; loop { @@ -4596,9 +4595,9 @@ impl<'a> 
Parser<'a> { Ok(values) } - pub fn parse_parenthesized(&mut self, mut f: F) -> Result + pub fn parse_parenthesized(&self, mut f: F) -> Result where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { self.expect_token(&Token::LParen)?; let res = f(self)?; @@ -4609,12 +4608,12 @@ impl<'a> Parser<'a> { /// Parse a comma-separated list of 0+ items accepted by `F` /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...) pub fn parse_comma_separated0( - &mut self, + &self, f: F, end_token: Token, ) -> Result, ParserError> where - F: FnMut(&mut Parser<'a>) -> Result, + F: FnMut(&Parser<'a>) -> Result, { if self.peek_token().token == end_token { return Ok(vec![]); @@ -4632,7 +4631,7 @@ impl<'a> Parser<'a> { /// If the next token is any of `terminal_keywords` then no more /// statements will be parsed. pub(crate) fn parse_statement_list( - &mut self, + &self, terminal_keywords: &[Keyword], ) -> Result, ParserError> { let mut values = vec![]; @@ -4656,16 +4655,16 @@ impl<'a> Parser<'a> { /// Default implementation of a predicate that returns true if /// the specified keyword is reserved for column alias. /// See [Dialect::is_column_alias] - fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool { + fn is_reserved_for_column_alias(kw: &Keyword, parser: &Parser) -> bool { !parser.dialect.is_column_alias(kw, parser) } /// Run a parser method `f`, reverting back to the current position if unsuccessful. /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`. /// Returns `Ok(None)` if `f` returns any other error. 
- pub fn maybe_parse(&mut self, f: F) -> Result, ParserError> + pub fn maybe_parse(&self, f: F) -> Result, ParserError> where - F: FnMut(&mut Parser) -> Result, + F: FnMut(&Parser) -> Result, { match self.try_parse(f) { Ok(t) => Ok(Some(t)), @@ -4675,16 +4674,16 @@ impl<'a> Parser<'a> { } /// Run a parser method `f`, reverting back to the current position if unsuccessful. - pub fn try_parse(&mut self, mut f: F) -> Result + pub fn try_parse(&self, mut f: F) -> Result where - F: FnMut(&mut Parser) -> Result, + F: FnMut(&Parser) -> Result, { - let index = self.index; + let index = self.index.get(); match f(self) { Ok(t) => Ok(t), Err(e) => { // Unwind stack if limit exceeded - self.index = index; + self.index.set(index); Err(e) } } @@ -4692,7 +4691,7 @@ impl<'a> Parser<'a> { /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed /// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found. - pub fn parse_all_or_distinct(&mut self) -> Result, ParserError> { + pub fn parse_all_or_distinct(&self) -> Result, ParserError> { let loc = self.peek_token().span.start; let all = self.parse_keyword(Keyword::ALL); let distinct = self.parse_keyword(Keyword::DISTINCT); @@ -4719,7 +4718,7 @@ impl<'a> Parser<'a> { } /// Parse a SQL CREATE statement - pub fn parse_create(&mut self) -> Result { + pub fn parse_create(&self) -> Result { let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]); let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]); let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some(); @@ -4798,7 +4797,7 @@ impl<'a> Parser<'a> { } } - fn parse_create_user(&mut self, or_replace: bool) -> Result { + fn parse_create_user(&self, or_replace: bool) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_identifier()?; let options = self @@ -4828,7 +4827,7 @@ impl<'a> Parser<'a> { /// See [DuckDB 
Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details. pub fn parse_create_secret( - &mut self, + &self, or_replace: bool, temporary: bool, persistent: bool, @@ -4859,7 +4858,7 @@ impl<'a> Parser<'a> { let mut options = Vec::new(); if self.consume_token(&Token::Comma) { - options.append(&mut self.parse_comma_separated(|p| { + options.extend(self.parse_comma_separated(|p| { let key = p.parse_identifier()?; let value = p.parse_identifier()?; Ok(SecretOption { key, value }) @@ -4886,7 +4885,7 @@ impl<'a> Parser<'a> { } /// Parse a CACHE TABLE statement - pub fn parse_cache_table(&mut self) -> Result { + pub fn parse_cache_table(&self) -> Result { let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None); if self.parse_keyword(Keyword::TABLE) { let table_name = self.parse_object_name(false)?; @@ -4962,7 +4961,7 @@ impl<'a> Parser<'a> { } /// Parse 'AS' before as query,such as `WITH XXX AS SELECT XXX` oer `CACHE TABLE AS SELECT XXX` - pub fn parse_as_query(&mut self) -> Result<(bool, Box), ParserError> { + pub fn parse_as_query(&self) -> Result<(bool, Box), ParserError> { match self.peek_token().token { Token::Word(word) => match word.keyword { Keyword::AS => { @@ -4976,7 +4975,7 @@ impl<'a> Parser<'a> { } /// Parse a UNCACHE TABLE statement - pub fn parse_uncache_table(&mut self) -> Result { + pub fn parse_uncache_table(&self) -> Result { self.expect_keyword_is(Keyword::TABLE)?; let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; @@ -4987,7 +4986,7 @@ impl<'a> Parser<'a> { } /// SQLite-specific `CREATE VIRTUAL TABLE` - pub fn parse_create_virtual_table(&mut self) -> Result { + pub fn parse_create_virtual_table(&self) -> Result { self.expect_keyword_is(Keyword::TABLE)?; let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; @@ -5006,7 +5005,7 @@ impl<'a> Parser<'a> { 
}) } - pub fn parse_create_schema(&mut self) -> Result { + pub fn parse_create_schema(&self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let schema_name = self.parse_schema_name()?; @@ -5045,7 +5044,7 @@ impl<'a> Parser<'a> { }) } - fn parse_schema_name(&mut self) -> Result { + fn parse_schema_name(&self) -> Result { if self.parse_keyword(Keyword::AUTHORIZATION) { Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?)) } else { @@ -5062,7 +5061,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_create_database(&mut self) -> Result { + pub fn parse_create_database(&self) -> Result { let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let db_name = self.parse_object_name(false)?; let mut location = None; @@ -5107,7 +5106,7 @@ impl<'a> Parser<'a> { } pub fn parse_optional_create_function_using( - &mut self, + &self, ) -> Result, ParserError> { if !self.parse_keyword(Keyword::USING) { return Ok(None); @@ -5129,7 +5128,7 @@ impl<'a> Parser<'a> { } pub fn parse_create_function( - &mut self, + &self, or_alter: bool, or_replace: bool, temporary: bool, @@ -5154,7 +5153,7 @@ impl<'a> Parser<'a> { /// /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html fn parse_postgres_create_function( - &mut self, + &self, or_replace: bool, temporary: bool, ) -> Result { @@ -5281,7 +5280,7 @@ impl<'a> Parser<'a> { /// /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction fn parse_hive_create_function( - &mut self, + &self, or_replace: bool, temporary: bool, ) -> Result { @@ -5315,7 +5314,7 @@ impl<'a> Parser<'a> { /// /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement fn parse_bigquery_create_function( - &mut self, + &self, or_replace: bool, temporary: bool, ) -> Result { @@ -5390,7 +5389,7 @@ impl<'a> Parser<'a> { /// /// [MsSql]: 
https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql fn parse_mssql_create_function( - &mut self, + &self, or_alter: bool, or_replace: bool, temporary: bool, @@ -5476,33 +5475,32 @@ impl<'a> Parser<'a> { } fn parse_create_function_name_and_params( - &mut self, + &self, ) -> Result<(ObjectName, Vec), ParserError> { let name = self.parse_object_name(false)?; - let parse_function_param = - |parser: &mut Parser| -> Result { - let name = parser.parse_identifier()?; - let data_type = parser.parse_data_type()?; - let default_expr = if parser.consume_token(&Token::Eq) { - Some(parser.parse_expr()?) - } else { - None - }; - - Ok(OperateFunctionArg { - mode: None, - name: Some(name), - data_type, - default_expr, - }) + let parse_function_param = |parser: &Parser| -> Result { + let name = parser.parse_identifier()?; + let data_type = parser.parse_data_type()?; + let default_expr = if parser.consume_token(&Token::Eq) { + Some(parser.parse_expr()?) + } else { + None }; + + Ok(OperateFunctionArg { + mode: None, + name: Some(name), + data_type, + default_expr, + }) + }; self.expect_token(&Token::LParen)?; let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?; self.expect_token(&Token::RParen)?; Ok((name, args)) } - fn parse_function_arg(&mut self) -> Result { + fn parse_function_arg(&self) -> Result { let mode = if self.parse_keyword(Keyword::IN) { Some(ArgMode::In) } else if self.parse_keyword(Keyword::OUT) { @@ -5552,7 +5550,7 @@ impl<'a> Parser<'a> { /// ```sql /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ] /// ``` - pub fn parse_drop_trigger(&mut self) -> Result { + pub fn parse_drop_trigger(&self) -> Result { if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect) { self.prev_token(); @@ -5581,7 +5579,7 @@ impl<'a> Parser<'a> { } pub fn parse_create_trigger( - &mut self, + &self, temporary: bool, or_alter: bool, or_replace: bool, @@ -5670,7 
+5668,7 @@ impl<'a> Parser<'a> { .into()) } - pub fn parse_trigger_period(&mut self) -> Result { + pub fn parse_trigger_period(&self) -> Result { Ok( match self.expect_one_of_keywords(&[ Keyword::FOR, @@ -5689,7 +5687,7 @@ impl<'a> Parser<'a> { ) } - pub fn parse_trigger_event(&mut self) -> Result { + pub fn parse_trigger_event(&self) -> Result { Ok( match self.expect_one_of_keywords(&[ Keyword::INSERT, @@ -5713,7 +5711,7 @@ impl<'a> Parser<'a> { ) } - pub fn parse_trigger_referencing(&mut self) -> Result, ParserError> { + pub fn parse_trigger_referencing(&self) -> Result, ParserError> { let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) { Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => { TriggerReferencingType::OldTable @@ -5735,7 +5733,7 @@ impl<'a> Parser<'a> { })) } - pub fn parse_trigger_exec_body(&mut self) -> Result { + pub fn parse_trigger_exec_body(&self) -> Result { Ok(TriggerExecBody { exec_type: match self .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])? 
@@ -5749,7 +5747,7 @@ impl<'a> Parser<'a> { } pub fn parse_create_macro( - &mut self, + &self, or_replace: bool, temporary: bool, ) -> Result { @@ -5783,7 +5781,7 @@ impl<'a> Parser<'a> { } } - fn parse_macro_arg(&mut self) -> Result { + fn parse_macro_arg(&self) -> Result { let name = self.parse_identifier()?; let default_expr = @@ -5795,10 +5793,7 @@ impl<'a> Parser<'a> { Ok(MacroArg { name, default_expr }) } - pub fn parse_create_external_table( - &mut self, - or_replace: bool, - ) -> Result { + pub fn parse_create_external_table(&self, or_replace: bool) -> Result { self.expect_keyword_is(Keyword::TABLE)?; let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; @@ -5836,7 +5831,7 @@ impl<'a> Parser<'a> { .build()) } - pub fn parse_file_format(&mut self) -> Result { + pub fn parse_file_format(&self) -> Result { let next_token = self.next_token(); match &next_token.token { Token::Word(w) => match w.keyword { @@ -5853,7 +5848,7 @@ impl<'a> Parser<'a> { } } - fn parse_analyze_format_kind(&mut self) -> Result { + fn parse_analyze_format_kind(&self) -> Result { if self.consume_token(&Token::Eq) { Ok(AnalyzeFormatKind::Assignment(self.parse_analyze_format()?)) } else { @@ -5861,7 +5856,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_analyze_format(&mut self) -> Result { + pub fn parse_analyze_format(&self) -> Result { let next_token = self.next_token(); match &next_token.token { Token::Word(w) => match w.keyword { @@ -5875,7 +5870,7 @@ impl<'a> Parser<'a> { } pub fn parse_create_view( - &mut self, + &self, or_alter: bool, or_replace: bool, temporary: bool, @@ -5970,7 +5965,7 @@ impl<'a> Parser<'a> { /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL]. 
/// /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html - fn parse_create_view_params(&mut self) -> Result, ParserError> { + fn parse_create_view_params(&self) -> Result, ParserError> { let algorithm = if self.parse_keyword(Keyword::ALGORITHM) { self.expect_token(&Token::Eq)?; Some( @@ -6025,7 +6020,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_create_role(&mut self) -> Result { + pub fn parse_create_role(&self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let names = self.parse_comma_separated(|p| p.parse_object_name(false))?; @@ -6083,7 +6078,7 @@ impl<'a> Parser<'a> { while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) { let loc = self .tokens - .get(self.index - 1) + .get(self.index.get() - 1) .map_or(Location { line: 0, column: 0 }, |t| t.span.start); match keyword { Keyword::AUTHORIZATION => { @@ -6250,7 +6245,7 @@ impl<'a> Parser<'a> { .into()) } - pub fn parse_owner(&mut self) -> Result { + pub fn parse_owner(&self) -> Result { let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) { Some(Keyword::CURRENT_USER) => Owner::CurrentUser, Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole, @@ -6269,7 +6264,7 @@ impl<'a> Parser<'a> { } /// Parses a [Statement::CreateDomain] statement. 
- fn parse_create_domain(&mut self) -> Result { + fn parse_create_domain(&self) -> Result { let name = self.parse_object_name(false)?; self.expect_keyword_is(Keyword::AS)?; let data_type = self.parse_data_type()?; @@ -6306,7 +6301,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html) - pub fn parse_create_policy(&mut self) -> Result { + pub fn parse_create_policy(&self) -> Result { let name = self.parse_identifier()?; self.expect_keyword_is(Keyword::ON)?; let table_name = self.parse_object_name(false)?; @@ -6387,7 +6382,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector) - pub fn parse_create_connector(&mut self) -> Result { + pub fn parse_create_connector(&self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_identifier()?; @@ -6421,7 +6416,7 @@ impl<'a> Parser<'a> { })) } - pub fn parse_drop(&mut self) -> Result { + pub fn parse_drop(&self) -> Result { // MySQL dialect supports `TEMPORARY` let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect) && self.parse_keyword(Keyword::TEMPORARY); @@ -6509,7 +6504,7 @@ impl<'a> Parser<'a> { }) } - fn parse_optional_drop_behavior(&mut self) -> Option { + fn parse_optional_drop_behavior(&self) -> Option { match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) { Some(Keyword::CASCADE) => Some(DropBehavior::Cascade), Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict), @@ -6521,7 +6516,7 @@ impl<'a> Parser<'a> { /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...] 
/// [ CASCADE | RESTRICT ] /// ``` - fn parse_drop_function(&mut self) -> Result { + fn parse_drop_function(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?; let drop_behavior = self.parse_optional_drop_behavior(); @@ -6537,7 +6532,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html) - fn parse_drop_policy(&mut self) -> Result { + fn parse_drop_policy(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_identifier()?; self.expect_keyword_is(Keyword::ON)?; @@ -6555,7 +6550,7 @@ impl<'a> Parser<'a> { /// ``` /// /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector) - fn parse_drop_connector(&mut self) -> Result { + fn parse_drop_connector(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_identifier()?; Ok(Statement::DropConnector { if_exists, name }) @@ -6564,7 +6559,7 @@ impl<'a> Parser<'a> { /// ```sql /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ] /// ``` - fn parse_drop_domain(&mut self) -> Result { + fn parse_drop_domain(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_object_name(false)?; let drop_behavior = self.parse_optional_drop_behavior(); @@ -6579,7 +6574,7 @@ impl<'a> Parser<'a> { /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...] 
/// [ CASCADE | RESTRICT ] /// ``` - fn parse_drop_procedure(&mut self) -> Result { + fn parse_drop_procedure(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?; let drop_behavior = self.parse_optional_drop_behavior(); @@ -6590,7 +6585,7 @@ impl<'a> Parser<'a> { }) } - fn parse_function_desc(&mut self) -> Result { + fn parse_function_desc(&self) -> Result { let name = self.parse_object_name(false)?; let args = if self.consume_token(&Token::LParen) { @@ -6610,7 +6605,7 @@ impl<'a> Parser<'a> { /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details. fn parse_drop_secret( - &mut self, + &self, temporary: bool, persistent: bool, ) -> Result { @@ -6645,7 +6640,7 @@ impl<'a> Parser<'a> { /// /// The syntax can vary significantly between warehouses. See the grammar /// on the warehouse specific function in such cases. - pub fn parse_declare(&mut self) -> Result { + pub fn parse_declare(&self) -> Result { if dialect_of!(self is BigQueryDialect) { return self.parse_big_query_declare(); } @@ -6716,7 +6711,7 @@ impl<'a> Parser<'a> { /// DECLARE variable_name[, ...] 
[{ | }]; /// ``` /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare - pub fn parse_big_query_declare(&mut self) -> Result { + pub fn parse_big_query_declare(&self) -> Result { let names = self.parse_comma_separated(Parser::parse_identifier)?; let data_type = match self.peek_token().token { @@ -6776,7 +6771,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare - pub fn parse_snowflake_declare(&mut self) -> Result { + pub fn parse_snowflake_declare(&self) -> Result { let mut stmts = vec![]; loop { let name = self.parse_identifier()?; @@ -6879,7 +6874,7 @@ impl<'a> Parser<'a> { // } [ ,...n ] /// ``` /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16 - pub fn parse_mssql_declare(&mut self) -> Result { + pub fn parse_mssql_declare(&self) -> Result { let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?; Ok(Statement::Declare { stmts }) @@ -6895,7 +6890,7 @@ impl<'a> Parser<'a> { // } [ ,...n ] /// ``` /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16 - pub fn parse_mssql_declare_stmt(&mut self) -> Result { + pub fn parse_mssql_declare_stmt(&self) -> Result { let name = { let ident = self.parse_identifier()?; if !ident.value.starts_with('@') @@ -6957,7 +6952,7 @@ impl<'a> Parser<'a> { /// ``` /// pub fn parse_snowflake_variable_declaration_expression( - &mut self, + &self, ) -> Result, ParserError> { Ok(match self.peek_token().token { Token::Word(w) if w.keyword == Keyword::DEFAULT => { @@ -6981,7 +6976,7 @@ impl<'a> Parser<'a> { /// [ = ] /// ``` pub fn parse_mssql_variable_declaration_expression( - &mut self, + &self, ) -> Result, ParserError> { Ok(match self.peek_token().token { Token::Eq => { @@ -6995,7 +6990,7 @@ impl<'a> Parser<'a> { } // FETCH [ direction 
{ FROM | IN } ] cursor INTO target; - pub fn parse_fetch_statement(&mut self) -> Result { + pub fn parse_fetch_statement(&self) -> Result { let direction = if self.parse_keyword(Keyword::NEXT) { FetchDirection::Next } else if self.parse_keyword(Keyword::PRIOR) { @@ -7064,7 +7059,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_discard(&mut self) -> Result { + pub fn parse_discard(&self) -> Result { let object_type = if self.parse_keyword(Keyword::ALL) { DiscardObject::ALL } else if self.parse_keyword(Keyword::PLANS) { @@ -7082,7 +7077,7 @@ impl<'a> Parser<'a> { Ok(Statement::Discard { object_type }) } - pub fn parse_create_index(&mut self, unique: bool) -> Result { + pub fn parse_create_index(&self, unique: bool) -> Result { let concurrently = self.parse_keyword(Keyword::CONCURRENTLY); let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); @@ -7173,7 +7168,7 @@ impl<'a> Parser<'a> { })) } - pub fn parse_create_extension(&mut self) -> Result { + pub fn parse_create_extension(&self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_identifier()?; @@ -7208,7 +7203,7 @@ impl<'a> Parser<'a> { } /// Parse a PostgreSQL-specific [Statement::DropExtension] statement. 
- pub fn parse_drop_extension(&mut self) -> Result { + pub fn parse_drop_extension(&self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let names = self.parse_comma_separated(|p| p.parse_identifier())?; let cascade_or_restrict = @@ -7227,7 +7222,7 @@ impl<'a> Parser<'a> { } //TODO: Implement parsing for Skewed - pub fn parse_hive_distribution(&mut self) -> Result { + pub fn parse_hive_distribution(&self) -> Result { if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) { self.expect_token(&Token::LParen)?; let columns = self.parse_comma_separated(Parser::parse_column_def)?; @@ -7238,7 +7233,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_hive_formats(&mut self) -> Result { + pub fn parse_hive_formats(&self) -> Result { let mut hive_format = HiveFormat::default(); loop { match self.parse_one_of_keywords(&[ @@ -7286,7 +7281,7 @@ impl<'a> Parser<'a> { Ok(hive_format) } - pub fn parse_row_format(&mut self) -> Result { + pub fn parse_row_format(&self) -> Result { self.expect_keyword_is(Keyword::FORMAT)?; match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) { Some(Keyword::SERDE) => { @@ -7382,7 +7377,7 @@ impl<'a> Parser<'a> { } } - fn parse_optional_on_cluster(&mut self) -> Result, ParserError> { + fn parse_optional_on_cluster(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) { Ok(Some(self.parse_identifier()?)) } else { @@ -7391,7 +7386,7 @@ impl<'a> Parser<'a> { } pub fn parse_create_table( - &mut self, + &self, or_replace: bool, temporary: bool, global: Option, @@ -7509,7 +7504,7 @@ impl<'a> Parser<'a> { } fn maybe_parse_create_table_like( - &mut self, + &self, allow_unquoted_hyphen: bool, ) -> Result, ParserError> { let like = if self.dialect.supports_create_table_like_parenthesized() @@ -7546,7 +7541,7 @@ impl<'a> Parser<'a> { Ok(like) } - pub(crate) fn parse_create_table_on_commit(&mut self) -> Result { + pub(crate) fn parse_create_table_on_commit(&self) -> Result 
{ if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) { Ok(OnCommit::DeleteRows) } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) { @@ -7566,9 +7561,7 @@ impl<'a> Parser<'a> { /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2) /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html) /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html) - fn parse_optional_create_table_config( - &mut self, - ) -> Result { + fn parse_optional_create_table_config(&self) -> Result { let mut table_options = CreateTableOptions::None; let inherits = if self.parse_keyword(Keyword::INHERITS) { @@ -7626,7 +7619,7 @@ impl<'a> Parser<'a> { }) } - fn parse_plain_option(&mut self) -> Result, ParserError> { + fn parse_plain_option(&self) -> Result, ParserError> { // Single parameter option // if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) { @@ -7819,7 +7812,7 @@ impl<'a> Parser<'a> { Ok(Some(SqlOption::KeyValue { key, value })) } - pub fn parse_plain_options(&mut self) -> Result, ParserError> { + pub fn parse_plain_options(&self) -> Result, ParserError> { let mut options = Vec::new(); while let Some(option) = self.parse_plain_option()? 
{ @@ -7832,7 +7825,7 @@ impl<'a> Parser<'a> { Ok(options) } - pub fn parse_optional_inline_comment(&mut self) -> Result, ParserError> { + pub fn parse_optional_inline_comment(&self) -> Result, ParserError> { let comment = if self.parse_keyword(Keyword::COMMENT) { let has_eq = self.consume_token(&Token::Eq); let comment = self.parse_comment_value()?; @@ -7847,7 +7840,7 @@ impl<'a> Parser<'a> { Ok(comment) } - pub fn parse_comment_value(&mut self) -> Result { + pub fn parse_comment_value(&self) -> Result { let next_token = self.next_token(); let value = match next_token.token { Token::SingleQuotedString(str) => str, @@ -7858,7 +7851,7 @@ impl<'a> Parser<'a> { } pub fn parse_optional_procedure_parameters( - &mut self, + &self, ) -> Result>, ParserError> { let mut params = vec![]; if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) { @@ -7879,7 +7872,7 @@ impl<'a> Parser<'a> { Ok(Some(params)) } - pub fn parse_columns(&mut self) -> Result<(Vec, Vec), ParserError> { + pub fn parse_columns(&self) -> Result<(Vec, Vec), ParserError> { let mut columns = vec![]; let mut constraints = vec![]; if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) { @@ -7915,7 +7908,7 @@ impl<'a> Parser<'a> { Ok((columns, constraints)) } - pub fn parse_procedure_param(&mut self) -> Result { + pub fn parse_procedure_param(&self) -> Result { let mode = if self.parse_keyword(Keyword::IN) { Some(ArgMode::In) } else if self.parse_keyword(Keyword::OUT) { @@ -7941,7 +7934,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_column_def(&mut self) -> Result { + pub fn parse_column_def(&self) -> Result { let col_name = self.parse_identifier()?; let data_type = if self.is_column_type_sqlite_unspecified() { DataType::Unspecified @@ -7973,7 +7966,7 @@ impl<'a> Parser<'a> { }) } - fn is_column_type_sqlite_unspecified(&mut self) -> bool { + fn is_column_type_sqlite_unspecified(&self) -> bool { if dialect_of!(self is SQLiteDialect) { match self.peek_token().token { 
Token::Word(word) => matches!( @@ -7996,7 +7989,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_column_option(&mut self) -> Result, ParserError> { + pub fn parse_optional_column_option(&self) -> Result, ParserError> { if let Some(option) = self.dialect.parse_column_option(self)? { return option; } @@ -8009,7 +8002,7 @@ impl<'a> Parser<'a> { ) } - fn parse_optional_column_option_inner(&mut self) -> Result, ParserError> { + fn parse_optional_column_option_inner(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) { Ok(Some(ColumnOption::CharacterSet( self.parse_object_name(false)?, @@ -8239,7 +8232,7 @@ impl<'a> Parser<'a> { /// In the first we should parse the inner portion of `(42 NOT NULL)` as [Expr::IsNotNull], /// whereas is both statements that trailing `NOT NULL` should only be parsed as a /// [ColumnOption::NotNull]. - fn parse_column_option_expr(&mut self) -> Result { + fn parse_column_option_expr(&self) -> Result { if self.peek_token_ref().token == Token::LParen { let expr: Expr = self.with_state(ParserState::Normal, |p| p.parse_prefix())?; Ok(expr) @@ -8248,7 +8241,7 @@ impl<'a> Parser<'a> { } } - pub(crate) fn parse_tag(&mut self) -> Result { + pub(crate) fn parse_tag(&self) -> Result { let name = self.parse_object_name(false)?; self.expect_token(&Token::Eq)?; let value = self.parse_literal_string()?; @@ -8256,9 +8249,7 @@ impl<'a> Parser<'a> { Ok(Tag::new(name, value)) } - fn parse_optional_column_option_generated( - &mut self, - ) -> Result, ParserError> { + fn parse_optional_column_option_generated(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) { let mut sequence_options = vec![]; if self.expect_token(&Token::LParen).is_ok() { @@ -8323,7 +8314,7 @@ impl<'a> Parser<'a> { } } - fn parse_optional_column_option_as(&mut self) -> Result, ParserError> { + fn parse_optional_column_option_as(&self) -> Result, ParserError> { // Some DBs allow 'AS (expr)', 
shorthand for GENERATED ALWAYS AS self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; @@ -8349,7 +8340,7 @@ impl<'a> Parser<'a> { })) } - pub fn parse_optional_clustered_by(&mut self) -> Result, ParserError> { + pub fn parse_optional_clustered_by(&self) -> Result, ParserError> { let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect) && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY]) { @@ -8378,7 +8369,7 @@ impl<'a> Parser<'a> { Ok(clustered_by) } - pub fn parse_referential_action(&mut self) -> Result { + pub fn parse_referential_action(&self) -> Result { if self.parse_keyword(Keyword::RESTRICT) { Ok(ReferentialAction::Restrict) } else if self.parse_keyword(Keyword::CASCADE) { @@ -8397,7 +8388,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_match_kind(&mut self) -> Result { + pub fn parse_match_kind(&self) -> Result { if self.parse_keyword(Keyword::FULL) { Ok(ConstraintReferenceMatchKind::Full) } else if self.parse_keyword(Keyword::PARTIAL) { @@ -8410,7 +8401,7 @@ impl<'a> Parser<'a> { } pub fn parse_constraint_characteristics( - &mut self, + &self, ) -> Result, ParserError> { let mut cc = ConstraintCharacteristics::default(); @@ -8446,9 +8437,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_table_constraint( - &mut self, - ) -> Result, ParserError> { + pub fn parse_optional_table_constraint(&self) -> Result, ParserError> { let name = if self.parse_keyword(Keyword::CONSTRAINT) { Some(self.parse_identifier()?) 
} else { @@ -8647,7 +8636,7 @@ impl<'a> Parser<'a> { } } - fn parse_optional_nulls_distinct(&mut self) -> Result { + fn parse_optional_nulls_distinct(&self) -> Result { Ok(if self.parse_keyword(Keyword::NULLS) { let not = self.parse_keyword(Keyword::NOT); self.expect_keyword_is(Keyword::DISTINCT)?; @@ -8662,7 +8651,7 @@ impl<'a> Parser<'a> { } pub fn maybe_parse_options( - &mut self, + &self, keyword: Keyword, ) -> Result>, ParserError> { if let Token::Word(word) = self.peek_token().token { @@ -8673,7 +8662,7 @@ impl<'a> Parser<'a> { Ok(None) } - pub fn parse_options(&mut self, keyword: Keyword) -> Result, ParserError> { + pub fn parse_options(&self, keyword: Keyword) -> Result, ParserError> { if self.parse_keyword(keyword) { self.expect_token(&Token::LParen)?; let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?; @@ -8685,7 +8674,7 @@ impl<'a> Parser<'a> { } pub fn parse_options_with_keywords( - &mut self, + &self, keywords: &[Keyword], ) -> Result, ParserError> { if self.parse_keywords(keywords) { @@ -8698,7 +8687,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_index_type(&mut self) -> Result { + pub fn parse_index_type(&self) -> Result { Ok(if self.parse_keyword(Keyword::BTREE) { IndexType::BTree } else if self.parse_keyword(Keyword::HASH) { @@ -8723,9 +8712,7 @@ impl<'a> Parser<'a> { /// ```sql //// USING BTREE (name, age DESC) /// ``` - pub fn parse_optional_using_then_index_type( - &mut self, - ) -> Result, ParserError> { + pub fn parse_optional_using_then_index_type(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::USING) { Ok(Some(self.parse_index_type()?)) } else { @@ -8735,12 +8722,12 @@ impl<'a> Parser<'a> { /// Parse `[ident]`, mostly `ident` is name, like: /// `window_name`, `index_name`, ... 
- pub fn parse_optional_ident(&mut self) -> Result, ParserError> { + pub fn parse_optional_ident(&self) -> Result, ParserError> { self.maybe_parse(|parser| parser.parse_identifier()) } #[must_use] - pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay { + pub fn parse_index_type_display(&self) -> KeyOrIndexDisplay { if self.parse_keyword(Keyword::KEY) { KeyOrIndexDisplay::Key } else if self.parse_keyword(Keyword::INDEX) { @@ -8750,7 +8737,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_index_option(&mut self) -> Result, ParserError> { + pub fn parse_optional_index_option(&self) -> Result, ParserError> { if let Some(index_type) = self.parse_optional_using_then_index_type()? { Ok(Some(IndexOption::Using(index_type))) } else if self.parse_keyword(Keyword::COMMENT) { @@ -8761,7 +8748,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_index_options(&mut self) -> Result, ParserError> { + pub fn parse_index_options(&self) -> Result, ParserError> { let mut options = Vec::new(); loop { @@ -8772,7 +8759,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_sql_option(&mut self) -> Result { + pub fn parse_sql_option(&self) -> Result { let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect); match self.peek_token().token { @@ -8795,7 +8782,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_option_clustered(&mut self) -> Result { + pub fn parse_option_clustered(&self) -> Result { if self.parse_keywords(&[ Keyword::CLUSTERED, Keyword::COLUMNSTORE, @@ -8831,7 +8818,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_option_partition(&mut self) -> Result { + pub fn parse_option_partition(&self) -> Result { self.expect_keyword_is(Keyword::PARTITION)?; self.expect_token(&Token::LParen)?; let column_name = self.parse_identifier()?; @@ -8860,14 +8847,14 @@ impl<'a> Parser<'a> { }) } - pub fn parse_partition(&mut self) -> Result { + pub fn parse_partition(&self) -> Result { self.expect_token(&Token::LParen)?; let partitions = self.parse_comma_separated(Parser::parse_expr)?; 
self.expect_token(&Token::RParen)?; Ok(Partition::Partitions(partitions)) } - pub fn parse_projection_select(&mut self) -> Result { + pub fn parse_projection_select(&self) -> Result { self.expect_token(&Token::LParen)?; self.expect_keyword_is(Keyword::SELECT)?; let projection = self.parse_projection()?; @@ -8880,7 +8867,7 @@ impl<'a> Parser<'a> { order_by, }) } - pub fn parse_alter_table_add_projection(&mut self) -> Result { + pub fn parse_alter_table_add_projection(&self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_identifier()?; let query = self.parse_projection_select()?; @@ -8891,7 +8878,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_alter_table_operation(&mut self) -> Result { + pub fn parse_alter_table_operation(&self) -> Result { let operation = if self.parse_keyword(Keyword::ADD) { if let Some(constraint) = self.parse_optional_table_constraint()? { let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]); @@ -9348,7 +9335,7 @@ impl<'a> Parser<'a> { Ok(operation) } - fn parse_set_data_type(&mut self, had_set: bool) -> Result { + fn parse_set_data_type(&self, had_set: bool) -> Result { let data_type = self.parse_data_type()?; let using = if self.dialect.supports_alter_column_type_using() && self.parse_keyword(Keyword::USING) @@ -9364,7 +9351,7 @@ impl<'a> Parser<'a> { }) } - fn parse_part_or_partition(&mut self) -> Result { + fn parse_part_or_partition(&self) -> Result { let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?; match keyword { Keyword::PART => Ok(Partition::Part(self.parse_expr()?)), @@ -9374,7 +9361,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_alter(&mut self) -> Result { + pub fn parse_alter(&self) -> Result { let object_type = self.expect_one_of_keywords(&[ Keyword::VIEW, Keyword::TYPE, @@ -9428,7 +9415,7 @@ impl<'a> Parser<'a> { } /// Parse a [Statement::AlterTable] - pub fn parse_alter_table(&mut self, iceberg: bool) -> 
Result { + pub fn parse_alter_table(&self, iceberg: bool) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ] let table_name = self.parse_object_name(false)?; @@ -9467,7 +9454,7 @@ impl<'a> Parser<'a> { .into()) } - pub fn parse_alter_view(&mut self) -> Result { + pub fn parse_alter_view(&self) -> Result { let name = self.parse_object_name(false)?; let columns = self.parse_parenthesized_column_list(Optional, false)?; @@ -9485,7 +9472,7 @@ impl<'a> Parser<'a> { } /// Parse a [Statement::AlterType] - pub fn parse_alter_type(&mut self) -> Result { + pub fn parse_alter_type(&self) -> Result { let name = self.parse_object_name(false)?; if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) { @@ -9535,7 +9522,7 @@ impl<'a> Parser<'a> { // Parse a [Statement::AlterSchema] // ALTER SCHEMA [ IF EXISTS ] schema_name - pub fn parse_alter_schema(&mut self) -> Result { + pub fn parse_alter_schema(&self) -> Result { self.expect_keywords(&[Keyword::ALTER, Keyword::SCHEMA])?; let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let name = self.parse_object_name(false)?; @@ -9575,7 +9562,7 @@ impl<'a> Parser<'a> { /// Parse a `CALL procedure_name(arg1, arg2, ...)` /// or `CALL procedure_name` statement - pub fn parse_call(&mut self) -> Result { + pub fn parse_call(&self) -> Result { let object_name = self.parse_object_name(false)?; if self.peek_token().token == Token::LParen { match self.parse_function(object_name)? 
{ @@ -9600,7 +9587,7 @@ impl<'a> Parser<'a> { } /// Parse a copy statement - pub fn parse_copy(&mut self) -> Result { + pub fn parse_copy(&self) -> Result { let source; if self.consume_token(&Token::LParen) { source = CopySource::Query(self.parse_query()?); @@ -9667,14 +9654,14 @@ impl<'a> Parser<'a> { } /// Parse [Statement::Open] - fn parse_open(&mut self) -> Result { + fn parse_open(&self) -> Result { self.expect_keyword(Keyword::OPEN)?; Ok(Statement::Open(OpenStatement { cursor_name: self.parse_identifier()?, })) } - pub fn parse_close(&mut self) -> Result { + pub fn parse_close(&self) -> Result { let cursor = if self.parse_keyword(Keyword::ALL) { CloseCursor::All } else { @@ -9686,7 +9673,7 @@ impl<'a> Parser<'a> { Ok(Statement::Close { cursor }) } - fn parse_copy_option(&mut self) -> Result { + fn parse_copy_option(&self) -> Result { let ret = match self.parse_one_of_keywords(&[ Keyword::FORMAT, Keyword::FREEZE, @@ -9728,7 +9715,7 @@ impl<'a> Parser<'a> { Ok(ret) } - fn parse_copy_legacy_option(&mut self) -> Result { + fn parse_copy_legacy_option(&self) -> Result { // FORMAT \[ AS \] is optional if self.parse_keyword(Keyword::FORMAT) { let _ = self.parse_keyword(Keyword::AS); @@ -9924,13 +9911,13 @@ impl<'a> Parser<'a> { Ok(ret) } - fn parse_file_size(&mut self) -> Result { + fn parse_file_size(&self) -> Result { let size = self.parse_number_value()?.value; let unit = self.maybe_parse_file_size_unit(); Ok(FileSize { size, unit }) } - fn maybe_parse_file_size_unit(&mut self) -> Option { + fn maybe_parse_file_size_unit(&self) -> Option { match self.parse_one_of_keywords(&[Keyword::MB, Keyword::GB]) { Some(Keyword::MB) => Some(FileSizeUnit::MB), Some(Keyword::GB) => Some(FileSizeUnit::GB), @@ -9938,7 +9925,7 @@ impl<'a> Parser<'a> { } } - fn parse_iam_role_kind(&mut self) -> Result { + fn parse_iam_role_kind(&self) -> Result { if self.parse_keyword(Keyword::DEFAULT) { Ok(IamRoleKind::Default) } else { @@ -9947,7 +9934,7 @@ impl<'a> Parser<'a> { } } - fn 
parse_copy_legacy_csv_option(&mut self) -> Result { + fn parse_copy_legacy_csv_option(&self) -> Result { let ret = match self.parse_one_of_keywords(&[ Keyword::HEADER, Keyword::QUOTE, @@ -9978,12 +9965,12 @@ impl<'a> Parser<'a> { Ok(ret) } - fn parse_literal_char(&mut self) -> Result { + fn parse_literal_char(&self) -> Result { let s = self.parse_literal_string()?; if s.len() != 1 { let loc = self .tokens - .get(self.index - 1) + .get(self.index.get() - 1) .map_or(Location { line: 0, column: 0 }, |t| t.span.start); return parser_err!(format!("Expect a char, found {s:?}"), loc); } @@ -9992,11 +9979,11 @@ impl<'a> Parser<'a> { /// Parse a tab separated values in /// COPY payload - pub fn parse_tsv(&mut self) -> Vec> { + pub fn parse_tsv(&self) -> Vec> { self.parse_tab_value() } - pub fn parse_tab_value(&mut self) -> Vec> { + pub fn parse_tab_value(&self) -> Vec> { let mut values = vec![]; let mut content = String::from(""); while let Some(t) = self.next_token_no_skip().map(|t| &t.token) { @@ -10028,7 +10015,7 @@ impl<'a> Parser<'a> { } /// Parse a literal value (numbers, strings, date/time, booleans) - pub fn parse_value(&mut self) -> Result { + pub fn parse_value(&self) -> Result { let next_token = self.next_token(); let span = next_token.span; let ok_value = |value: Value| Ok(value.with_span(span)); @@ -10139,7 +10126,7 @@ impl<'a> Parser<'a> { } } - fn maybe_concat_string_literal(&mut self, mut str: String) -> String { + fn maybe_concat_string_literal(&self, mut str: String) -> String { if self.dialect.supports_string_literal_concatenation() { while let Token::SingleQuotedString(ref s) | Token::DoubleQuotedString(ref s) = self.peek_token_ref().token @@ -10152,7 +10139,7 @@ impl<'a> Parser<'a> { } /// Parse an unsigned numeric literal - pub fn parse_number_value(&mut self) -> Result { + pub fn parse_number_value(&self) -> Result { let value_wrapper = self.parse_value()?; match &value_wrapper.value { Value::Number(_, _) => Ok(value_wrapper), @@ -10166,7 +10153,7 @@ 
impl<'a> Parser<'a> { /// Parse a numeric literal as an expression. Returns a [`Expr::UnaryOp`] if the number is signed, /// otherwise returns a [`Expr::Value`] - pub fn parse_number(&mut self) -> Result { + pub fn parse_number(&self) -> Result { let next_token = self.next_token(); match next_token.token { Token::Plus => Ok(Expr::UnaryOp { @@ -10184,7 +10171,7 @@ impl<'a> Parser<'a> { } } - fn parse_introduced_string_expr(&mut self) -> Result { + fn parse_introduced_string_expr(&self) -> Result { let next_token = self.next_token(); let span = next_token.span; match next_token.token { @@ -10208,7 +10195,7 @@ impl<'a> Parser<'a> { } /// Parse an unsigned literal integer/long - pub fn parse_literal_uint(&mut self) -> Result { + pub fn parse_literal_uint(&self) -> Result { let next_token = self.next_token(); match next_token.token { Token::Number(s, _) => Self::parse::(s, next_token.span.start), @@ -10218,7 +10205,7 @@ impl<'a> Parser<'a> { /// Parse the body of a `CREATE FUNCTION` specified as a string. /// e.g. `CREATE FUNCTION ... AS $$ body $$`. 
- fn parse_create_function_body_string(&mut self) -> Result { + fn parse_create_function_body_string(&self) -> Result { let peek_token = self.peek_token(); let span = peek_token.span; match peek_token.token { @@ -10234,7 +10221,7 @@ impl<'a> Parser<'a> { } /// Parse a literal string - pub fn parse_literal_string(&mut self) -> Result { + pub fn parse_literal_string(&self) -> Result { let next_token = self.next_token(); match next_token.token { Token::Word(Word { @@ -10253,7 +10240,7 @@ impl<'a> Parser<'a> { } /// Parse a boolean string - pub(crate) fn parse_boolean_string(&mut self) -> Result { + pub(crate) fn parse_boolean_string(&self) -> Result { match self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) { Some(Keyword::TRUE) => Ok(true), Some(Keyword::FALSE) => Ok(false), @@ -10262,7 +10249,7 @@ impl<'a> Parser<'a> { } /// Parse a literal unicode normalization clause - pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result { + pub fn parse_unicode_is_normalized(&self, expr: Expr) -> Result { let neg = self.parse_keyword(Keyword::NOT); let normalized_form = self.maybe_parse(|parser| { match parser.parse_one_of_keywords(&[ @@ -10288,7 +10275,7 @@ impl<'a> Parser<'a> { self.expected("unicode normalization form", self.peek_token()) } - pub fn parse_enum_values(&mut self) -> Result, ParserError> { + pub fn parse_enum_values(&self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; let values = self.parse_comma_separated(|parser| { let name = parser.parse_literal_string()?; @@ -10306,7 +10293,7 @@ impl<'a> Parser<'a> { } /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example) - pub fn parse_data_type(&mut self) -> Result { + pub fn parse_data_type(&self) -> Result { let (ty, trailing_bracket) = self.parse_data_type_helper()?; if trailing_bracket.0 { return parser_err!( @@ -10318,9 +10305,7 @@ impl<'a> Parser<'a> { Ok(ty) } - fn parse_data_type_helper( - &mut self, - ) -> Result<(DataType, 
MatchedTrailingBracket), ParserError> { + fn parse_data_type_helper(&self) -> Result<(DataType, MatchedTrailingBracket), ParserError> { let dialect = self.dialect; self.advance_token(); let next_token = self.get_current_token(); @@ -10742,18 +10727,18 @@ impl<'a> Parser<'a> { Ok((data, trailing_bracket)) } - fn parse_returns_table_column(&mut self) -> Result { + fn parse_returns_table_column(&self) -> Result { self.parse_column_def() } - fn parse_returns_table_columns(&mut self) -> Result, ParserError> { + fn parse_returns_table_columns(&self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?; self.expect_token(&Token::RParen)?; Ok(columns) } - pub fn parse_string_values(&mut self) -> Result, ParserError> { + pub fn parse_string_values(&self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; let mut values = Vec::new(); loop { @@ -10773,7 +10758,7 @@ impl<'a> Parser<'a> { } /// Strictly parse `identifier AS identifier` - pub fn parse_identifier_with_alias(&mut self) -> Result { + pub fn parse_identifier_with_alias(&self) -> Result { let ident = self.parse_identifier()?; self.expect_keyword_is(Keyword::AS)?; let alias = self.parse_identifier()?; @@ -10781,7 +10766,7 @@ impl<'a> Parser<'a> { } /// Parse `identifier [AS] identifier` where the AS keyword is optional - fn parse_identifier_with_optional_alias(&mut self) -> Result { + fn parse_identifier_with_optional_alias(&self) -> Result { let ident = self.parse_identifier()?; let _after_as = self.parse_keyword(Keyword::AS); let alias = self.parse_identifier()?; @@ -10789,7 +10774,7 @@ impl<'a> Parser<'a> { } /// Parse comma-separated list of parenthesized queries for pipe operators - fn parse_pipe_operator_queries(&mut self) -> Result, ParserError> { + fn parse_pipe_operator_queries(&self) -> Result, ParserError> { self.parse_comma_separated(|parser| { parser.expect_token(&Token::LParen)?; let query = 
parser.parse_query()?; @@ -10800,7 +10785,7 @@ impl<'a> Parser<'a> { /// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT fn parse_distinct_required_set_quantifier( - &mut self, + &self, operator_name: &str, ) -> Result { let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect)); @@ -10813,7 +10798,7 @@ impl<'a> Parser<'a> { } /// Parse optional identifier alias (with or without AS keyword) - fn parse_identifier_optional_alias(&mut self) -> Result, ParserError> { + fn parse_identifier_optional_alias(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::AS) { Ok(Some(self.parse_identifier()?)) } else { @@ -10823,8 +10808,8 @@ impl<'a> Parser<'a> { } /// Optionally parses an alias for a select list item - fn maybe_parse_select_item_alias(&mut self) -> Result, ParserError> { - fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + fn maybe_parse_select_item_alias(&self) -> Result, ParserError> { + fn validator(explicit: bool, kw: &Keyword, parser: &Parser) -> bool { parser.dialect.is_select_item_alias(explicit, kw, parser) } self.parse_optional_alias_inner(None, validator) @@ -10833,8 +10818,8 @@ impl<'a> Parser<'a> { /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`. /// In this case, the alias is allowed to optionally name the columns in the table, in /// addition to the table itself. - pub fn maybe_parse_table_alias(&mut self) -> Result, ParserError> { - fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + pub fn maybe_parse_table_alias(&self) -> Result, ParserError> { + fn validator(explicit: bool, kw: &Keyword, parser: &Parser) -> bool { parser.dialect.is_table_factor_alias(explicit, kw, parser) } match self.parse_optional_alias_inner(None, validator)? 
{ @@ -10846,7 +10831,7 @@ impl<'a> Parser<'a> { } } - fn parse_table_index_hints(&mut self) -> Result, ParserError> { + fn parse_table_index_hints(&self) -> Result, ParserError> { let mut hints = vec![]; while let Some(hint_type) = self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE]) @@ -10908,10 +10893,10 @@ impl<'a> Parser<'a> { /// but new flows should use the context-specific methods such as `maybe_parse_select_item_alias` /// and `maybe_parse_table_alias`. pub fn parse_optional_alias( - &mut self, + &self, reserved_kwds: &[Keyword], ) -> Result, ParserError> { - fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool { + fn validator(_explicit: bool, _kw: &Keyword, _parser: &Parser) -> bool { false } self.parse_optional_alias_inner(Some(reserved_kwds), validator) @@ -10924,12 +10909,12 @@ impl<'a> Parser<'a> { /// to call to validate if a keyword should be parsed as an alias, to allow /// callers to customize the parsing logic based on their context. 
fn parse_optional_alias_inner( - &mut self, + &self, reserved_kwds: Option<&[Keyword]>, validator: F, ) -> Result, ParserError> where - F: Fn(bool, &Keyword, &mut Parser) -> bool, + F: Fn(bool, &Keyword, &Parser) -> bool, { let after_as = self.parse_keyword(Keyword::AS); @@ -10961,7 +10946,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_group_by(&mut self) -> Result, ParserError> { + pub fn parse_optional_group_by(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) { let expressions = if self.parse_keyword(Keyword::ALL) { None @@ -11017,7 +11002,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_order_by(&mut self) -> Result, ParserError> { + pub fn parse_optional_order_by(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { let order_by = if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) { @@ -11044,7 +11029,7 @@ impl<'a> Parser<'a> { } } - fn parse_optional_limit_clause(&mut self) -> Result, ParserError> { + fn parse_optional_limit_clause(&self) -> Result, ParserError> { let mut offset = if self.parse_keyword(Keyword::OFFSET) { Some(self.parse_offset()?) } else { @@ -11100,7 +11085,7 @@ impl<'a> Parser<'a> { /// Parse a table object for insertion /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)` - pub fn parse_table_object(&mut self) -> Result { + pub fn parse_table_object(&self) -> Result { if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) { let fn_name = self.parse_object_name(false)?; self.parse_function_call(fn_name) @@ -11116,7 +11101,7 @@ impl<'a> Parser<'a> { /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN, /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers /// in this context on BigQuery. 
- pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result { + pub fn parse_object_name(&self, in_table_clause: bool) -> Result { self.parse_object_name_inner(in_table_clause, false) } @@ -11130,7 +11115,7 @@ impl<'a> Parser<'a> { /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name /// e.g. *, *.*, `foo`.*, or "foo"."bar" fn parse_object_name_inner( - &mut self, + &self, in_table_clause: bool, allow_wildcards: bool, ) -> Result { @@ -11218,7 +11203,7 @@ impl<'a> Parser<'a> { } /// Parse identifiers - pub fn parse_identifiers(&mut self) -> Result, ParserError> { + pub fn parse_identifiers(&self) -> Result, ParserError> { let mut idents = vec![]; loop { match &self.peek_token_ref().token { @@ -11272,7 +11257,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [parse_identifiers]: Parser::parse_identifiers - pub fn parse_multipart_identifier(&mut self) -> Result, ParserError> { + pub fn parse_multipart_identifier(&self) -> Result, ParserError> { let mut idents = vec![]; // expecting at least one word for identifier @@ -11324,7 +11309,7 @@ impl<'a> Parser<'a> { } /// Parse a simple one-word identifier (possibly quoted, possibly a keyword) - pub fn parse_identifier(&mut self) -> Result { + pub fn parse_identifier(&self) -> Result { let next_token = self.next_token(); match next_token.token { Token::Word(w) => Ok(w.into_ident(next_token.span)), @@ -11344,7 +11329,7 @@ impl<'a> Parser<'a> { /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical) /// /// Return a tuple of the identifier and a boolean indicating it ends with a period. 
- fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> { + fn parse_unquoted_hyphenated_identifier(&self) -> Result<(Ident, bool), ParserError> { match self.peek_token().token { Token::Word(w) => { let quote_style_is_none = w.quote_style.is_none(); @@ -11413,7 +11398,7 @@ impl<'a> Parser<'a> { } /// Parses a parenthesized, comma-separated list of column definitions within a view. - fn parse_view_columns(&mut self) -> Result, ParserError> { + fn parse_view_columns(&self) -> Result, ParserError> { if self.consume_token(&Token::LParen) { if self.peek_token().token == Token::RParen { self.next_token(); @@ -11433,7 +11418,7 @@ impl<'a> Parser<'a> { } /// Parses a column definition within a view. - fn parse_view_column(&mut self) -> Result { + fn parse_view_column(&self) -> Result { let name = self.parse_identifier()?; let options = self.parse_view_column_options()?; let data_type = if dialect_of!(self is ClickHouseDialect) { @@ -11448,7 +11433,7 @@ impl<'a> Parser<'a> { }) } - fn parse_view_column_options(&mut self) -> Result, ParserError> { + fn parse_view_column_options(&self) -> Result, ParserError> { let mut options = Vec::new(); loop { let option = self.parse_optional_column_option()?; @@ -11470,7 +11455,7 @@ impl<'a> Parser<'a> { /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers. /// For example: `(col1, "col 2", ...)` pub fn parse_parenthesized_column_list( - &mut self, + &self, optional: IsOptional, allow_empty: bool, ) -> Result, ParserError> { @@ -11478,7 +11463,7 @@ impl<'a> Parser<'a> { } pub fn parse_parenthesized_compound_identifier_list( - &mut self, + &self, optional: IsOptional, allow_empty: bool, ) -> Result, ParserError> { @@ -11491,7 +11476,7 @@ impl<'a> Parser<'a> { /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary /// expressions with ordering information (and an opclass in some dialects). 
- fn parse_parenthesized_index_column_list(&mut self) -> Result, ParserError> { + fn parse_parenthesized_index_column_list(&self) -> Result, ParserError> { self.parse_parenthesized_column_list_inner(Mandatory, false, |p| { p.parse_create_index_expr() }) @@ -11500,7 +11485,7 @@ impl<'a> Parser<'a> { /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers. /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)` pub fn parse_parenthesized_qualified_column_list( - &mut self, + &self, optional: IsOptional, allow_empty: bool, ) -> Result, ParserError> { @@ -11512,13 +11497,13 @@ impl<'a> Parser<'a> { /// Parses a parenthesized comma-separated list of columns using /// the provided function to parse each element. fn parse_parenthesized_column_list_inner( - &mut self, + &self, optional: IsOptional, allow_empty: bool, mut f: F, ) -> Result, ParserError> where - F: FnMut(&mut Parser) -> Result, + F: FnMut(&Parser) -> Result, { if self.consume_token(&Token::LParen) { if allow_empty && self.peek_token().token == Token::RParen { @@ -11537,7 +11522,7 @@ impl<'a> Parser<'a> { } /// Parses a parenthesized comma-separated list of table alias column definitions. 
- fn parse_table_alias_column_defs(&mut self) -> Result, ParserError> { + fn parse_table_alias_column_defs(&self) -> Result, ParserError> { if self.consume_token(&Token::LParen) { let cols = self.parse_comma_separated(|p| { let name = p.parse_identifier()?; @@ -11551,14 +11536,14 @@ impl<'a> Parser<'a> { } } - pub fn parse_precision(&mut self) -> Result { + pub fn parse_precision(&self) -> Result { self.expect_token(&Token::LParen)?; let n = self.parse_literal_uint()?; self.expect_token(&Token::RParen)?; Ok(n) } - pub fn parse_optional_precision(&mut self) -> Result, ParserError> { + pub fn parse_optional_precision(&self) -> Result, ParserError> { if self.consume_token(&Token::LParen) { let n = self.parse_literal_uint()?; self.expect_token(&Token::RParen)?; @@ -11568,9 +11553,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_optional_interval_fields( - &mut self, - ) -> Result, ParserError> { + fn maybe_parse_optional_interval_fields(&self) -> Result, ParserError> { match self.parse_one_of_keywords(&[ // Can be followed by `TO` option Keyword::YEAR, @@ -11654,7 +11637,7 @@ impl<'a> Parser<'a> { /// ``` /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64 - pub fn parse_datetime_64(&mut self) -> Result<(u64, Option), ParserError> { + pub fn parse_datetime_64(&self) -> Result<(u64, Option), ParserError> { self.expect_keyword_is(Keyword::DATETIME64)?; self.expect_token(&Token::LParen)?; let precision = self.parse_literal_uint()?; @@ -11667,9 +11650,7 @@ impl<'a> Parser<'a> { Ok((precision, time_zone)) } - pub fn parse_optional_character_length( - &mut self, - ) -> Result, ParserError> { + pub fn parse_optional_character_length(&self) -> Result, ParserError> { if self.consume_token(&Token::LParen) { let character_length = self.parse_character_length()?; self.expect_token(&Token::RParen)?; @@ -11679,7 +11660,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_binary_length(&mut self) -> Result, ParserError> { + pub fn 
parse_optional_binary_length(&self) -> Result, ParserError> { if self.consume_token(&Token::LParen) { let binary_length = self.parse_binary_length()?; self.expect_token(&Token::RParen)?; @@ -11689,7 +11670,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_character_length(&mut self) -> Result { + pub fn parse_character_length(&self) -> Result { if self.parse_keyword(Keyword::MAX) { return Ok(CharacterLength::Max); } @@ -11704,7 +11685,7 @@ impl<'a> Parser<'a> { Ok(CharacterLength::IntegerLength { length, unit }) } - pub fn parse_binary_length(&mut self) -> Result { + pub fn parse_binary_length(&self) -> Result { if self.parse_keyword(Keyword::MAX) { return Ok(BinaryLength::Max); } @@ -11713,7 +11694,7 @@ impl<'a> Parser<'a> { } pub fn parse_optional_precision_scale( - &mut self, + &self, ) -> Result<(Option, Option), ParserError> { if self.consume_token(&Token::LParen) { let n = self.parse_literal_uint()?; @@ -11730,7 +11711,7 @@ impl<'a> Parser<'a> { } pub fn parse_exact_number_optional_precision_scale( - &mut self, + &self, ) -> Result { if self.consume_token(&Token::LParen) { let precision = self.parse_literal_uint()?; @@ -11752,7 +11733,7 @@ impl<'a> Parser<'a> { } /// Parse an optionally signed integer literal. 
- fn parse_signed_integer(&mut self) -> Result { + fn parse_signed_integer(&self) -> Result { let is_negative = self.consume_token(&Token::Minus); if !is_negative { @@ -11772,7 +11753,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_type_modifiers(&mut self) -> Result>, ParserError> { + pub fn parse_optional_type_modifiers(&self) -> Result>, ParserError> { if self.consume_token(&Token::LParen) { let mut modifiers = Vec::new(); loop { @@ -11799,7 +11780,7 @@ impl<'a> Parser<'a> { } /// Parse a parenthesized sub data type - fn parse_sub_type(&mut self, parent_type: F) -> Result + fn parse_sub_type(&self, parent_type: F) -> Result where F: FnOnce(Box) -> DataType, { @@ -11812,18 +11793,18 @@ impl<'a> Parser<'a> { /// Parse a DELETE statement, returning a `Box`ed SetExpr /// /// This is used to reduce the size of the stack frames in debug builds - fn parse_delete_setexpr_boxed(&mut self) -> Result, ParserError> { + fn parse_delete_setexpr_boxed(&self) -> Result, ParserError> { Ok(Box::new(SetExpr::Delete(self.parse_delete()?))) } /// Parse a MERGE statement, returning a `Box`ed SetExpr /// /// This is used to reduce the size of the stack frames in debug builds - fn parse_merge_setexpr_boxed(&mut self) -> Result, ParserError> { + fn parse_merge_setexpr_boxed(&self) -> Result, ParserError> { Ok(Box::new(SetExpr::Merge(self.parse_merge()?))) } - pub fn parse_delete(&mut self) -> Result { + pub fn parse_delete(&self) -> Result { let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) { // `FROM` keyword is optional in BigQuery SQL. 
// https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement @@ -11881,7 +11862,7 @@ impl<'a> Parser<'a> { } // KILL [CONNECTION | QUERY | MUTATION] processlist_id - pub fn parse_kill(&mut self) -> Result { + pub fn parse_kill(&self) -> Result { let modifier_keyword = self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]); @@ -11906,10 +11887,7 @@ impl<'a> Parser<'a> { Ok(Statement::Kill { modifier, id }) } - pub fn parse_explain( - &mut self, - describe_alias: DescribeAlias, - ) -> Result { + pub fn parse_explain(&self, describe_alias: DescribeAlias) -> Result { let mut analyze = false; let mut verbose = false; let mut query_plan = false; @@ -11980,7 +11958,7 @@ impl<'a> Parser<'a> { /// preceded with some `WITH` CTE declarations and optionally followed /// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't /// expect the initial keyword to be already consumed - pub fn parse_query(&mut self) -> Result, ParserError> { + pub fn parse_query(&self) -> Result, ParserError> { let _guard = self.recursion_counter.try_decrease()?; let with = if self.parse_keyword(Keyword::WITH) { let with_token = self.get_current_token(); @@ -12108,7 +12086,7 @@ impl<'a> Parser<'a> { } } - fn parse_pipe_operators(&mut self) -> Result, ParserError> { + fn parse_pipe_operators(&self) -> Result, ParserError> { let mut pipe_operators = Vec::new(); while self.consume_token(&Token::VerticalBarRightAngleBracket) { @@ -12327,7 +12305,7 @@ impl<'a> Parser<'a> { Ok(pipe_operators) } - fn parse_settings(&mut self) -> Result>, ParserError> { + fn parse_settings(&self) -> Result>, ParserError> { let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect) && self.parse_keyword(Keyword::SETTINGS) { @@ -12345,7 +12323,7 @@ impl<'a> Parser<'a> { } /// Parse a mssql `FOR [XML | JSON | BROWSE]` clause - pub fn parse_for_clause(&mut self) -> Result, ParserError> { + pub fn parse_for_clause(&self) -> Result, 
ParserError> { if self.parse_keyword(Keyword::XML) { Ok(Some(self.parse_for_xml()?)) } else if self.parse_keyword(Keyword::JSON) { @@ -12358,7 +12336,7 @@ impl<'a> Parser<'a> { } /// Parse a mssql `FOR XML` clause - pub fn parse_for_xml(&mut self) -> Result { + pub fn parse_for_xml(&self) -> Result { let for_xml = if self.parse_keyword(Keyword::RAW) { let mut element_name = None; if self.peek_token().token == Token::LParen { @@ -12413,7 +12391,7 @@ impl<'a> Parser<'a> { } /// Parse a mssql `FOR JSON` clause - pub fn parse_for_json(&mut self) -> Result { + pub fn parse_for_json(&self) -> Result { let for_json = if self.parse_keyword(Keyword::AUTO) { ForJson::Auto } else if self.parse_keyword(Keyword::PATH) { @@ -12447,7 +12425,7 @@ impl<'a> Parser<'a> { } /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`) - pub fn parse_cte(&mut self) -> Result { + pub fn parse_cte(&self) -> Result { let name = self.parse_identifier()?; let mut cte = if self.parse_keyword(Keyword::AS) { @@ -12514,7 +12492,7 @@ impl<'a> Parser<'a> { /// subquery ::= query_body [ order_by_limit ] /// set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body /// ``` - pub fn parse_query_body(&mut self, precedence: u8) -> Result, ParserError> { + pub fn parse_query_body(&self, precedence: u8) -> Result, ParserError> { // We parse the expression using a Pratt parser, as in `parse_expr()`. 
// Start by parsing a restricted SELECT or a `(subquery)`: let expr = if self.peek_keyword(Keyword::SELECT) @@ -12545,7 +12523,7 @@ impl<'a> Parser<'a> { /// /// (this is its own function to reduce required stack size in debug builds) fn parse_remaining_set_exprs( - &mut self, + &self, mut expr: SetExpr, precedence: u8, ) -> Result, ParserError> { @@ -12578,7 +12556,7 @@ impl<'a> Parser<'a> { Ok(expr.into()) } - pub fn parse_set_operator(&mut self, token: &Token) -> Option { + pub fn parse_set_operator(&self, token: &Token) -> Option { match token { Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union), Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except), @@ -12588,7 +12566,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_set_quantifier(&mut self, op: &Option) -> SetQuantifier { + pub fn parse_set_quantifier(&self, op: &Option) -> SetQuantifier { match op { Some( SetOperator::Except @@ -12617,7 +12595,7 @@ impl<'a> Parser<'a> { } /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`) - pub fn parse_select(&mut self) -> Result { + pub fn parse_select(&self) -> Result { let mut from_first = None; if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) { @@ -12836,7 +12814,7 @@ impl<'a> Parser<'a> { }) } - fn parse_value_table_mode(&mut self) -> Result, ParserError> { + fn parse_value_table_mode(&self) -> Result, ParserError> { if !dialect_of!(self is BigQueryDialect) { return Ok(None); } @@ -12865,18 +12843,18 @@ impl<'a> Parser<'a> { /// Invoke `f` after first setting the parser's `ParserState` to `state`. /// /// Upon return, restores the parser's state to what it started at. 
- fn with_state(&mut self, state: ParserState, mut f: F) -> Result + fn with_state(&self, state: ParserState, mut f: F) -> Result where - F: FnMut(&mut Parser) -> Result, + F: FnMut(&Parser) -> Result, { - let current_state = self.state; - self.state = state; + let current_state = self.state.get(); + self.state.set(state); let res = f(self); - self.state = current_state; + self.state.set(current_state); res } - pub fn parse_connect_by(&mut self) -> Result { + pub fn parse_connect_by(&self) -> Result { let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) { let relationships = self.with_state(ParserState::ConnectBy, |parser| { parser.parse_comma_separated(Parser::parse_expr) @@ -12900,7 +12878,7 @@ impl<'a> Parser<'a> { } /// Parse `CREATE TABLE x AS TABLE y` - pub fn parse_as_table(&mut self) -> Result { + pub fn parse_as_table(&self) -> Result { let token1 = self.next_token(); let token2 = self.next_token(); let token3 = self.next_token(); @@ -12945,10 +12923,7 @@ impl<'a> Parser<'a> { } /// Parse a `SET ROLE` statement. Expects SET to be consumed already. - fn parse_set_role( - &mut self, - modifier: Option, - ) -> Result { + fn parse_set_role(&self, modifier: Option) -> Result { self.expect_keyword_is(Keyword::ROLE)?; let role_name = if self.parse_keyword(Keyword::NONE) { @@ -12962,10 +12937,7 @@ impl<'a> Parser<'a> { })) } - fn parse_set_values( - &mut self, - parenthesized_assignment: bool, - ) -> Result, ParserError> { + fn parse_set_values(&self, parenthesized_assignment: bool) -> Result, ParserError> { let mut values = vec![]; if parenthesized_assignment { @@ -12993,7 +12965,7 @@ impl<'a> Parser<'a> { } } - fn parse_context_modifier(&mut self) -> Option { + fn parse_context_modifier(&self) -> Option { let modifier = self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?; @@ -13001,7 +12973,7 @@ impl<'a> Parser<'a> { } /// Parse a single SET statement assignment `var = expr`. 
- fn parse_set_assignment(&mut self) -> Result { + fn parse_set_assignment(&self) -> Result { let scope = self.parse_context_modifier(); let name = if self.dialect.supports_parenthesized_set_variables() @@ -13024,7 +12996,7 @@ impl<'a> Parser<'a> { Ok(SetAssignment { scope, name, value }) } - fn parse_set(&mut self) -> Result { + fn parse_set(&self) -> Result { let hivevar = self.parse_keyword(Keyword::HIVEVAR); // Modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc), not both @@ -13137,7 +13109,7 @@ impl<'a> Parser<'a> { && self.consume_token(&Token::LParen) { let vars = OneOrManyWithParens::Many( - self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())? + self.parse_comma_separated(|parser: &Parser<'a>| parser.parse_identifier())? .into_iter() .map(|ident| ObjectName::from(vec![ident])) .collect(), @@ -13173,7 +13145,7 @@ impl<'a> Parser<'a> { self.expected("equals sign or TO", self.peek_token()) } - pub fn parse_set_session_params(&mut self) -> Result { + pub fn parse_set_session_params(&self) -> Result { if self.parse_keyword(Keyword::STATISTICS) { let topic = match self.parse_one_of_keywords(&[ Keyword::IO, @@ -13237,7 +13209,7 @@ impl<'a> Parser<'a> { } } - fn parse_session_param_value(&mut self) -> Result { + fn parse_session_param_value(&self) -> Result { if self.parse_keyword(Keyword::ON) { Ok(SessionParamValue::On) } else if self.parse_keyword(Keyword::OFF) { @@ -13247,7 +13219,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_show(&mut self) -> Result { + pub fn parse_show(&self) -> Result { let terse = self.parse_keyword(Keyword::TERSE); let extended = self.parse_keyword(Keyword::EXTENDED); let full = self.parse_keyword(Keyword::FULL); @@ -13306,7 +13278,7 @@ impl<'a> Parser<'a> { } } - fn parse_show_charset(&mut self, is_shorthand: bool) -> Result { + fn parse_show_charset(&self, is_shorthand: bool) -> Result { // parse one of keywords Ok(Statement::ShowCharset(ShowCharset { is_shorthand, @@ -13314,7 +13286,7 @@ 
impl<'a> Parser<'a> { })) } - fn parse_show_databases(&mut self, terse: bool) -> Result { + fn parse_show_databases(&self, terse: bool) -> Result { let history = self.parse_keyword(Keyword::HISTORY); let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowDatabases { @@ -13324,7 +13296,7 @@ impl<'a> Parser<'a> { }) } - fn parse_show_schemas(&mut self, terse: bool) -> Result { + fn parse_show_schemas(&self, terse: bool) -> Result { let history = self.parse_keyword(Keyword::HISTORY); let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowSchemas { @@ -13334,7 +13306,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_show_create(&mut self) -> Result { + pub fn parse_show_create(&self) -> Result { let obj_type = match self.expect_one_of_keywords(&[ Keyword::TABLE, Keyword::TRIGGER, @@ -13359,11 +13331,7 @@ impl<'a> Parser<'a> { Ok(Statement::ShowCreate { obj_type, obj_name }) } - pub fn parse_show_columns( - &mut self, - extended: bool, - full: bool, - ) -> Result { + pub fn parse_show_columns(&self, extended: bool, full: bool) -> Result { let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowColumns { extended, @@ -13373,7 +13341,7 @@ impl<'a> Parser<'a> { } fn parse_show_tables( - &mut self, + &self, terse: bool, extended: bool, full: bool, @@ -13391,11 +13359,7 @@ impl<'a> Parser<'a> { }) } - fn parse_show_views( - &mut self, - terse: bool, - materialized: bool, - ) -> Result { + fn parse_show_views(&self, terse: bool, materialized: bool) -> Result { let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowViews { materialized, @@ -13404,19 +13368,17 @@ impl<'a> Parser<'a> { }) } - pub fn parse_show_functions(&mut self) -> Result { + pub fn parse_show_functions(&self) -> Result { let filter = self.parse_show_statement_filter()?; Ok(Statement::ShowFunctions { filter }) } - pub fn parse_show_collation(&mut self) -> Result { + pub fn parse_show_collation(&self) -> Result { let filter = 
self.parse_show_statement_filter()?; Ok(Statement::ShowCollation { filter }) } - pub fn parse_show_statement_filter( - &mut self, - ) -> Result, ParserError> { + pub fn parse_show_statement_filter(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::LIKE) { Ok(Some(ShowStatementFilter::Like( self.parse_literal_string()?, @@ -13437,7 +13399,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_use(&mut self) -> Result { + pub fn parse_use(&self) -> Result { // Determine which keywords are recognized by the current dialect let parsed_keyword = if dialect_of!(self is HiveDialect) { // HiveDialect accepts USE DEFAULT; statement without any db specified @@ -13476,7 +13438,7 @@ impl<'a> Parser<'a> { Ok(Statement::Use(result)) } - fn parse_secondary_roles(&mut self) -> Result { + fn parse_secondary_roles(&self) -> Result { self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?; if self.parse_keyword(Keyword::NONE) { Ok(Use::SecondaryRoles(SecondaryRoles::None)) @@ -13488,7 +13450,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_table_and_joins(&mut self) -> Result { + pub fn parse_table_and_joins(&self) -> Result { let relation = self.parse_table_factor()?; // Note that for keywords to be properly handled here, they need to be // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as @@ -13497,7 +13459,7 @@ impl<'a> Parser<'a> { Ok(TableWithJoins { relation, joins }) } - fn parse_joins(&mut self) -> Result, ParserError> { + fn parse_joins(&self) -> Result, ParserError> { let mut joins = vec![]; loop { let global = self.parse_keyword(Keyword::GLOBAL); @@ -13680,7 +13642,7 @@ impl<'a> Parser<'a> { } /// A table name or a parenthesized subquery, followed by optional `[AS] alias` - pub fn parse_table_factor(&mut self) -> Result { + pub fn parse_table_factor(&self) -> Result { if self.parse_keyword(Keyword::LATERAL) { // LATERAL must always be followed by a subquery or table function. 
if self.consume_token(&Token::LParen) { @@ -14006,7 +13968,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_table_sample(&mut self) -> Result>, ParserError> { + fn maybe_parse_table_sample(&self) -> Result>, ParserError> { let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) { TableSampleModifier::TableSample } else if self.parse_keyword(Keyword::SAMPLE) { @@ -14018,7 +13980,7 @@ impl<'a> Parser<'a> { } fn parse_table_sample( - &mut self, + &self, modifier: TableSampleModifier, ) -> Result, ParserError> { let name = match self.parse_one_of_keywords(&[ @@ -14113,7 +14075,7 @@ impl<'a> Parser<'a> { } fn parse_table_sample_seed( - &mut self, + &self, modifier: TableSampleSeedModifier, ) -> Result { self.expect_token(&Token::LParen)?; @@ -14124,7 +14086,7 @@ impl<'a> Parser<'a> { /// Parses `OPENJSON( jsonExpression [ , path ] ) [ ]` clause, /// assuming the `OPENJSON` keyword was already consumed. - fn parse_open_json_table_factor(&mut self) -> Result { + fn parse_open_json_table_factor(&self) -> Result { self.expect_token(&Token::LParen)?; let json_expr = self.parse_expr()?; let json_path = if self.consume_token(&Token::Comma) { @@ -14150,7 +14112,7 @@ impl<'a> Parser<'a> { }) } - fn parse_xml_table_factor(&mut self) -> Result { + fn parse_xml_table_factor(&self) -> Result { self.expect_token(&Token::LParen)?; let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) { self.expect_token(&Token::LParen)?; @@ -14176,14 +14138,14 @@ impl<'a> Parser<'a> { }) } - fn parse_xml_namespace_definition(&mut self) -> Result { + fn parse_xml_namespace_definition(&self) -> Result { let uri = self.parse_expr()?; self.expect_keyword_is(Keyword::AS)?; let name = self.parse_identifier()?; Ok(XmlNamespaceDefinition { uri, name }) } - fn parse_xml_table_column(&mut self) -> Result { + fn parse_xml_table_column(&self) -> Result { let name = self.parse_identifier()?; let option = if self.parse_keyword(Keyword::FOR) { @@ -14218,7 +14180,7 @@ impl<'a> Parser<'a> { 
Ok(XmlTableColumn { name, option }) } - fn parse_xml_passing_clause(&mut self) -> Result { + fn parse_xml_passing_clause(&self) -> Result { let mut arguments = vec![]; if self.parse_keyword(Keyword::PASSING) { loop { @@ -14244,7 +14206,7 @@ impl<'a> Parser<'a> { } /// Parse a [TableFactor::SemanticView] - fn parse_semantic_view_table_factor(&mut self) -> Result { + fn parse_semantic_view_table_factor(&self) -> Result { self.expect_keyword(Keyword::SEMANTIC_VIEW)?; self.expect_token(&Token::LParen)?; @@ -14310,7 +14272,7 @@ impl<'a> Parser<'a> { }) } - fn parse_match_recognize(&mut self, table: TableFactor) -> Result { + fn parse_match_recognize(&self, table: TableFactor) -> Result { self.expect_token(&Token::LParen)?; let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) { @@ -14415,7 +14377,7 @@ impl<'a> Parser<'a> { }) } - fn parse_base_pattern(&mut self) -> Result { + fn parse_base_pattern(&self) -> Result { match self.next_token().token { Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)), Token::Placeholder(s) if s == "$" => { @@ -14454,7 +14416,7 @@ impl<'a> Parser<'a> { } } - fn parse_repetition_pattern(&mut self) -> Result { + fn parse_repetition_pattern(&self) -> Result { let mut pattern = self.parse_base_pattern()?; loop { let token = self.next_token(); @@ -14509,7 +14471,7 @@ impl<'a> Parser<'a> { Ok(pattern) } - fn parse_concat_pattern(&mut self) -> Result { + fn parse_concat_pattern(&self) -> Result { let mut patterns = vec![self.parse_repetition_pattern()?]; while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) { patterns.push(self.parse_repetition_pattern()?); @@ -14520,7 +14482,7 @@ impl<'a> Parser<'a> { } } - fn parse_pattern(&mut self) -> Result { + fn parse_pattern(&self) -> Result { let pattern = self.parse_concat_pattern()?; if self.consume_token(&Token::Pipe) { match self.parse_pattern()? 
{ @@ -14537,7 +14499,7 @@ impl<'a> Parser<'a> { } /// Parses a the timestamp version specifier (i.e. query historical data) - pub fn maybe_parse_table_version(&mut self) -> Result, ParserError> { + pub fn maybe_parse_table_version(&self) -> Result, ParserError> { if self.dialect.supports_timestamp_versioning() { if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF]) { @@ -14554,7 +14516,7 @@ impl<'a> Parser<'a> { /// Parses MySQL's JSON_TABLE column definition. /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR` - pub fn parse_json_table_column_def(&mut self) -> Result { + pub fn parse_json_table_column_def(&self) -> Result { if self.parse_keyword(Keyword::NESTED) { let _has_path_keyword = self.parse_keyword(Keyword::PATH); let path = self.parse_value()?.value; @@ -14603,7 +14565,7 @@ impl<'a> Parser<'a> { /// ``` /// /// Reference: - pub fn parse_openjson_table_column_def(&mut self) -> Result { + pub fn parse_openjson_table_column_def(&self) -> Result { let name = self.parse_identifier()?; let r#type = self.parse_data_type()?; let path = if let Token::SingleQuotedString(path) = self.peek_token().token { @@ -14625,7 +14587,7 @@ impl<'a> Parser<'a> { } fn parse_json_table_column_error_handling( - &mut self, + &self, ) -> Result, ParserError> { let res = if self.parse_keyword(Keyword::NULL) { JsonTableColumnErrorHandling::Null @@ -14641,7 +14603,7 @@ impl<'a> Parser<'a> { } pub fn parse_derived_table_factor( - &mut self, + &self, lateral: IsLateral, ) -> Result { let subquery = self.parse_query()?; @@ -14657,7 +14619,7 @@ impl<'a> Parser<'a> { }) } - fn parse_aliased_function_call(&mut self) -> Result { + fn parse_aliased_function_call(&self) -> Result { let function_name = match self.next_token().token { Token::Word(w) => Ok(w.value), _ => self.expected("a function identifier", self.peek_token()), @@ -14693,7 +14655,7 @@ impl<'a> Parser<'a> { /// assert_eq!(Some("b".to_string()), 
expr_with_alias.alias.map(|x|x.value)); /// # Ok(()) /// # } - pub fn parse_expr_with_alias(&mut self) -> Result { + pub fn parse_expr_with_alias(&self) -> Result { let expr = self.parse_expr()?; let alias = if self.parse_keyword(Keyword::AS) { Some(self.parse_identifier()?) @@ -14704,10 +14666,7 @@ impl<'a> Parser<'a> { Ok(ExprWithAlias { expr, alias }) } - pub fn parse_pivot_table_factor( - &mut self, - table: TableFactor, - ) -> Result { + pub fn parse_pivot_table_factor(&self, table: TableFactor) -> Result { self.expect_token(&Token::LParen)?; let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?; self.expect_keyword_is(Keyword::FOR)?; @@ -14758,7 +14717,7 @@ impl<'a> Parser<'a> { } pub fn parse_unpivot_table_factor( - &mut self, + &self, table: TableFactor, ) -> Result { let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) { @@ -14790,7 +14749,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_join_constraint(&mut self, natural: bool) -> Result { + pub fn parse_join_constraint(&self, natural: bool) -> Result { if natural { Ok(JoinConstraint::Natural) } else if self.parse_keyword(Keyword::ON) { @@ -14806,7 +14765,7 @@ impl<'a> Parser<'a> { } /// Parse a GRANT statement. 
- pub fn parse_grant(&mut self) -> Result { + pub fn parse_grant(&self) -> Result { let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?; self.expect_keyword_is(Keyword::TO)?; @@ -14847,7 +14806,7 @@ impl<'a> Parser<'a> { }) } - fn parse_grantees(&mut self) -> Result, ParserError> { + fn parse_grantees(&self) -> Result, ParserError> { let mut values = vec![]; let mut grantee_type = GranteesType::None; loop { @@ -14916,7 +14875,7 @@ impl<'a> Parser<'a> { } pub fn parse_grant_deny_revoke_privileges_objects( - &mut self, + &self, ) -> Result<(Privileges, Option), ParserError> { let privileges = if self.parse_keyword(Keyword::ALL) { Privileges::All { @@ -15102,7 +15061,7 @@ impl<'a> Parser<'a> { } fn parse_grant_procedure_or_function( - &mut self, + &self, name: &ObjectName, kw: &Option, ) -> Result, ParserError> { @@ -15126,8 +15085,8 @@ impl<'a> Parser<'a> { } } - pub fn parse_grant_permission(&mut self) -> Result { - fn parse_columns(parser: &mut Parser) -> Result>, ParserError> { + pub fn parse_grant_permission(&self) -> Result { + fn parse_columns(parser: &Parser) -> Result>, ParserError> { let columns = parser.parse_parenthesized_column_list(Optional, false)?; if columns.is_empty() { Ok(None) @@ -15246,7 +15205,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_action_create_object_type(&mut self) -> Option { + fn maybe_parse_action_create_object_type(&self) -> Option { // Multi-word object types if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) { Some(ActionCreateObjectType::ApplicationPackage) @@ -15289,7 +15248,7 @@ impl<'a> Parser<'a> { } } - fn parse_action_apply_type(&mut self) -> Result { + fn parse_action_apply_type(&self) -> Result { if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) { Ok(ActionApplyType::AggregationPolicy) } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) { @@ -15315,7 +15274,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_action_execute_obj_type(&mut self) -> 
Option { + fn maybe_parse_action_execute_obj_type(&self) -> Option { if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) { Some(ActionExecuteObjectType::DataMetricFunction) } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) { @@ -15331,7 +15290,7 @@ impl<'a> Parser<'a> { } } - fn parse_action_manage_type(&mut self) -> Result { + fn parse_action_manage_type(&self) -> Result { if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) { Ok(ActionManageType::AccountSupportCases) } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) { @@ -15351,7 +15310,7 @@ impl<'a> Parser<'a> { } } - fn parse_action_modify_type(&mut self) -> Option { + fn parse_action_modify_type(&self) -> Option { if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) { Some(ActionModifyType::LogLevel) } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) { @@ -15365,7 +15324,7 @@ impl<'a> Parser<'a> { } } - fn parse_action_monitor_type(&mut self) -> Option { + fn parse_action_monitor_type(&self) -> Option { if self.parse_keyword(Keyword::EXECUTION) { Some(ActionMonitorType::Execution) } else if self.parse_keyword(Keyword::SECURITY) { @@ -15377,7 +15336,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_grantee_name(&mut self) -> Result { + pub fn parse_grantee_name(&self) -> Result { let mut name = self.parse_object_name(false)?; if self.dialect.supports_user_host_grantee() && name.0.len() == 1 @@ -15393,7 +15352,7 @@ impl<'a> Parser<'a> { } /// Parse [`Statement::Deny`] - pub fn parse_deny(&mut self) -> Result { + pub fn parse_deny(&self) -> Result { self.expect_keyword(Keyword::DENY)?; let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?; @@ -15426,7 +15385,7 @@ impl<'a> Parser<'a> { } /// Parse a REVOKE statement - pub fn parse_revoke(&mut self) -> Result { + pub fn parse_revoke(&self) -> Result { let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?; 
self.expect_keyword_is(Keyword::FROM)?; @@ -15450,7 +15409,7 @@ impl<'a> Parser<'a> { } /// Parse an REPLACE statement - pub fn parse_replace(&mut self) -> Result { + pub fn parse_replace(&self) -> Result { if !dialect_of!(self is MySqlDialect | GenericDialect) { return parser_err!( "Unsupported statement REPLACE", @@ -15469,12 +15428,12 @@ impl<'a> Parser<'a> { /// Parse an INSERT statement, returning a `Box`ed SetExpr /// /// This is used to reduce the size of the stack frames in debug builds - fn parse_insert_setexpr_boxed(&mut self) -> Result, ParserError> { + fn parse_insert_setexpr_boxed(&self) -> Result, ParserError> { Ok(Box::new(SetExpr::Insert(self.parse_insert()?))) } /// Parse an INSERT statement - pub fn parse_insert(&mut self) -> Result { + pub fn parse_insert(&self) -> Result { let or = self.parse_conflict_clause(); let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) { None @@ -15669,7 +15628,7 @@ impl<'a> Parser<'a> { // Parses input format clause used for [ClickHouse]. // // - pub fn parse_input_format_clause(&mut self) -> Result { + pub fn parse_input_format_clause(&self) -> Result { let ident = self.parse_identifier()?; let values = self .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))? @@ -15680,13 +15639,13 @@ impl<'a> Parser<'a> { /// Returns true if the immediate tokens look like the /// beginning of a subquery. 
`(SELECT ...` - fn peek_subquery_start(&mut self) -> bool { + fn peek_subquery_start(&self) -> bool { let [maybe_lparen, maybe_select] = self.peek_tokens(); Token::LParen == maybe_lparen && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT) } - fn parse_conflict_clause(&mut self) -> Option { + fn parse_conflict_clause(&self) -> Option { if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) { Some(SqliteOnConflict::Replace) } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) { @@ -15704,7 +15663,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_insert_partition(&mut self) -> Result>, ParserError> { + pub fn parse_insert_partition(&self) -> Result>, ParserError> { if self.parse_keyword(Keyword::PARTITION) { self.expect_token(&Token::LParen)?; let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?); @@ -15715,9 +15674,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_load_data_table_format( - &mut self, - ) -> Result, ParserError> { + pub fn parse_load_data_table_format(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::INPUTFORMAT) { let input_format = self.parse_expr()?; self.expect_keyword_is(Keyword::SERDE)?; @@ -15734,11 +15691,11 @@ impl<'a> Parser<'a> { /// Parse an UPDATE statement, returning a `Box`ed SetExpr /// /// This is used to reduce the size of the stack frames in debug builds - fn parse_update_setexpr_boxed(&mut self) -> Result, ParserError> { + fn parse_update_setexpr_boxed(&self) -> Result, ParserError> { Ok(Box::new(SetExpr::Update(self.parse_update()?))) } - pub fn parse_update(&mut self) -> Result { + pub fn parse_update(&self) -> Result { let or = self.parse_conflict_clause(); let table = self.parse_table_and_joins()?; let from_before_set = if self.parse_keyword(Keyword::FROM) { @@ -15785,7 +15742,7 @@ impl<'a> Parser<'a> { } /// Parse a `var = expr` assignment, used in an UPDATE statement - pub fn parse_assignment(&mut self) -> Result { + pub fn parse_assignment(&self) -> Result { let 
target = self.parse_assignment_target()?; self.expect_token(&Token::Eq)?; let value = self.parse_expr()?; @@ -15793,7 +15750,7 @@ impl<'a> Parser<'a> { } /// Parse the left-hand side of an assignment, used in an UPDATE statement - pub fn parse_assignment_target(&mut self) -> Result { + pub fn parse_assignment_target(&self) -> Result { if self.consume_token(&Token::LParen) { let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?; self.expect_token(&Token::RParen)?; @@ -15804,7 +15761,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_function_args(&mut self) -> Result { + pub fn parse_function_args(&self) -> Result { let arg = if self.dialect.supports_named_fn_args_with_expr_name() { self.maybe_parse(|p| { let name = p.parse_expr()?; @@ -15834,7 +15791,7 @@ impl<'a> Parser<'a> { Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into())) } - fn parse_function_named_arg_operator(&mut self) -> Result { + fn parse_function_named_arg_operator(&self) -> Result { if self.parse_keyword(Keyword::VALUE) { return Ok(FunctionArgOperator::Value); } @@ -15863,7 +15820,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_args(&mut self) -> Result, ParserError> { + pub fn parse_optional_args(&self) -> Result, ParserError> { if self.consume_token(&Token::RParen) { Ok(vec![]) } else { @@ -15873,7 +15830,7 @@ impl<'a> Parser<'a> { } } - fn parse_table_function_args(&mut self) -> Result { + fn parse_table_function_args(&self) -> Result { if self.consume_token(&Token::RParen) { return Ok(TableFunctionArgs { args: vec![], @@ -15902,7 +15859,7 @@ impl<'a> Parser<'a> { /// FIRST_VALUE(x ORDER BY 1,2,3); /// FIRST_VALUE(x IGNORE NULL); /// ``` - fn parse_function_argument_list(&mut self) -> Result { + fn parse_function_argument_list(&self) -> Result { let mut clauses = vec![]; // Handle clauses that may exist with an empty argument list @@ -15986,7 +15943,7 @@ impl<'a> Parser<'a> { }) } - fn parse_json_null_clause(&mut self) -> Option { + fn parse_json_null_clause(&self) 
-> Option { if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) { Some(JsonNullClause::AbsentOnNull) } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) { @@ -15997,7 +15954,7 @@ impl<'a> Parser<'a> { } fn maybe_parse_json_returning_clause( - &mut self, + &self, ) -> Result, ParserError> { if self.parse_keyword(Keyword::RETURNING) { let data_type = self.parse_data_type()?; @@ -16007,7 +15964,7 @@ impl<'a> Parser<'a> { } } - fn parse_duplicate_treatment(&mut self) -> Result, ParserError> { + fn parse_duplicate_treatment(&self) -> Result, ParserError> { let loc = self.peek_token().span.start; match ( self.parse_keyword(Keyword::ALL), @@ -16021,7 +15978,7 @@ impl<'a> Parser<'a> { } /// Parse a comma-delimited list of projections after SELECT - pub fn parse_select_item(&mut self) -> Result { + pub fn parse_select_item(&self) -> Result { let prefix = self .parse_one_of_keywords( self.dialect @@ -16086,7 +16043,7 @@ impl<'a> Parser<'a> { /// /// If it is not possible to parse it, will return an option. pub fn parse_wildcard_additional_options( - &mut self, + &self, wildcard_token: TokenWithSpan, ) -> Result { let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) { @@ -16130,9 +16087,7 @@ impl<'a> Parser<'a> { /// Parse an [`Ilike`](IlikeSelectItem) information for wildcard select items. /// /// If it is not possible to parse it, will return an option. - pub fn parse_optional_select_item_ilike( - &mut self, - ) -> Result, ParserError> { + pub fn parse_optional_select_item_ilike(&self) -> Result, ParserError> { let opt_ilike = if self.parse_keyword(Keyword::ILIKE) { let next_token = self.next_token(); let pattern = match next_token.token { @@ -16150,7 +16105,7 @@ impl<'a> Parser<'a> { /// /// If it is not possible to parse it, will return an option. 
pub fn parse_optional_select_item_exclude( - &mut self, + &self, ) -> Result, ParserError> { let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) { if self.consume_token(&Token::LParen) { @@ -16172,7 +16127,7 @@ impl<'a> Parser<'a> { /// /// If it is not possible to parse it, will return an option. pub fn parse_optional_select_item_except( - &mut self, + &self, ) -> Result, ParserError> { let opt_except = if self.parse_keyword(Keyword::EXCEPT) { if self.peek_token().token == Token::LParen { @@ -16206,7 +16161,7 @@ impl<'a> Parser<'a> { /// Parse a [`Rename`](RenameSelectItem) information for wildcard select items. pub fn parse_optional_select_item_rename( - &mut self, + &self, ) -> Result, ParserError> { let opt_rename = if self.parse_keyword(Keyword::RENAME) { if self.consume_token(&Token::LParen) { @@ -16227,7 +16182,7 @@ impl<'a> Parser<'a> { /// Parse a [`Replace`](ReplaceSelectItem) information for wildcard select items. pub fn parse_optional_select_item_replace( - &mut self, + &self, ) -> Result, ParserError> { let opt_replace = if self.parse_keyword(Keyword::REPLACE) { if self.consume_token(&Token::LParen) { @@ -16246,7 +16201,7 @@ impl<'a> Parser<'a> { Ok(opt_replace) } - pub fn parse_replace_elements(&mut self) -> Result { + pub fn parse_replace_elements(&self) -> Result { let expr = self.parse_expr()?; let as_keyword = self.parse_keyword(Keyword::AS); let ident = self.parse_identifier()?; @@ -16259,7 +16214,7 @@ impl<'a> Parser<'a> { /// Parse ASC or DESC, returns an Option with true if ASC, false of DESC or `None` if none of /// them. - pub fn parse_asc_desc(&mut self) -> Option { + pub fn parse_asc_desc(&self) -> Option { if self.parse_keyword(Keyword::ASC) { Some(true) } else if self.parse_keyword(Keyword::DESC) { @@ -16270,13 +16225,13 @@ impl<'a> Parser<'a> { } /// Parse an [OrderByExpr] expression. 
- pub fn parse_order_by_expr(&mut self) -> Result { + pub fn parse_order_by_expr(&self) -> Result { self.parse_order_by_expr_inner(false) .map(|(order_by, _)| order_by) } /// Parse an [IndexColumn]. - pub fn parse_create_index_expr(&mut self) -> Result { + pub fn parse_create_index_expr(&self) -> Result { self.parse_order_by_expr_inner(true) .map(|(column, operator_class)| IndexColumn { column, @@ -16285,7 +16240,7 @@ impl<'a> Parser<'a> { } fn parse_order_by_expr_inner( - &mut self, + &self, with_operator_class: bool, ) -> Result<(OrderByExpr, Option), ParserError> { let expr = self.parse_expr()?; @@ -16325,7 +16280,7 @@ impl<'a> Parser<'a> { )) } - fn parse_order_by_options(&mut self) -> Result { + fn parse_order_by_options(&self) -> Result { let asc = self.parse_asc_desc(); let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) { @@ -16341,7 +16296,7 @@ impl<'a> Parser<'a> { // Parse a WITH FILL clause (ClickHouse dialect) // that follow the WITH FILL keywords in a ORDER BY clause - pub fn parse_with_fill(&mut self) -> Result { + pub fn parse_with_fill(&self) -> Result { let from = if self.parse_keyword(Keyword::FROM) { Some(self.parse_expr()?) } else { @@ -16365,7 +16320,7 @@ impl<'a> Parser<'a> { // Parse a set of comma separated INTERPOLATE expressions (ClickHouse dialect) // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier - pub fn parse_interpolations(&mut self) -> Result, ParserError> { + pub fn parse_interpolations(&self) -> Result, ParserError> { if !self.parse_keyword(Keyword::INTERPOLATE) { return Ok(None); } @@ -16385,7 +16340,7 @@ impl<'a> Parser<'a> { } // Parse a INTERPOLATE expression (ClickHouse dialect) - pub fn parse_interpolation(&mut self) -> Result { + pub fn parse_interpolation(&self) -> Result { let column = self.parse_identifier()?; let expr = if self.parse_keyword(Keyword::AS) { Some(self.parse_expr()?) 
@@ -16397,7 +16352,7 @@ impl<'a> Parser<'a> { /// Parse a TOP clause, MSSQL equivalent of LIMIT, /// that follows after `SELECT [DISTINCT]`. - pub fn parse_top(&mut self) -> Result { + pub fn parse_top(&self) -> Result { let quantity = if self.consume_token(&Token::LParen) { let quantity = self.parse_expr()?; self.expect_token(&Token::RParen)?; @@ -16423,7 +16378,7 @@ impl<'a> Parser<'a> { } /// Parse a LIMIT clause - pub fn parse_limit(&mut self) -> Result, ParserError> { + pub fn parse_limit(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::ALL) { Ok(None) } else { @@ -16432,7 +16387,7 @@ impl<'a> Parser<'a> { } /// Parse an OFFSET clause - pub fn parse_offset(&mut self) -> Result { + pub fn parse_offset(&self) -> Result { let value = self.parse_expr()?; let rows = if self.parse_keyword(Keyword::ROW) { OffsetRows::Row @@ -16445,7 +16400,7 @@ impl<'a> Parser<'a> { } /// Parse a FETCH clause - pub fn parse_fetch(&mut self) -> Result { + pub fn parse_fetch(&self) -> Result { let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]); let (quantity, percent) = if self @@ -16474,7 +16429,7 @@ impl<'a> Parser<'a> { } /// Parse a FOR UPDATE/FOR SHARE clause - pub fn parse_lock(&mut self) -> Result { + pub fn parse_lock(&self) -> Result { let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? 
{ Keyword::UPDATE => LockType::Update, Keyword::SHARE => LockType::Share, @@ -16499,7 +16454,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_values(&mut self, allow_empty: bool) -> Result { + pub fn parse_values(&self, allow_empty: bool) -> Result { let mut explicit_row = false; let rows = self.parse_comma_separated(|parser| { @@ -16520,7 +16475,7 @@ impl<'a> Parser<'a> { Ok(Values { explicit_row, rows }) } - pub fn parse_start_transaction(&mut self) -> Result { + pub fn parse_start_transaction(&self) -> Result { self.expect_keyword_is(Keyword::TRANSACTION)?; Ok(Statement::StartTransaction { modes: self.parse_transaction_modes()?, @@ -16533,7 +16488,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_begin(&mut self) -> Result { + pub fn parse_begin(&self) -> Result { let modifier = if !self.dialect.supports_start_transaction_modifier() { None } else if self.parse_keyword(Keyword::DEFERRED) { @@ -16565,7 +16520,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_begin_exception_end(&mut self) -> Result { + pub fn parse_begin_exception_end(&self) -> Result { let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?; let exception = if self.parse_keyword(Keyword::EXCEPTION) { @@ -16610,7 +16565,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_end(&mut self) -> Result { + pub fn parse_end(&self) -> Result { let modifier = if !self.dialect.supports_end_transaction_modifier() { None } else if self.parse_keyword(Keyword::TRY) { @@ -16627,7 +16582,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_transaction_modes(&mut self) -> Result, ParserError> { + pub fn parse_transaction_modes(&self) -> Result, ParserError> { let mut modes = vec![]; let mut required = false; loop { @@ -16665,7 +16620,7 @@ impl<'a> Parser<'a> { Ok(modes) } - pub fn parse_commit(&mut self) -> Result { + pub fn parse_commit(&self) -> Result { Ok(Statement::Commit { chain: self.parse_commit_rollback_chain()?, end: false, @@ -16673,14 +16628,14 @@ impl<'a> Parser<'a> { }) } - pub fn 
parse_rollback(&mut self) -> Result { + pub fn parse_rollback(&self) -> Result { let chain = self.parse_commit_rollback_chain()?; let savepoint = self.parse_rollback_savepoint()?; Ok(Statement::Rollback { chain, savepoint }) } - pub fn parse_commit_rollback_chain(&mut self) -> Result { + pub fn parse_commit_rollback_chain(&self) -> Result { let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]); if self.parse_keyword(Keyword::AND) { let chain = !self.parse_keyword(Keyword::NO); @@ -16691,7 +16646,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_rollback_savepoint(&mut self) -> Result, ParserError> { + pub fn parse_rollback_savepoint(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::TO) { let _ = self.parse_keyword(Keyword::SAVEPOINT); let savepoint = self.parse_identifier()?; @@ -16703,7 +16658,7 @@ impl<'a> Parser<'a> { } /// Parse a 'RAISERROR' statement - pub fn parse_raiserror(&mut self) -> Result { + pub fn parse_raiserror(&self) -> Result { self.expect_token(&Token::LParen)?; let message = Box::new(self.parse_expr()?); self.expect_token(&Token::Comma)?; @@ -16730,7 +16685,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_raiserror_option(&mut self) -> Result { + pub fn parse_raiserror_option(&self) -> Result { match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? 
{ Keyword::LOG => Ok(RaisErrorOption::Log), Keyword::NOWAIT => Ok(RaisErrorOption::NoWait), @@ -16742,13 +16697,13 @@ impl<'a> Parser<'a> { } } - pub fn parse_deallocate(&mut self) -> Result { + pub fn parse_deallocate(&self) -> Result { let prepare = self.parse_keyword(Keyword::PREPARE); let name = self.parse_identifier()?; Ok(Statement::Deallocate { name, prepare }) } - pub fn parse_execute(&mut self) -> Result { + pub fn parse_execute(&self) -> Result { let name = if self.dialect.supports_execute_immediate() && self.parse_keyword(Keyword::IMMEDIATE) { @@ -16802,7 +16757,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_prepare(&mut self) -> Result { + pub fn parse_prepare(&self) -> Result { let name = self.parse_identifier()?; let mut data_types = vec![]; @@ -16820,7 +16775,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_unload(&mut self) -> Result { + pub fn parse_unload(&self) -> Result { self.expect_keyword(Keyword::UNLOAD)?; self.expect_token(&Token::LParen)?; let (query, query_text) = if matches!(self.peek_token().token, Token::SingleQuotedString(_)) @@ -16853,7 +16808,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_merge_clauses(&mut self) -> Result, ParserError> { + pub fn parse_merge_clauses(&self) -> Result, ParserError> { let mut clauses = vec![]; loop { if !(self.parse_keyword(Keyword::WHEN)) { @@ -16952,7 +16907,7 @@ impl<'a> Parser<'a> { Ok(clauses) } - fn parse_output(&mut self, start_keyword: Keyword) -> Result { + fn parse_output(&self, start_keyword: Keyword) -> Result { let select_items = self.parse_projection()?; let into_table = if start_keyword == Keyword::OUTPUT && self.peek_keyword(Keyword::INTO) { self.expect_keyword_is(Keyword::INTO)?; @@ -16971,7 +16926,7 @@ impl<'a> Parser<'a> { }) } - fn parse_select_into(&mut self) -> Result { + fn parse_select_into(&self) -> Result { let temporary = self .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY]) .is_some(); @@ -16987,7 +16942,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_merge(&mut 
self) -> Result { + pub fn parse_merge(&self) -> Result { let into = self.parse_keyword(Keyword::INTO); let table = self.parse_table_factor()?; @@ -17012,7 +16967,7 @@ impl<'a> Parser<'a> { }) } - fn parse_pragma_value(&mut self) -> Result { + fn parse_pragma_value(&self) -> Result { match self.parse_value()?.value { v @ Value::SingleQuotedString(_) => Ok(v), v @ Value::DoubleQuotedString(_) => Ok(v), @@ -17026,7 +16981,7 @@ impl<'a> Parser<'a> { } // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')'] - pub fn parse_pragma(&mut self) -> Result { + pub fn parse_pragma(&self) -> Result { let name = self.parse_object_name(false)?; if self.consume_token(&Token::LParen) { let value = self.parse_pragma_value()?; @@ -17052,14 +17007,14 @@ impl<'a> Parser<'a> { } /// `INSTALL [extension_name]` - pub fn parse_install(&mut self) -> Result { + pub fn parse_install(&self) -> Result { let extension_name = self.parse_identifier()?; Ok(Statement::Install { extension_name }) } /// Parse a SQL LOAD statement - pub fn parse_load(&mut self) -> Result { + pub fn parse_load(&self) -> Result { if self.dialect.supports_load_extension() { let extension_name = self.parse_identifier()?; Ok(Statement::Load { extension_name }) @@ -17093,7 +17048,7 @@ impl<'a> Parser<'a> { /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]] /// ``` /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize) - pub fn parse_optimize_table(&mut self) -> Result { + pub fn parse_optimize_table(&self) -> Result { self.expect_keyword_is(Keyword::TABLE)?; let name = self.parse_object_name(false)?; let on_cluster = self.parse_optional_on_cluster()?; @@ -17133,7 +17088,7 @@ impl<'a> Parser<'a> { /// ``` /// /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details. 
- pub fn parse_create_sequence(&mut self, temporary: bool) -> Result { + pub fn parse_create_sequence(&self, temporary: bool) -> Result { //[ IF NOT EXISTS ] let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); //name @@ -17164,7 +17119,7 @@ impl<'a> Parser<'a> { }) } - fn parse_create_sequence_options(&mut self) -> Result, ParserError> { + fn parse_create_sequence_options(&self) -> Result, ParserError> { let mut sequence_options = vec![]; //[ INCREMENT [ BY ] increment ] if self.parse_keywords(&[Keyword::INCREMENT]) { @@ -17212,7 +17167,7 @@ impl<'a> Parser<'a> { /// Parse a `CREATE SERVER` statement. /// /// See [Statement::CreateServer] - pub fn parse_pg_create_server(&mut self) -> Result { + pub fn parse_pg_create_server(&self) -> Result { let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let name = self.parse_object_name(false)?; @@ -17254,10 +17209,10 @@ impl<'a> Parser<'a> { /// The index of the first unprocessed token. pub fn index(&self) -> usize { - self.index + self.index.get() } - pub fn parse_named_window(&mut self) -> Result { + pub fn parse_named_window(&self) -> Result { let ident = self.parse_identifier()?; self.expect_keyword_is(Keyword::AS)?; @@ -17272,7 +17227,7 @@ impl<'a> Parser<'a> { Ok(NamedWindowDefinition(ident, window_expr)) } - pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result { + pub fn parse_create_procedure(&self, or_alter: bool) -> Result { let name = self.parse_object_name(false)?; let params = self.parse_optional_procedure_parameters()?; @@ -17295,7 +17250,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_window_spec(&mut self) -> Result { + pub fn parse_window_spec(&self) -> Result { let window_name = match self.peek_token().token { Token::Word(word) if word.keyword == Keyword::NoKeyword => { self.parse_optional_ident()? 
@@ -17329,7 +17284,7 @@ impl<'a> Parser<'a> { }) } - pub fn parse_create_type(&mut self) -> Result { + pub fn parse_create_type(&self) -> Result { let name = self.parse_object_name(false)?; self.expect_keyword_is(Keyword::AS)?; @@ -17376,7 +17331,7 @@ impl<'a> Parser<'a> { /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type]) /// /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html) - pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result { + pub fn parse_create_type_enum(&self, name: ObjectName) -> Result { self.expect_token(&Token::LParen)?; let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?; self.expect_token(&Token::RParen)?; @@ -17387,14 +17342,14 @@ impl<'a> Parser<'a> { }) } - fn parse_parenthesized_identifiers(&mut self) -> Result, ParserError> { + fn parse_parenthesized_identifiers(&self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?; self.expect_token(&Token::RParen)?; Ok(idents) } - fn parse_column_position(&mut self) -> Result, ParserError> { + fn parse_column_position(&self) -> Result, ParserError> { if dialect_of!(self is MySqlDialect | GenericDialect) { if self.parse_keyword(Keyword::FIRST) { Ok(Some(MySQLColumnPosition::First)) @@ -17410,14 +17365,14 @@ impl<'a> Parser<'a> { } /// Parse [Statement::Print] - fn parse_print(&mut self) -> Result { + fn parse_print(&self) -> Result { Ok(Statement::Print(PrintStatement { message: Box::new(self.parse_expr()?), })) } /// Parse [Statement::Return] - fn parse_return(&mut self) -> Result { + fn parse_return(&self) -> Result { match self.maybe_parse(|p| p.parse_expr())? { Some(expr) => Ok(Statement::Return(ReturnStatement { value: Some(ReturnStatementValue::Expr(expr)), @@ -17429,7 +17384,7 @@ impl<'a> Parser<'a> { /// /// Parse a `EXPORT DATA` statement. 
/// /// See [Statement::ExportData] - fn parse_export_data(&mut self) -> Result { + fn parse_export_data(&self) -> Result { self.expect_keywords(&[Keyword::EXPORT, Keyword::DATA])?; let connection = if self.parse_keywords(&[Keyword::WITH, Keyword::CONNECTION]) { @@ -17450,7 +17405,7 @@ impl<'a> Parser<'a> { })) } - fn parse_vacuum(&mut self) -> Result { + fn parse_vacuum(&self) -> Result { self.expect_keyword(Keyword::VACUUM)?; let full = self.parse_keyword(Keyword::FULL); let sort_only = self.parse_keywords(&[Keyword::SORT, Keyword::ONLY]); @@ -17490,7 +17445,7 @@ impl<'a> Parser<'a> { } /// Returns true if the next keyword indicates a sub query, i.e. SELECT or WITH - fn peek_sub_query(&mut self) -> bool { + fn peek_sub_query(&self) -> bool { if self .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH]) .is_some() @@ -17501,7 +17456,7 @@ impl<'a> Parser<'a> { false } - pub(crate) fn parse_show_stmt_options(&mut self) -> Result { + pub(crate) fn parse_show_stmt_options(&self) -> Result { let show_in; let mut filter_position = None; if self.dialect.supports_show_like_before_in() { @@ -17527,7 +17482,7 @@ impl<'a> Parser<'a> { }) } - fn maybe_parse_show_stmt_in(&mut self) -> Result, ParserError> { + fn maybe_parse_show_stmt_in(&self) -> Result, ParserError> { let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) { Some(Keyword::FROM) => ShowStatementInClause::FROM, Some(Keyword::IN) => ShowStatementInClause::IN, @@ -17597,7 +17552,7 @@ impl<'a> Parser<'a> { })) } - fn maybe_parse_show_stmt_starts_with(&mut self) -> Result, ParserError> { + fn maybe_parse_show_stmt_starts_with(&self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) { Ok(Some(self.parse_value()?.value)) } else { @@ -17605,7 +17560,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_show_stmt_limit(&mut self) -> Result, ParserError> { + fn maybe_parse_show_stmt_limit(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::LIMIT) { 
Ok(self.parse_limit()?) } else { @@ -17613,7 +17568,7 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_show_stmt_from(&mut self) -> Result, ParserError> { + fn maybe_parse_show_stmt_from(&self) -> Result, ParserError> { if self.parse_keyword(Keyword::FROM) { Ok(Some(self.parse_value()?.value)) } else { @@ -17622,7 +17577,7 @@ impl<'a> Parser<'a> { } pub(crate) fn in_column_definition_state(&self) -> bool { - matches!(self.state, ColumnDefinition) + matches!(self.state.get(), ColumnDefinition) } /// Parses options provided in key-value format. @@ -17630,7 +17585,7 @@ impl<'a> Parser<'a> { /// * `parenthesized` - true if the options are enclosed in parenthesis /// * `end_words` - a list of keywords that any of them indicates the end of the options section pub(crate) fn parse_key_value_options( - &mut self, + &self, parenthesized: bool, end_words: &[Keyword], ) -> Result { @@ -17668,10 +17623,7 @@ impl<'a> Parser<'a> { } /// Parses a `KEY = VALUE` construct based on the specified key - pub(crate) fn parse_key_value_option( - &mut self, - key: &Word, - ) -> Result { + pub(crate) fn parse_key_value_option(&self, key: &Word) -> Result { self.expect_token(&Token::Eq)?; match self.peek_token().token { Token::SingleQuotedString(_) => Ok(KeyValueOption { diff --git a/src/tokenizer.rs b/src/tokenizer.rs index 54a158c1f..d62b1d8fe 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -23,12 +23,15 @@ #[cfg(not(feature = "std"))] use alloc::{ - borrow::ToOwned, + borrow::{Cow, ToOwned}, format, string::{String, ToString}, vec, vec::Vec, }; +#[cfg(feature = "std")] +use std::borrow::Cow; + use core::iter::Peekable; use core::num::NonZeroU8; use core::str::Chars; @@ -743,8 +746,12 @@ impl std::error::Error for TokenizerError {} struct State<'a> { peekable: Peekable>, - pub line: u64, - pub col: u64, + /// Reference to the original source string being tokenized + source: &'a str, + line: u64, + col: u64, + /// Byte position in the source string + byte_pos: usize, } impl State<'_> { 
@@ -759,6 +766,8 @@ impl State<'_> { } else { self.col += 1; } + // Update byte position (characters can be multi-byte in UTF-8) + self.byte_pos += s.len_utf8(); Some(s) } } @@ -769,6 +778,16 @@ impl State<'_> { self.peekable.peek() } + /// Return the character `n` positions ahead without advancing the stream. + /// For example, `peek_nth(0)` returns the current character (same as peek), + /// and `peek_nth(1)` returns the next character. + pub fn peek_nth(&self, n: usize) -> Option { + if self.byte_pos >= self.source.len() { + return None; + } + self.source[self.byte_pos..].chars().nth(n) + } + pub fn location(&self) -> Location { Location { line: self.line, @@ -893,8 +912,10 @@ impl<'a> Tokenizer<'a> { ) -> Result<(), TokenizerError> { let mut state = State { peekable: self.query.chars().peekable(), + source: self.query, line: 1, col: 1, + byte_pos: 0, }; let mut location = state.location(); @@ -908,22 +929,24 @@ impl<'a> Tokenizer<'a> { Ok(()) } - // Tokenize the identifier or keywords in `ch` + /// Tokenize an identifier or keyword after consuming the first character(s). + /// `consumed_byte_len` is the total byte length of the character(s) already consumed. 
fn tokenize_identifier_or_keyword( &self, - ch: impl IntoIterator, - chars: &mut State, + consumed_byte_len: usize, + chars: &mut State<'a>, ) -> Result, TokenizerError> { chars.next(); // consume the first char - let ch: String = ch.into_iter().collect(); - let word = self.tokenize_word(ch, chars); + let word = self.tokenize_word(consumed_byte_len, chars); // TODO: implement parsing of exponent here if word.chars().all(|x| x.is_ascii_digit() || x == '.') { let mut inner_state = State { peekable: word.chars().peekable(), + source: &word, line: 0, col: 0, + byte_pos: 0, }; let mut s = peeking_take_while(&mut inner_state, |ch| matches!(ch, '0'..='9' | '.')); let s2 = peeking_take_while(chars, |ch| matches!(ch, '0'..='9' | '.')); @@ -937,7 +960,7 @@ impl<'a> Tokenizer<'a> { /// Get the next token or return None fn next_token( &self, - chars: &mut State, + chars: &mut State<'a>, prev_token: Option<&Token>, ) -> Result, TokenizerError> { match chars.peek() { @@ -988,7 +1011,7 @@ impl<'a> Tokenizer<'a> { } _ => { // regular identifier starting with an "b" or "B" - let s = self.tokenize_word(b, chars); + let s = self.tokenize_word(b.len_utf8(), chars); Ok(Some(Token::make_word(&s, None))) } } @@ -1015,7 +1038,7 @@ impl<'a> Tokenizer<'a> { ), _ => { // regular identifier starting with an "r" or "R" - let s = self.tokenize_word(b, chars); + let s = self.tokenize_word(b.len_utf8(), chars); Ok(Some(Token::make_word(&s, None))) } } @@ -1034,7 +1057,7 @@ impl<'a> Tokenizer<'a> { } _ => { // regular identifier starting with an "N" - let s = self.tokenize_word(n, chars); + let s = self.tokenize_word(n.len_utf8(), chars); Ok(Some(Token::make_word(&s, None))) } } @@ -1051,7 +1074,7 @@ impl<'a> Tokenizer<'a> { } _ => { // regular identifier starting with an "E" or "e" - let s = self.tokenize_word(x, chars); + let s = self.tokenize_word(x.len_utf8(), chars); Ok(Some(Token::make_word(&s, None))) } } @@ -1070,7 +1093,7 @@ impl<'a> Tokenizer<'a> { } } // regular identifier starting with 
an "U" or "u" - let s = self.tokenize_word(x, chars); + let s = self.tokenize_word(x.len_utf8(), chars); Ok(Some(Token::make_word(&s, None))) } // The spec only allows an uppercase 'X' to introduce a hex @@ -1085,7 +1108,7 @@ impl<'a> Tokenizer<'a> { } _ => { // regular identifier starting with an "X" - let s = self.tokenize_word(x, chars); + let s = self.tokenize_word(x.len_utf8(), chars); Ok(Some(Token::make_word(&s, None))) } } @@ -1382,7 +1405,8 @@ impl<'a> Tokenizer<'a> { match chars.peek() { Some(s) if s.is_whitespace() => Ok(Some(Token::Mod)), Some(sch) if self.dialect.is_identifier_start('%') => { - self.tokenize_identifier_or_keyword([ch, *sch], chars) + let consumed_byte_len = ch.len_utf8() + sch.len_utf8(); + self.tokenize_identifier_or_keyword(consumed_byte_len, chars) } _ => self.start_binop(chars, "%", Token::Mod), } @@ -1610,7 +1634,8 @@ impl<'a> Tokenizer<'a> { self.consume_for_binop(chars, "##", Token::DoubleSharp) } Some(sch) if self.dialect.is_identifier_start('#') => { - self.tokenize_identifier_or_keyword([ch, *sch], chars) + let consumed_byte_len = ch.len_utf8() + sch.len_utf8(); + self.tokenize_identifier_or_keyword(consumed_byte_len, chars) } _ => self.start_binop(chars, "#", Token::Sharp), } @@ -1635,7 +1660,9 @@ impl<'a> Tokenizer<'a> { match chars.peek() { Some(' ') => Ok(Some(Token::AtAt)), Some(tch) if self.dialect.is_identifier_start('@') => { - self.tokenize_identifier_or_keyword([ch, '@', *tch], chars) + let consumed_byte_len = + ch.len_utf8() + '@'.len_utf8() + tch.len_utf8(); + self.tokenize_identifier_or_keyword(consumed_byte_len, chars) } _ => Ok(Some(Token::AtAt)), } @@ -1654,7 +1681,8 @@ impl<'a> Tokenizer<'a> { Some('\"') => Ok(Some(Token::AtSign)), Some('`') => Ok(Some(Token::AtSign)), Some(sch) if self.dialect.is_identifier_start('@') => { - self.tokenize_identifier_or_keyword([ch, *sch], chars) + let consumed_byte_len = ch.len_utf8() + sch.len_utf8(); + self.tokenize_identifier_or_keyword(consumed_byte_len, chars) } _ => 
Ok(Some(Token::AtSign)), } @@ -1695,7 +1723,8 @@ impl<'a> Tokenizer<'a> { // identifier or keyword ch if self.dialect.is_identifier_start(ch) => { - self.tokenize_identifier_or_keyword([ch], chars) + let consumed_byte_len = ch.len_utf8(); + self.tokenize_identifier_or_keyword(consumed_byte_len, chars) } '$' => Ok(Some(self.tokenize_dollar_preceded_value(chars)?)), @@ -1757,80 +1786,106 @@ impl<'a> Tokenizer<'a> { } /// Tokenize dollar preceded value (i.e: a string/placeholder) - fn tokenize_dollar_preceded_value(&self, chars: &mut State) -> Result { - let mut s = String::new(); - let mut value = String::new(); + fn tokenize_dollar_preceded_value( + &self, + chars: &mut State<'a>, + ) -> Result { + chars.next(); // consume first $ - chars.next(); + // Case 1: $$text$$ (untagged dollar-quoted string) + if matches!(chars.peek(), Some('$')) && !self.dialect.supports_dollar_placeholder() { + let (value, tag) = self.tokenize_dollar_quoted_string_borrowed(chars, None)?; + return Ok(Token::DollarQuotedString(DollarQuotedString { + value: value.into_owned(), + tag: tag.map(|t| t.into_owned()), + })); + } + + // If it's not $$ we have 2 options : + // Case 2: $tag$text$tag$ (tagged dollar-quoted string) if dialect supports it + // Case 3: $placeholder (e.g., $1, $name) + let tag_start = chars.byte_pos; + let _tag_slice = peeking_take_while_ref(chars, |ch| { + ch.is_alphanumeric() + || ch == '_' + || matches!(ch, '$' if self.dialect.supports_dollar_placeholder()) + }); + let tag_end = chars.byte_pos; - // If the dialect does not support dollar-quoted strings, then `$$` is rather a placeholder. 
+ // Case 2: $tag$text$tag$ (tagged dollar-quoted string) if matches!(chars.peek(), Some('$')) && !self.dialect.supports_dollar_placeholder() { - chars.next(); + let tag_value = &chars.source[tag_start..tag_end]; + let (value, tag) = + self.tokenize_dollar_quoted_string_borrowed(chars, Some(tag_value))?; + return Ok(Token::DollarQuotedString(DollarQuotedString { + value: value.into_owned(), + tag: tag.map(|t| t.into_owned()), + })); + } - let mut is_terminated = false; - let mut prev: Option = None; + // Case 3: $placeholder (e.g., $1, $name) + let tag_value = &chars.source[tag_start..tag_end]; + Ok(Token::Placeholder(format!("${}", tag_value))) + } - while let Some(&ch) = chars.peek() { - if prev == Some('$') { - if ch == '$' { - chars.next(); - is_terminated = true; - break; - } else { - s.push('$'); - s.push(ch); + /// Tokenize a dollar-quoted string ($$text$$ or $tag$text$tag$), returning borrowed slices. + /// tag_prefix: None for $$, Some("tag") for $tag$ + /// Returns (value: Cow<'a, str>, tag: Option>) + fn tokenize_dollar_quoted_string_borrowed( + &self, + chars: &mut State<'a>, + tag_prefix: Option<&'a str>, + ) -> Result<(Cow<'a, str>, Option>), TokenizerError> { + chars.next(); // consume $ after tag (or second $ for $$) + let content_start = chars.byte_pos; + + match tag_prefix { + None => { + // Case: $$text$$ + let mut prev: Option = None; + + while let Some(&ch) = chars.peek() { + if prev == Some('$') && ch == '$' { + chars.next(); // consume final $ + // content_end is before the first $ of $$ + let content_end = chars.byte_pos - 2; + let value = &chars.source[content_start..content_end]; + return Ok((Cow::Borrowed(value), None)); } - } else if ch != '$' { - s.push(ch); - } - prev = Some(ch); - chars.next(); - } + prev = Some(ch); + chars.next(); + } - return if chars.peek().is_none() && !is_terminated { self.tokenizer_error(chars.location(), "Unterminated dollar-quoted string") - } else { - Ok(Token::DollarQuotedString(DollarQuotedString { - 
value: s, - tag: None, - })) - }; - } else { - value.push_str(&peeking_take_while(chars, |ch| { - ch.is_alphanumeric() - || ch == '_' - // Allow $ as a placeholder character if the dialect supports it - || matches!(ch, '$' if self.dialect.supports_dollar_placeholder()) - })); - - // If the dialect does not support dollar-quoted strings, don't look for the end delimiter. - if matches!(chars.peek(), Some('$')) && !self.dialect.supports_dollar_placeholder() { - chars.next(); - - let mut temp = String::new(); - let end_delimiter = format!("${value}$"); + } + Some(tag) => { + // Case: $tag$text$tag$ + let end_delimiter = format!("${}$", tag); + // Scan for the end delimiter + let buffer_start = content_start; loop { match chars.next() { - Some(ch) => { - temp.push(ch); - - if temp.ends_with(&end_delimiter) { - if let Some(temp) = temp.strip_suffix(&end_delimiter) { - s.push_str(temp); - } - break; + Some(_) => { + let current_pos = chars.byte_pos; + let buffer = &chars.source[buffer_start..current_pos]; + + if buffer.ends_with(&end_delimiter) { + // Found the end delimiter + let content_end = current_pos - end_delimiter.len(); + let value = &chars.source[content_start..content_end]; + return Ok(( + Cow::Borrowed(value), + if tag.is_empty() { + None + } else { + Some(Cow::Borrowed(tag)) + }, + )); } } None => { - if temp.ends_with(&end_delimiter) { - if let Some(temp) = temp.strip_suffix(&end_delimiter) { - s.push_str(temp); - } - break; - } - return self.tokenizer_error( chars.location(), "Unterminated dollar-quoted, expected $", @@ -1838,15 +1893,8 @@ impl<'a> Tokenizer<'a> { } } } - } else { - return Ok(Token::Placeholder(String::from("$") + &value)); } } - - Ok(Token::DollarQuotedString(DollarQuotedString { - value: s, - tag: if value.is_empty() { None } else { Some(value) }, - })) } fn tokenizer_error( @@ -1861,35 +1909,69 @@ impl<'a> Tokenizer<'a> { } // Consume characters until newline - fn tokenize_single_line_comment(&self, chars: &mut State) -> String { - let 
mut comment = peeking_take_while(chars, |ch| match ch { + fn tokenize_single_line_comment(&self, chars: &mut State<'a>) -> String { + self.tokenize_single_line_comment_borrowed(chars) + .to_string() + } + + /// Tokenize a single-line comment, returning a borrowed slice. + /// Returns a slice that includes the terminating newline character. + fn tokenize_single_line_comment_borrowed(&self, chars: &mut State<'a>) -> &'a str { + let start_pos = chars.byte_pos; + + // Consume until newline + peeking_take_while_ref(chars, |ch| match ch { '\n' => false, // Always stop at \n '\r' if dialect_of!(self is PostgreSqlDialect) => false, // Stop at \r for Postgres _ => true, // Keep consuming for other characters }); + // Consume the newline character if let Some(ch) = chars.next() { assert!(ch == '\n' || ch == '\r'); - comment.push(ch); } - comment + // Return slice including the newline + &chars.source[start_pos..chars.byte_pos] } - /// Tokenize an identifier or keyword, after the first char is already consumed. - fn tokenize_word(&self, first_chars: impl Into, chars: &mut State) -> String { - let mut s = first_chars.into(); - s.push_str(&peeking_take_while(chars, |ch| { - self.dialect.is_identifier_part(ch) - })); - s + /// Tokenize an identifier or keyword, after the first char(s) have already been consumed. + /// `consumed_byte_len` is the byte length of the consumed character(s). + fn tokenize_word(&self, consumed_byte_len: usize, chars: &mut State<'a>) -> String { + // Overflow check: ensure we can safely subtract + if consumed_byte_len > chars.byte_pos { + return String::new(); + } + + // Calculate where the first character started + let first_char_byte_pos = chars.byte_pos - consumed_byte_len; + + // Use the zero-copy version and convert to String + self.tokenize_word_borrowed(first_char_byte_pos, chars) + .to_string() + } + + /// Tokenize an identifier or keyword, returning a borrowed slice when possible. 
+ /// The first character position must be provided (before it was consumed). + /// Returns a slice with the same lifetime as the State's source. + fn tokenize_word_borrowed(&self, first_char_byte_pos: usize, chars: &mut State<'a>) -> &'a str { + // Consume the rest of the word + peeking_take_while_ref(chars, |ch| self.dialect.is_identifier_part(ch)); + + // Boundary check: ensure first_char_byte_pos is valid + if first_char_byte_pos > chars.byte_pos || first_char_byte_pos > chars.source.len() { + return ""; + } + + // Return a slice from the first char to the current position + &chars.source[first_char_byte_pos..chars.byte_pos] } /// Read a quoted identifier fn tokenize_quoted_identifier( &self, quote_start: char, - chars: &mut State, + chars: &mut State<'a>, ) -> Result { let error_loc = chars.location(); chars.next(); // consume the opening quote @@ -2103,9 +2185,21 @@ impl<'a> Tokenizer<'a> { fn tokenize_multiline_comment( &self, - chars: &mut State, + chars: &mut State<'a>, ) -> Result, TokenizerError> { - let mut s = String::new(); + let s = self.tokenize_multiline_comment_borrowed(chars)?; + Ok(Some(Token::Whitespace(Whitespace::MultiLineComment( + s.to_string(), + )))) + } + + /// Tokenize a multi-line comment, returning a borrowed slice. + /// Returns a slice that excludes the opening `/*` (already consumed) and the final closing `*/`. 
+ fn tokenize_multiline_comment_borrowed( + &self, + chars: &mut State<'a>, + ) -> Result<&'a str, TokenizerError> { + let start_pos = chars.byte_pos; let mut nested = 1; let supports_nested_comments = self.dialect.supports_nested_comments(); @@ -2113,24 +2207,22 @@ impl<'a> Tokenizer<'a> { match chars.next() { Some('/') if matches!(chars.peek(), Some('*')) && supports_nested_comments => { chars.next(); // consume the '*' - s.push('/'); - s.push('*'); nested += 1; } Some('*') if matches!(chars.peek(), Some('/')) => { chars.next(); // consume the '/' nested -= 1; if nested == 0 { - break Ok(Some(Token::Whitespace(Whitespace::MultiLineComment(s)))); + // We've consumed the final */, so exclude it from the slice + let end_pos = chars.byte_pos - 2; // Subtract 2 bytes for '*' and '/' + return Ok(&chars.source[start_pos..end_pos]); } - s.push('*'); - s.push('/'); } - Some(ch) => { - s.push(ch); + Some(_) => { + // Just consume the character, don't need to push to string } None => { - break self.tokenizer_error( + return self.tokenizer_error( chars.location(), "Unexpected EOF while in a multi-line comment", ); @@ -2139,27 +2231,66 @@ impl<'a> Tokenizer<'a> { } } - fn parse_quoted_ident(&self, chars: &mut State, quote_end: char) -> (String, Option) { + fn parse_quoted_ident(&self, chars: &mut State<'a>, quote_end: char) -> (String, Option) { + let (cow, last_char) = self.parse_quoted_ident_borrowed(chars, quote_end); + (cow.into_owned(), last_char) + } + + /// Parse quoted identifier, returning borrowed slice when possible. + /// Returns `(Cow<'a, str>, Option)` where the `Option` is the closing quote. 
+ fn parse_quoted_ident_borrowed( + &self, + chars: &mut State<'a>, + quote_end: char, + ) -> (Cow<'a, str>, Option<char>) { + let content_start = chars.byte_pos; + let mut has_doubled_quotes = false; let mut last_char = None; - let mut s = String::new(); + + // Scan to find the end and detect doubled quotes while let Some(ch) = chars.next() { if ch == quote_end { if chars.peek() == Some(&quote_end) { - chars.next(); - s.push(ch); - if !self.unescape { - // In no-escape mode, the given query has to be saved completely - s.push(ch); - } + has_doubled_quotes = true; + chars.next(); // consume the second quote } else { last_char = Some(quote_end); break; } - } else { - s.push(ch); } } - (s, last_char) + + let content_end = if last_char.is_some() { + chars.byte_pos - 1 // exclude the closing quote + } else { + chars.byte_pos + }; + + let content = &chars.source[content_start..content_end]; + + // If no doubled quotes, we can always borrow + if !has_doubled_quotes { + return (Cow::Borrowed(content), last_char); + } + + // If unescape=false, keep the content as-is (with doubled quotes) + if !self.unescape { + return (Cow::Borrowed(content), last_char); + } + + // Need to unescape: process doubled quotes + let mut result = String::new(); + let mut chars_iter = content.chars(); + + while let Some(ch) = chars_iter.next() { + result.push(ch); + if ch == quote_end { + // This is the first of a doubled quote, skip the second one + chars_iter.next(); + } + } + + (Cow::Owned(result), last_char) } #[allow(clippy::unnecessary_wraps)] @@ -2176,39 +2307,138 @@ impl<'a> Tokenizer<'a> { /// Read from `chars` until `predicate` returns `false` or EOF is hit. /// Return the characters read as String, and keep the first non-matching /// char available as `chars.next()`. 
-fn peeking_take_while(chars: &mut State, mut predicate: impl FnMut(char) -> bool) -> String { - let mut s = String::new(); +fn peeking_take_while(chars: &mut State, predicate: impl FnMut(char) -> bool) -> String { + peeking_take_while_ref(chars, predicate).to_string() +} + +/// Borrow a slice from the original string until `predicate` returns `false` or EOF is hit. +/// Returns a borrowed slice of the source string containing the matched characters. +/// This is the zero-copy version of `peeking_take_while`. +fn peeking_take_while_ref<'a>( + chars: &mut State<'a>, + mut predicate: impl FnMut(char) -> bool, +) -> &'a str { + // Record the starting byte position + let start_pos = chars.byte_pos; + + // Consume characters while predicate is true while let Some(&ch) = chars.peek() { if predicate(ch) { - chars.next(); // consume - s.push(ch); + chars.next(); // consume (this updates byte_pos) } else { break; } } - s + + // Get the ending byte position + let end_pos = chars.byte_pos; + + // Return the slice from the original source + &chars.source[start_pos..end_pos] } -/// Same as peeking_take_while, but also passes the next character to the predicate. -fn peeking_next_take_while( - chars: &mut State, +/// Borrow a slice from the original string until `predicate` returns `false` or EOF is hit. +/// This version also passes the next character to the predicate for lookahead, taking +/// both the current char and optional next char. Returns a borrowed slice of the source +/// string containing the matched characters. +/// +/// This is a zero-copy version of `peeking_next_take_while`. 
+fn peeking_take_while_next_ref<'a>( + chars: &mut State<'a>, mut predicate: impl FnMut(char, Option<char>) -> bool, -) -> String { - let mut s = String::new(); +) -> &'a str { + // Record the starting byte position + let start_pos = chars.byte_pos; + + // Consume characters while predicate is true while let Some(&ch) = chars.peek() { - let next_char = chars.peekable.clone().nth(1); + let next_char = chars.peek_nth(1); if predicate(ch, next_char) { - chars.next(); // consume - s.push(ch); + chars.next(); // consume (this updates byte_pos) } else { break; } } - s + + // Get the ending byte position + let end_pos = chars.byte_pos; + + // Return the slice from the original source + &chars.source[start_pos..end_pos] +} + +/// Same as peeking_take_while, but also passes the next character to the predicate. +fn peeking_next_take_while( + chars: &mut State, + predicate: impl FnMut(char, Option<char>) -> bool, +) -> String { + peeking_take_while_next_ref(chars, predicate).to_string() } fn unescape_single_quoted_string(chars: &mut State<'_>) -> Option<String> { - Unescape::new(chars).unescape() + borrow_or_unescape_single_quoted_string(chars, true).map(|cow| cow.into_owned()) +} + +/// Scans a single-quoted string and returns either a borrowed slice or an unescaped owned string. +/// +/// Strategy: Scan once to find the end and detect escape sequences. 
+/// - If no escapes exist (or unescape=false), return Cow::Borrowed +/// - If escapes exist and unescape=true, reprocess using existing Unescape logic +fn borrow_or_unescape_single_quoted_string<'a>( + chars: &mut State<'a>, + unescape: bool, +) -> Option<Cow<'a, str>> { + let content_start = chars.byte_pos; + chars.next(); // consume opening ' + + // Scan to find end and check for escape sequences + let mut has_escapes = false; + + loop { + match chars.next() { + Some('\'') => { + // Check for doubled single quote (escape) + if chars.peek() == Some(&'\'') { + has_escapes = true; + chars.next(); // consume the second ' + } else { + // End of string found (including closing ') + let content_end = chars.byte_pos; + let full_content = &chars.source[content_start..content_end]; + + // If no unescaping needed, return borrowed (without quotes) + if !unescape || !has_escapes { + // Strip opening and closing quotes + return Some(Cow::Borrowed(&full_content[1..full_content.len() - 1])); + } + + // Need to unescape - reprocess using existing logic + // Create a temporary State from the content + let mut temp_state = State { + peekable: full_content.chars().peekable(), + source: full_content, + line: 0, + col: 0, + byte_pos: 0, + }; + + return Unescape::new(&mut temp_state).unescape().map(Cow::Owned); + } + } + Some('\\') => { + has_escapes = true; + // Skip next character (it's escaped) + chars.next(); + } + Some(_) => { + // Regular character, continue scanning + } + None => { + // Unexpected EOF + return None; + } + } + } } struct Unescape<'a: 'b, 'b> { @@ -2356,8 +2586,83 @@ impl<'a: 'b, 'b> Unescape<'a, 'b> { } fn unescape_unicode_single_quoted_string(chars: &mut State<'_>) -> Result<String, TokenizerError> { + borrow_or_unescape_unicode_single_quoted_string(chars, true).map(|cow| cow.into_owned()) +} + +/// Scans a unicode-escaped single-quoted string and returns either a borrowed slice or an unescaped owned string. +/// +/// Strategy: Scan once to find the end and detect escape sequences. 
+/// - If no escapes exist (or unescape=false), return Cow::Borrowed +/// - If escapes exist and unescape=true, reprocess with unicode escaping logic +fn borrow_or_unescape_unicode_single_quoted_string<'a>( + chars: &mut State<'a>, + unescape: bool, +) -> Result<Cow<'a, str>, TokenizerError> { + let content_start = chars.byte_pos; + let error_loc = chars.location(); + chars.next(); // consume the opening quote + + // Scan to find end and check for escape sequences + let mut has_escapes = false; + + loop { + match chars.next() { + Some('\'') => { + // Check for doubled single quote (escape) + if chars.peek() == Some(&'\'') { + has_escapes = true; + chars.next(); // consume the second ' + } else { + // End of string found (including closing ') + let content_end = chars.byte_pos; + let full_content = &chars.source[content_start..content_end]; + + // If no unescaping needed, return borrowed (without quotes) + if !unescape || !has_escapes { + // Strip opening and closing quotes + return Ok(Cow::Borrowed(&full_content[1..full_content.len() - 1])); + } + + // Need to unescape - reprocess with unicode logic + // Create a temporary State from the content + let mut temp_state = State { + peekable: full_content.chars().peekable(), + source: full_content, + line: 0, + col: 0, + byte_pos: 0, + }; + + return process_unicode_string_with_escapes(&mut temp_state, error_loc) + .map(Cow::Owned); + } + } + Some('\\') => { + has_escapes = true; + // Skip next character (it's escaped or part of unicode sequence) + chars.next(); + } + Some(_) => { + // Regular character, continue scanning + } + None => { + return Err(TokenizerError { + message: "Unterminated unicode encoded string literal".to_string(), + location: error_loc, + }); + } + } + } +} + +/// Process a unicode-escaped string using the original unescape logic +fn process_unicode_string_with_escapes( + chars: &mut State<'_>, + error_loc: Location, +) -> Result<String, TokenizerError> { let mut unescaped = String::new(); chars.next(); // consume the opening quote + 
while let Some(c) = chars.next() { match c { '\'' => { @@ -2384,9 +2689,10 @@ fn unescape_unicode_single_quoted_string(chars: &mut State<'_>) -> Result Result<(), ParserError> { is_identifier_part(ch) } - fn parse_prefix(&self, parser: &mut Parser) -> Option> { + fn parse_prefix(&self, parser: &Parser) -> Option> { if parser.consume_token(&Token::Number("1".to_string(), false)) { Some(Ok(Expr::Value(Value::Null.with_empty_span()))) } else { @@ -72,7 +72,7 @@ fn custom_infix_parser() -> Result<(), ParserError> { fn parse_infix( &self, - parser: &mut Parser, + parser: &Parser, expr: &Expr, _precedence: u8, ) -> Option> { @@ -110,7 +110,7 @@ fn custom_statement_parser() -> Result<(), ParserError> { is_identifier_part(ch) } - fn parse_statement(&self, parser: &mut Parser) -> Option> { + fn parse_statement(&self, parser: &Parser) -> Option> { if parser.parse_keyword(Keyword::SELECT) { for _ in 0..3 { let _ = parser.next_token(); diff --git a/tests/sqlparser_mssql.rs b/tests/sqlparser_mssql.rs index a947db49b..24937d0a2 100644 --- a/tests/sqlparser_mssql.rs +++ b/tests/sqlparser_mssql.rs @@ -2213,7 +2213,7 @@ fn parse_mssql_if_else() { #[test] fn test_mssql_if_else_span() { let sql = "IF 1 = 1 SELECT '1' ELSE SELECT '2'"; - let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); + let parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); assert_eq!( parser.parse_statement().unwrap().span(), Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) @@ -2226,7 +2226,7 @@ fn test_mssql_if_else_multiline_span() { let sql_line2 = "SELECT '1'"; let sql_line3 = "ELSE SELECT '2'"; let sql = [sql_line1, sql_line2, sql_line3].join("\n"); - let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(&sql).unwrap(); + let parser = Parser::new(&MsSqlDialect {}).try_with_sql(&sql).unwrap(); assert_eq!( parser.parse_statement().unwrap().span(), Span::new(