diff --git a/parser/src/ast/check.rs b/parser/src/ast/check.rs
index 48d5da595..7892530d8 100644
--- a/parser/src/ast/check.rs
+++ b/parser/src/ast/check.rs
@@ -1,5 +1,5 @@
 //! Check for additional syntax error
-use crate::{ast::*, error::Error};
+use crate::{ast::*, error::Error, Result};
 
 impl Cmd {
     /// Statement accessor
@@ -27,7 +27,7 @@ impl Cmd {
         self.stmt().readonly()
     }
     /// check for extra rules
-    pub fn check(&self) -> Result<(), Error> {
+    pub fn check(&self) -> Result<()> {
         self.stmt().check()
     }
 }
@@ -84,7 +84,7 @@ impl Stmt {
     }
 
     /// check for extra rules
-    pub fn check(&self) -> Result<(), Error> {
+    pub fn check(&self) -> Result<()> {
         match self {
             Self::AlterTable(alter_table) => {
                 if let AlterTableBody::AddColumn(cd) = &alter_table.body {
@@ -193,7 +193,7 @@ impl Stmt {
 }
 
 impl CreateTableBody {
     /// check for extra rules
-    pub fn check(&self, tbl_name: &QualifiedName) -> Result<(), Error> {
+    pub fn check(&self, tbl_name: &QualifiedName) -> Result<()> {
         if let Self::ColumnsAndConstraints {
             columns,
             constraints: _,
@@ -310,7 +310,7 @@ impl OneSelect {
         }
     }
     /// Check all VALUES have the same number of terms
-    pub fn push(values: &mut Vec<Vec<Expr>>, v: Vec<Expr>) -> Result<(), Error> {
+    pub fn push(values: &mut Vec<Vec<Expr>>, v: Vec<Expr>) -> Result<()> {
         if values[0].len() != v.len() {
             return Err(Error::Custom(
                 "all VALUES must have the same number of terms".to_owned(),
diff --git a/parser/src/lexer.rs b/parser/src/lexer.rs
index 4bc9baa1a..1ea608e14 100644
--- a/parser/src/lexer.rs
+++ b/parser/src/lexer.rs
@@ -1,4 +1,4 @@
-use crate::{error::Error, token::TokenType};
+use crate::{error::Error, token::TokenType, Result};
 
 include!(concat!(env!("OUT_DIR"), "/keywords.rs"));
 
@@ -29,7 +29,7 @@ pub struct Lexer<'a> {
 }
 
 impl<'a> Iterator for Lexer<'a> {
-    type Item = Result<Token<'a>, Error>;
+    type Item = Result<Token<'a>>;
 
     #[inline]
     fn next(&mut self) -> Option<Self::Item> {
@@ -78,9 +78,9 @@ impl<'a> Lexer<'a> {
     }
 
     #[inline]
-    pub fn mark<F, T>(&mut self, exc: F) -> Result<T, Error>
+    pub fn mark<F, T>(&mut self, exc: F) -> Result<T>
     where
-        F: FnOnce(&mut Self) -> Result<T, Error>,
+        F: FnOnce(&mut Self) -> Result<T>,
     {
         let start_offset = self.offset;
         let result = exc(self);
@@ -134,7 +134,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn eat_while_number_digit(&mut self) -> Result<(), Error> {
+    fn eat_while_number_digit(&mut self) -> Result<()> {
         loop {
             let start = self.offset;
             self.eat_while(|b| b.is_some() && b.unwrap().is_ascii_digit());
@@ -160,7 +160,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn eat_while_number_hexdigit(&mut self) -> Result<(), Error> {
+    fn eat_while_number_hexdigit(&mut self) -> Result<()> {
         loop {
             let start = self.offset;
             self.eat_while(|b| b.is_some() && b.unwrap().is_ascii_hexdigit());
@@ -243,7 +243,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn eat_slash_or_comment(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_slash_or_comment(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         self.eat_and_assert(|b| b == b'/');
         match self.peek() {
@@ -358,7 +358,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn eat_ne(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_ne(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         self.eat_and_assert(|b| b == b'!');
         match self.peek() {
@@ -395,7 +395,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn eat_lit_or_id(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_lit_or_id(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         let quote = self.eat().unwrap();
         debug_assert!(quote == b'\'' || quote == b'"' || quote == b'`');
@@ -433,7 +433,7 @@ impl<'a> Lexer<'a> {
         })
     }
 
-    fn eat_dot_or_frac(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_dot_or_frac(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         self.eat_and_assert(|b| b == b'.');
 
@@ -464,7 +464,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn eat_expo(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_expo(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         self.eat_and_assert(|b| b == b'e' || b == b'E');
         match self.peek() {
@@ -490,7 +490,7 @@ impl<'a> Lexer<'a> {
         })
     }
 
-    fn eat_number(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_number(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         let first_digit = self.eat().unwrap();
         debug_assert!(first_digit.is_ascii_digit());
@@ -548,7 +548,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn eat_bracket(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_bracket(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         self.eat_and_assert(|b| b == b'[');
         self.eat_while(|b| b.is_some() && b.unwrap() != b']');
@@ -567,7 +567,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn eat_var(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_var(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         let tok = self.eat().unwrap();
         debug_assert!(tok == b'?' || tok == b'$' || tok == b'@' || tok == b'#' || tok == b':');
@@ -599,7 +599,7 @@ impl<'a> Lexer<'a> {
     }
 
     #[inline]
-    fn eat_blob_or_id(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_blob_or_id(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         let start_char = self.eat().unwrap();
         debug_assert!(is_identifier_start(start_char));
@@ -643,7 +643,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
-    fn eat_unrecognized(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_unrecognized(&mut self) -> Result<Token<'a>> {
         let start = self.offset;
         self.eat_while(|b| b.is_some() && !b.unwrap().is_ascii_whitespace());
         Err(Error::UnrecognizedToken(
diff --git a/parser/src/lib.rs b/parser/src/lib.rs
index bc1e427b5..14f16356c 100644
--- a/parser/src/lib.rs
+++ b/parser/src/lib.rs
@@ -3,3 +3,5 @@ pub mod error;
 pub mod lexer;
 pub mod parser;
 pub mod token;
+
+type Result<T> = std::result::Result<T, error::Error>;
diff --git a/parser/src/parser.rs b/parser/src/parser.rs
index f0e148b7a..283fda2d9 100644
--- a/parser/src/parser.rs
+++ b/parser/src/parser.rs
@@ -13,6 +13,7 @@ use crate::ast::{
 use crate::error::Error;
 use crate::lexer::{Lexer, Token};
 use crate::token::TokenType::{self, *};
+use crate::Result;
 
 macro_rules! peek_expect {
     ( $parser:expr, $( $x:ident ),* $(,)?) => {
@@ -89,7 +90,7 @@ fn from_bytes(bytes: &[u8]) -> String {
 }
 
 #[inline]
-fn join_type_from_bytes(s: &[u8]) -> Result<JoinType, Error> {
+fn join_type_from_bytes(s: &[u8]) -> Result<JoinType> {
     if b"CROSS".eq_ignore_ascii_case(s) {
         Ok(JoinType::INNER | JoinType::CROSS)
     } else if b"FULL".eq_ignore_ascii_case(s) {
@@ -113,7 +114,7 @@ fn join_type_from_bytes(s: &[u8]) -> Result<JoinType> {
 }
 
 #[inline]
-fn new_join_type(n0: &[u8], n1: Option<&[u8]>, n2: Option<&[u8]>) -> Result<JoinType, Error> {
+fn new_join_type(n0: &[u8], n1: Option<&[u8]>, n2: Option<&[u8]>) -> Result<JoinType> {
     let mut jt = join_type_from_bytes(n0)?;
 
     if let Some(n1) = n1 {
@@ -147,7 +148,7 @@ pub struct Parser<'a> {
 }
 
 impl<'a> Iterator for Parser<'a> {
-    type Item = Result<Cmd, Error>;
+    type Item = Result<Cmd>;
 
     #[inline]
     fn next(&mut self) -> Option<Self::Item> {
@@ -177,7 +178,7 @@ impl<'a> Parser<'a> {
     }
 
     // entrypoint of parsing
-    pub fn next_cmd(&mut self) -> Result<Option<Cmd>, Error> {
+    pub fn next_cmd(&mut self) -> Result<Option<Cmd>> {
         // consumes prefix SEMI
         while let Some(token) = self.peek()? {
             if token.token_type == Some(TK_SEMI) {
@@ -241,9 +242,7 @@ impl<'a> Parser<'a> {
     }
 
     #[inline(always)]
-    fn consume_lexer_without_whitespaces_or_comments(
-        &mut self,
-    ) -> Option<Result<Token<'a>, Error>> {
+    fn consume_lexer_without_whitespaces_or_comments(&mut self) -> Option<Result<Token<'a>>> {
         debug_assert!(!self.peekable);
         loop {
             let tok = self.lexer.next();
@@ -257,7 +256,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn next_token(&mut self) -> Result<Option<Token<'a>>, Error> {
+    fn next_token(&mut self) -> Result<Option<Token<'a>>> {
         debug_assert!(!self.peekable);
 
         let mut next = self.consume_lexer_without_whitespaces_or_comments();
@@ -492,9 +491,9 @@ impl<'a> Parser<'a> {
     }
 
     #[inline]
-    fn mark<F, T>(&mut self, exc: F) -> Result<T, Error>
+    fn mark<F, T>(&mut self, exc: F) -> Result<T>
     where
-        F: FnOnce(&mut Self) -> Result<T, Error>,
+        F: FnOnce(&mut Self) -> Result<T>,
     {
         let old_peekable = self.peekable;
         let old_current_token = self.current_token.clone();
@@ -523,7 +522,7 @@ impl<'a> Parser<'a> {
 
     /// Get the next token from the lexer
     #[inline]
-    fn eat(&mut self) -> Result<Option<Token<'a>>, Error> {
+    fn eat(&mut self) -> Result<Option<Token<'a>>> {
         let result = self.peek()?;
         self.peekable = false; // Clear the peek mark after consuming
         Ok(result)
@@ -531,7 +530,7 @@ impl<'a> Parser<'a> {
 
     /// Peek at the next token without consuming it
     #[inline]
-    fn peek(&mut self) -> Result<Option<Token<'a>>, Error> {
+    fn peek(&mut self) -> Result<Option<Token<'a>>> {
         if self.peekable {
             return Ok(Some(self.current_token.clone()));
         }
@@ -540,7 +539,7 @@ impl<'a> Parser<'a> {
     }
 
     #[inline]
-    fn eat_no_eof(&mut self) -> Result<Token<'a>, Error> {
+    fn eat_no_eof(&mut self) -> Result<Token<'a>> {
         match self.eat()? {
             None => Err(Error::ParseUnexpectedEOF),
             Some(token) => Ok(token),
@@ -548,14 +547,14 @@ impl<'a> Parser<'a> {
     }
 
     #[inline]
-    fn peek_no_eof(&mut self) -> Result<Token<'a>, Error> {
+    fn peek_no_eof(&mut self) -> Result<Token<'a>> {
         match self.peek()? {
             None => Err(Error::ParseUnexpectedEOF),
             Some(token) => Ok(token),
         }
     }
 
-    fn parse_stmt(&mut self) -> Result<Stmt, Error> {
+    fn parse_stmt(&mut self) -> Result<Stmt> {
         let tok = peek_expect!(
             self,
             TK_BEGIN,
@@ -606,7 +605,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_nm(&mut self) -> Result<Name, Error> {
+    fn parse_nm(&mut self) -> Result<Name> {
         let tok = eat_expect!(self, TK_ID, TK_STRING, TK_INDEXED, TK_JOIN_KW);
 
         let first_char = tok.value[0]; // no need to check empty
@@ -616,7 +615,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_transopt(&mut self) -> Result<Option<Name>, Error> {
+    fn parse_transopt(&mut self) -> Result<Option<Name>> {
         match self.peek()? {
             None => Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -637,7 +636,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_begin(&mut self) -> Result<Stmt, Error> {
+    fn parse_begin(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_BEGIN);
 
         let transtype = match self.peek()? {
@@ -665,14 +664,14 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_commit(&mut self) -> Result<Stmt, Error> {
+    fn parse_commit(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_COMMIT, TK_END);
         Ok(Stmt::Commit {
             name: self.parse_transopt()?,
         })
     }
 
-    fn parse_rollback(&mut self) -> Result<Stmt, Error> {
+    fn parse_rollback(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_ROLLBACK);
         let tx_name = self.parse_transopt()?;
 
@@ -700,14 +699,14 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_savepoint(&mut self) -> Result<Stmt, Error> {
+    fn parse_savepoint(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_SAVEPOINT);
         Ok(Stmt::Savepoint {
             name: self.parse_nm()?,
         })
     }
 
-    fn parse_release(&mut self) -> Result<Stmt, Error> {
+    fn parse_release(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_RELEASE);
 
         if self.peek_no_eof()?.token_type == Some(TK_SAVEPOINT) {
@@ -719,7 +718,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_create_view(&mut self, temporary: bool) -> Result<Stmt, Error> {
+    fn parse_create_view(&mut self, temporary: bool) -> Result<Stmt> {
         eat_assert!(self, TK_VIEW);
         let if_not_exists = self.parse_if_not_exists()?;
         let view_name = self.parse_fullname(false)?;
@@ -735,7 +734,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_create_materialized_view(&mut self) -> Result<Stmt, Error> {
+    fn parse_create_materialized_view(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_MATERIALIZED);
         eat_assert!(self, TK_VIEW);
         let if_not_exists = self.parse_if_not_exists()?;
@@ -751,7 +750,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_vtab_arg(&mut self) -> Result<String, Error> {
+    fn parse_vtab_arg(&mut self) -> Result<String> {
         let tok = self.peek_no_eof()?;
 
         // minus len() because lexer already consumed the token
@@ -793,7 +792,7 @@ impl<'a> Parser<'a> {
         Ok(from_bytes(&self.lexer.input[start_idx..end_idx]))
     }
 
-    fn parse_create_virtual(&mut self) -> Result<Stmt, Error> {
+    fn parse_create_virtual(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_VIRTUAL);
         eat_expect!(self, TK_TABLE);
         let if_not_exists = self.parse_if_not_exists()?;
@@ -837,7 +836,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    fn parse_create_stmt(&mut self) -> Result<Stmt, Error> {
+    fn parse_create_stmt(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_CREATE);
         let mut first_tok = peek_expect!(
             self,
@@ -868,7 +867,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_with_stmt(&mut self) -> Result<Stmt, Error> {
+    fn parse_with_stmt(&mut self) -> Result<Stmt> {
         let with = self.parse_with()?;
         debug_assert!(with.is_some());
         let first_tok =
@@ -883,7 +882,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_if_not_exists(&mut self) -> Result<bool, Error> {
+    fn parse_if_not_exists(&mut self) -> Result<bool> {
         if let Some(tok) = self.peek()? {
             if tok.token_type == Some(TK_IF) {
                 eat_assert!(self, TK_IF);
@@ -899,7 +898,7 @@ impl<'a> Parser<'a> {
         Ok(true)
     }
 
-    fn parse_fullname(&mut self, allow_alias: bool) -> Result<QualifiedName, Error> {
+    fn parse_fullname(&mut self, allow_alias: bool) -> Result<QualifiedName> {
         let first_name = self.parse_nm()?;
 
         let secone_name = if let Some(tok) = self.peek()? {
@@ -943,7 +942,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_signed(&mut self) -> Result<Box<Expr>, Error> {
+    fn parse_signed(&mut self) -> Result<Box<Expr>> {
         peek_expect!(self, TK_FLOAT, TK_INTEGER, TK_PLUS, TK_MINUS);
 
         let expr = self.parse_expr_operand()?;
@@ -958,7 +957,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_type(&mut self) -> Result<Option<Type>, Error> {
+    fn parse_type(&mut self) -> Result<Option<Type>> {
         let mut type_name = if let Some(tok) = self.peek()? {
             match tok.token_type.unwrap().fallback_id_if_ok() {
                 TK_ID | TK_STRING => {
@@ -1032,7 +1031,7 @@ impl<'a> Parser<'a> {
     ///
     /// this function detect precedence by peeking first token of operator
     /// after parsing a operand (binary operator)
-    fn current_token_precedence(&mut self) -> Result<Option<u8>, Error> {
+    fn current_token_precedence(&mut self) -> Result<Option<u8>> {
         let tok = self.peek()?;
         if tok.is_none() {
             return Ok(None);
@@ -1056,7 +1055,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_distinct(&mut self) -> Result<Option<Distinctness>, Error> {
+    fn parse_distinct(&mut self) -> Result<Option<Distinctness>> {
         match self.peek()? {
             None => Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -1073,7 +1072,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_filter_clause(&mut self) -> Result<Option<Box<Expr>>, Error> {
+    fn parse_filter_clause(&mut self) -> Result<Option<Box<Expr>>> {
         match self.peek()? {
             None => return Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -1091,7 +1090,7 @@ impl<'a> Parser<'a> {
         Ok(Some(expr))
     }
 
-    fn parse_frame_opt(&mut self) -> Result<Option<FrameClause>, Error> {
+    fn parse_frame_opt(&mut self) -> Result<Option<FrameClause>> {
         let range_or_rows = match self.peek()? {
             None => return Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -1201,7 +1200,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    fn parse_window(&mut self) -> Result<Window, Error> {
+    fn parse_window(&mut self) -> Result<Window> {
         let name = match self.peek()? {
             None => None,
             Some(tok) => match tok.token_type.unwrap() {
@@ -1232,7 +1231,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_over_clause(&mut self) -> Result<Option<Over>, Error> {
+    fn parse_over_clause(&mut self) -> Result<Option<Over>> {
         match self.peek()? {
             None => return Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -1255,7 +1254,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_filter_over(&mut self) -> Result<FunctionTail, Error> {
+    fn parse_filter_over(&mut self) -> Result<FunctionTail> {
         let filter_clause = self.parse_filter_clause()?;
         let over_clause = self.parse_over_clause()?;
         Ok(FunctionTail {
@@ -1264,7 +1263,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_raise_type(&mut self) -> Result<ResolveType, Error> {
+    fn parse_raise_type(&mut self) -> Result<ResolveType> {
         let tok = eat_expect!(self, TK_ROLLBACK, TK_ABORT, TK_FAIL);
 
         match tok.token_type.unwrap() {
@@ -1275,7 +1274,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_expr_operand(&mut self) -> Result<Box<Expr>, Error> {
+    fn parse_expr_operand(&mut self) -> Result<Box<Expr>> {
         let tok = peek_expect!(
             self,
             TK_LP,
@@ -1542,7 +1541,7 @@ impl<'a> Parser<'a> {
     }
 
     #[allow(clippy::vec_box)]
-    fn parse_expr_list(&mut self) -> Result<Vec<Box<Expr>>, Error> {
+    fn parse_expr_list(&mut self) -> Result<Vec<Box<Expr>>> {
         let mut exprs = vec![];
         while let Some(tok) = self.peek()? {
             match tok.token_type.unwrap().fallback_id_if_ok() {
@@ -1564,7 +1563,7 @@ impl<'a> Parser<'a> {
         Ok(exprs)
     }
 
-    fn parse_expr(&mut self, precedence: u8) -> Result<Box<Expr>, Error> {
+    fn parse_expr(&mut self, precedence: u8) -> Result<Box<Expr>> {
         let mut result = self.parse_expr_operand()?;
 
         loop {
@@ -1892,7 +1891,7 @@ impl<'a> Parser<'a> {
         Ok(result)
     }
 
-    fn parse_collate(&mut self) -> Result<Option<Name>, Error> {
+    fn parse_collate(&mut self) -> Result<Option<Name>> {
         if let Some(tok) = self.peek()? {
             if tok.token_type == Some(TK_COLLATE) {
                 eat_assert!(self, TK_COLLATE);
@@ -1911,7 +1910,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_sort_order(&mut self) -> Result<Option<SortOrder>, Error> {
+    fn parse_sort_order(&mut self) -> Result<Option<SortOrder>> {
         match self.peek()? {
             Some(tok) if tok.token_type == Some(TK_ASC) => {
                 eat_assert!(self, TK_ASC);
@@ -1925,7 +1924,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_eid(&mut self) -> Result<IndexedColumn, Error> {
+    fn parse_eid(&mut self) -> Result<IndexedColumn> {
         let nm = self.parse_nm()?;
         let collate = self.parse_collate()?;
         let sort_order = self.parse_sort_order()?;
@@ -1936,7 +1935,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_eid_list(&mut self) -> Result<Vec<IndexedColumn>, Error> {
+    fn parse_eid_list(&mut self) -> Result<Vec<IndexedColumn>> {
         if let Some(tok) = self.peek()? {
             if tok.token_type == Some(TK_LP) {
                 eat_assert!(self, TK_LP);
@@ -1962,7 +1961,7 @@ impl<'a> Parser<'a> {
         Ok(columns)
     }
 
-    fn parse_common_table_expr(&mut self) -> Result<CommonTableExpr, Error> {
+    fn parse_common_table_expr(&mut self) -> Result<CommonTableExpr> {
         let nm = self.parse_nm()?;
         let eid_list = self.parse_eid_list()?;
         eat_expect!(self, TK_AS);
@@ -1989,7 +1988,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_with(&mut self) -> Result<Option<With>, Error> {
+    fn parse_with(&mut self) -> Result<Option<With>> {
         if let Some(tok) = self.peek()? {
             if tok.token_type == Some(TK_WITH) {
                 eat_assert!(self, TK_WITH);
@@ -2022,7 +2021,7 @@ impl<'a> Parser<'a> {
         Ok(Some(With { recursive, ctes }))
     }
 
-    fn parse_as(&mut self) -> Result<Option<Name>, Error> {
+    fn parse_as(&mut self) -> Result<Option<Name>> {
         match self.peek()? {
             None => Ok(None),
             Some(tok) => match tok.token_type.unwrap().fallback_id_if_ok() {
@@ -2036,7 +2035,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_window_defn(&mut self) -> Result<WindowDef, Error> {
+    fn parse_window_defn(&mut self) -> Result<WindowDef> {
         let name = self.parse_nm()?;
         eat_expect!(self, TK_AS);
         eat_expect!(self, TK_LP);
@@ -2045,7 +2044,7 @@ impl<'a> Parser<'a> {
         Ok(WindowDef { name, window })
     }
 
-    fn parse_window_clause(&mut self) -> Result<Vec<WindowDef>, Error> {
+    fn parse_window_clause(&mut self) -> Result<Vec<WindowDef>> {
         match self.peek()? {
             None => return Ok(vec![]),
             Some(tok) => match tok.token_type.unwrap() {
@@ -2070,7 +2069,7 @@ impl<'a> Parser<'a> {
         Ok(result)
     }
 
-    fn parse_group_by(&mut self) -> Result<Option<GroupBy>, Error> {
+    fn parse_group_by(&mut self) -> Result<Option<GroupBy>> {
         match self.peek()? {
             None => return Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -2094,7 +2093,7 @@ impl<'a> Parser<'a> {
         Ok(Some(GroupBy { exprs, having }))
     }
 
-    fn parse_where(&mut self) -> Result<Option<Box<Expr>>, Error> {
+    fn parse_where(&mut self) -> Result<Option<Box<Expr>>> {
         match self.peek()? {
             None => Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -2108,7 +2107,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_indexed(&mut self) -> Result<Option<Indexed>, Error> {
+    fn parse_indexed(&mut self) -> Result<Option<Indexed>> {
         match self.peek()? {
             None => Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -2127,7 +2126,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_nm_list(&mut self) -> Result<Vec<Name>, Error> {
+    fn parse_nm_list(&mut self) -> Result<Vec<Name>> {
         let mut names = vec![self.parse_nm()?];
 
         loop {
@@ -2143,7 +2142,7 @@ impl<'a> Parser<'a> {
         Ok(names)
     }
 
-    fn parse_nm_list_opt(&mut self) -> Result<Vec<Name>, Error> {
+    fn parse_nm_list_opt(&mut self) -> Result<Vec<Name>> {
         match self.peek()? {
             Some(tok) if tok.token_type == Some(TK_LP) => {
                 eat_assert!(self, TK_LP);
@@ -2156,7 +2155,7 @@ impl<'a> Parser<'a> {
         Ok(result)
     }
 
-    fn parse_on_using(&mut self) -> Result<Option<JoinConstraint>, Error> {
+    fn parse_on_using(&mut self) -> Result<Option<JoinConstraint>> {
         match self.peek()? {
             None => Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -2177,7 +2176,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_joined_tables(&mut self) -> Result<Vec<JoinedSelectTable>, Error> {
+    fn parse_joined_tables(&mut self) -> Result<Vec<JoinedSelectTable>> {
         let mut result = vec![];
         while let Some(tok) = self.peek()? {
             let op = match tok.token_type.unwrap() {
@@ -2298,7 +2297,7 @@ impl<'a> Parser<'a> {
         Ok(result)
     }
 
-    fn parse_from_clause(&mut self) -> Result<FromClause, Error> {
+    fn parse_from_clause(&mut self) -> Result<FromClause> {
         let tok = peek_expect!(self, TK_ID, TK_STRING, TK_INDEXED, TK_JOIN_KW, TK_LP);
 
         match tok.token_type.unwrap().fallback_id_if_ok() {
@@ -2358,7 +2357,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_from_clause_opt(&mut self) -> Result<Option<FromClause>, Error> {
+    fn parse_from_clause_opt(&mut self) -> Result<Option<FromClause>> {
         match self.peek()? {
             None => return Ok(None),
             Some(tok) if tok.token_type == Some(TK_FROM) => {
@@ -2370,7 +2369,7 @@ impl<'a> Parser<'a> {
         Ok(Some(self.parse_from_clause()?))
     }
 
-    fn parse_select_column(&mut self) -> Result<ResultColumn, Error> {
+    fn parse_select_column(&mut self) -> Result<ResultColumn> {
         match self.peek_no_eof()?.token_type.unwrap().fallback_id_if_ok() {
             TK_STAR => {
                 eat_assert!(self, TK_STAR);
@@ -2379,7 +2378,7 @@ impl<'a> Parser<'a> {
             tt => {
                 // dot STAR case
                 if tt == TK_ID || tt == TK_STRING || tt == TK_INDEXED || tt == TK_JOIN_KW {
-                    if let Ok(res) = self.mark(|p| -> Result<ResultColumn, Error> {
+                    if let Ok(res) = self.mark(|p| -> Result<ResultColumn> {
                         let name = p.parse_nm()?;
                         eat_expect!(p, TK_DOT);
                         eat_expect!(p, TK_STAR);
@@ -2396,7 +2395,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_select_columns(&mut self) -> Result<Vec<ResultColumn>, Error> {
+    fn parse_select_columns(&mut self) -> Result<Vec<ResultColumn>> {
         let mut result = vec![self.parse_select_column()?];
 
         while let Some(tok) = self.peek()? {
@@ -2413,7 +2412,7 @@ impl<'a> Parser<'a> {
     }
 
     #[allow(clippy::vec_box)]
-    fn parse_nexpr_list(&mut self) -> Result<Vec<Box<Expr>>, Error> {
+    fn parse_nexpr_list(&mut self) -> Result<Vec<Box<Expr>>> {
         let mut result = vec![self.parse_expr(0)?];
         while let Some(tok) = self.peek()? {
             if tok.token_type == Some(TK_COMMA) {
@@ -2428,7 +2427,7 @@ impl<'a> Parser<'a> {
         Ok(result)
    }
 
-    fn parse_one_select(&mut self) -> Result<OneSelect, Error> {
+    fn parse_one_select(&mut self) -> Result<OneSelect> {
         let tok = eat_expect!(self, TK_SELECT, TK_VALUES);
         match tok.token_type.unwrap() {
             TK_SELECT => {
@@ -2470,7 +2469,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_select_body(&mut self) -> Result<SelectBody, Error> {
+    fn parse_select_body(&mut self) -> Result<SelectBody> {
         let select = self.parse_one_select()?;
         let mut compounds = vec![];
         while let Some(tok) = self.peek()? {
@@ -2504,7 +2503,7 @@ impl<'a> Parser<'a> {
         Ok(SelectBody { select, compounds })
     }
 
-    fn parse_sorted_column(&mut self) -> Result<SortedColumn, Error> {
+    fn parse_sorted_column(&mut self) -> Result<SortedColumn> {
         let expr = self.parse_expr(0)?;
         let sort_order = self.parse_sort_order()?;
 
@@ -2528,7 +2527,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_sort_list(&mut self) -> Result<Vec<SortedColumn>, Error> {
+    fn parse_sort_list(&mut self) -> Result<Vec<SortedColumn>> {
         let mut columns = vec![self.parse_sorted_column()?];
         loop {
             match self.peek()? {
@@ -2543,7 +2542,7 @@ impl<'a> Parser<'a> {
         Ok(columns)
     }
 
-    fn parse_order_by(&mut self) -> Result<Vec<SortedColumn>, Error> {
+    fn parse_order_by(&mut self) -> Result<Vec<SortedColumn>> {
         if let Some(tok) = self.peek()? {
             if tok.token_type == Some(TK_ORDER) {
                 eat_assert!(self, TK_ORDER);
@@ -2558,7 +2557,7 @@ impl<'a> Parser<'a> {
         self.parse_sort_list()
     }
 
-    fn parse_limit(&mut self) -> Result<Option<Limit>, Error> {
+    fn parse_limit(&mut self) -> Result<Option<Limit>> {
         if let Some(tok) = self.peek()? {
             if tok.token_type == Some(TK_LIMIT) {
                 eat_assert!(self, TK_LIMIT);
@@ -2587,7 +2586,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    fn parse_select_without_cte(&mut self, with: Option<With>) -> Result<Select, Error> {
+    fn parse_select_without_cte(&mut self, with: Option<With>) -> Result<Select> {
         let with = self.parse_with()?;
         self.parse_select_without_cte(with)
     }
 
-    fn parse_primary_table_constraint(&mut self) -> Result<TableConstraint, Error> {
+    fn parse_primary_table_constraint(&mut self) -> Result<TableConstraint> {
         eat_assert!(self, TK_PRIMARY);
         eat_expect!(self, TK_KEY);
         eat_expect!(self, TK_LP);
@@ -2619,7 +2618,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_unique_table_constraint(&mut self) -> Result<TableConstraint, Error> {
+    fn parse_unique_table_constraint(&mut self) -> Result<TableConstraint> {
         eat_assert!(self, TK_UNIQUE);
         eat_expect!(self, TK_LP);
         let columns = self.parse_sort_list()?;
@@ -2631,7 +2630,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_check_table_constraint(&mut self) -> Result<TableConstraint, Error> {
+    fn parse_check_table_constraint(&mut self) -> Result<TableConstraint> {
         eat_assert!(self, TK_CHECK);
         eat_expect!(self, TK_LP);
         let expr = self.parse_expr(0)?;
@@ -2639,7 +2638,7 @@ impl<'a> Parser<'a> {
         Ok(TableConstraint::Check(expr))
     }
 
-    fn parse_foreign_key_table_constraint(&mut self) -> Result<TableConstraint, Error> {
+    fn parse_foreign_key_table_constraint(&mut self) -> Result<TableConstraint> {
         eat_assert!(self, TK_FOREIGN);
         eat_expect!(self, TK_KEY);
         peek_expect!(self, TK_LP); // make sure we have columns
@@ -2654,7 +2653,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_named_table_constraints(&mut self) -> Result<Vec<NamedTableConstraint>, Error> {
+    fn parse_named_table_constraints(&mut self) -> Result<Vec<NamedTableConstraint>> {
         let mut result = vec![];
 
         while let Some(tok) = self.peek()? {
@@ -2708,7 +2707,7 @@ impl<'a> Parser<'a> {
         Ok(result)
     }
 
-    fn parse_table_option(&mut self) -> Result<TableOptions, Error> {
+    fn parse_table_option(&mut self) -> Result<TableOptions> {
         match self.peek()? {
             Some(tok) => match tok.token_type.unwrap().fallback_id_if_ok() {
                 TK_WITHOUT => {
@@ -2740,7 +2739,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_table_options(&mut self) -> Result<TableOptions, Error> {
+    fn parse_table_options(&mut self) -> Result<TableOptions> {
         let mut result = self.parse_table_option()?;
         loop {
             match self.peek()? {
@@ -2755,7 +2754,7 @@ impl<'a> Parser<'a> {
         Ok(result)
     }
 
-    fn parse_create_table_args(&mut self) -> Result<CreateTableBody, Error> {
+    fn parse_create_table_args(&mut self) -> Result<CreateTableBody> {
         let tok = eat_expect!(self, TK_LP, TK_AS);
         match tok.token_type.unwrap() {
             TK_AS => Ok(CreateTableBody::AsSelect(self.parse_select()?)),
@@ -2792,7 +2791,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_create_table(&mut self, temporary: bool) -> Result<Stmt, Error> {
+    fn parse_create_table(&mut self, temporary: bool) -> Result<Stmt> {
         eat_assert!(self, TK_TABLE);
         let if_not_exists = self.parse_if_not_exists()?;
         let tbl_name = self.parse_fullname(false)?;
@@ -2805,7 +2804,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_analyze(&mut self) -> Result<Stmt, Error> {
+    fn parse_analyze(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_ANALYZE);
         let name = match self.peek()? {
             Some(tok) => match tok.token_type.unwrap().fallback_id_if_ok() {
@@ -2818,7 +2817,7 @@ impl<'a> Parser<'a> {
         Ok(Stmt::Analyze { name })
     }
 
-    fn parse_attach(&mut self) -> Result<Stmt, Error> {
+    fn parse_attach(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_ATTACH);
         if self.peek_no_eof()?.token_type == Some(TK_DATABASE) {
             eat_assert!(self, TK_DATABASE);
@@ -2841,7 +2840,7 @@ impl<'a> Parser<'a> {
         Ok(Stmt::Attach { expr, db_name, key })
     }
 
-    fn parse_detach(&mut self) -> Result<Stmt, Error> {
+    fn parse_detach(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_DETACH);
         if self.peek_no_eof()?.token_type == Some(TK_DATABASE) {
             eat_assert!(self, TK_DATABASE);
@@ -2852,7 +2851,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_pragma_value(&mut self) -> Result<Expr, Error> {
+    fn parse_pragma_value(&mut self) -> Result<Expr> {
         match self.peek_no_eof()?.token_type.unwrap().fallback_id_if_ok() {
             TK_ON | TK_DELETE | TK_DEFAULT => {
                 let tok = eat_assert!(self, TK_ON, TK_DELETE, TK_DEFAULT);
@@ -2867,7 +2866,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_pragma(&mut self) -> Result<Stmt, Error> {
+    fn parse_pragma(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_PRAGMA);
         let name = self.parse_fullname(false)?;
         match self.peek()? {
@@ -2894,7 +2893,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_vacuum(&mut self) -> Result<Stmt, Error> {
+    fn parse_vacuum(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_VACUUM);
 
         let name = match self.peek()? {
@@ -2916,7 +2915,7 @@ impl<'a> Parser<'a> {
         Ok(Stmt::Vacuum { name, into })
     }
 
-    fn parse_term(&mut self) -> Result<Box<Expr>, Error> {
+    fn parse_term(&mut self) -> Result<Box<Expr>> {
         peek_expect!(
             self,
             TK_NULL,
@@ -2930,7 +2929,7 @@ impl<'a> Parser<'a> {
         self.parse_expr_operand()
     }
 
-    fn parse_default_column_constraint(&mut self) -> Result<ColumnConstraint, Error> {
+    fn parse_default_column_constraint(&mut self) -> Result<ColumnConstraint> {
         eat_assert!(self, TK_DEFAULT);
         match self.peek_no_eof()?.token_type.unwrap().fallback_id_if_ok() {
             TK_LP => {
@@ -2962,7 +2961,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_resolve_type(&mut self) -> Result<ResolveType, Error> {
+    fn parse_resolve_type(&mut self) -> Result<ResolveType> {
         match self.peek_no_eof()?.token_type.unwrap() {
             TK_IGNORE => {
                 eat_assert!(self, TK_IGNORE);
@@ -2976,7 +2975,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_on_conflict(&mut self) -> Result<Option<ResolveType>, Error> {
+    fn parse_on_conflict(&mut self) -> Result<Option<ResolveType>> {
         match self.peek()? {
             None => return Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -2991,7 +2990,7 @@ impl<'a> Parser<'a> {
         Ok(Some(self.parse_resolve_type()?))
     }
 
-    fn parse_or_conflict(&mut self) -> Result<Option<ResolveType>, Error> {
+    fn parse_or_conflict(&mut self) -> Result<Option<ResolveType>> {
         match self.peek()? {
             None => return Ok(None),
             Some(tok) => match tok.token_type.unwrap() {
@@ -3005,7 +3004,7 @@ impl<'a> Parser<'a> {
         Ok(Some(self.parse_resolve_type()?))
     }
 
-    fn parse_auto_increment(&mut self) -> Result<bool, Error> {
+    fn parse_auto_increment(&mut self) -> Result<bool> {
         match self.peek()? {
             None => Ok(false),
             Some(tok) => match tok.token_type.unwrap() {
@@ -3018,7 +3017,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_not_null_column_constraint(&mut self) -> Result<ColumnConstraint, Error> {
+    fn parse_not_null_column_constraint(&mut self) -> Result<ColumnConstraint> {
         let has_not = match self.peek_no_eof()?.token_type.unwrap() {
             TK_NOT => {
                 eat_assert!(self, TK_NOT);
@@ -3034,7 +3033,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_primary_column_constraint(&mut self) -> Result<ColumnConstraint, Error> {
+    fn parse_primary_column_constraint(&mut self) -> Result<ColumnConstraint> {
         eat_assert!(self, TK_PRIMARY);
         eat_expect!(self, TK_KEY);
         let sort_order = self.parse_sort_order()?;
@@ -3048,12 +3047,12 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_unique_column_constraint(&mut self) -> Result<ColumnConstraint, Error> {
+    fn parse_unique_column_constraint(&mut self) -> Result<ColumnConstraint> {
         eat_assert!(self, TK_UNIQUE);
         Ok(ColumnConstraint::Unique(self.parse_on_conflict()?))
     }
 
-    fn parse_check_column_constraint(&mut self) -> Result<ColumnConstraint, Error> {
+    fn parse_check_column_constraint(&mut self) -> Result<ColumnConstraint> {
         eat_assert!(self, TK_CHECK);
         eat_expect!(self, TK_LP);
         let expr = self.parse_expr(0)?;
@@ -3061,7 +3060,7 @@ impl<'a> Parser<'a> {
         Ok(ColumnConstraint::Check(expr))
     }
 
-    fn parse_ref_act(&mut self) -> Result<RefAct, Error> {
+    fn parse_ref_act(&mut self) -> Result<RefAct> {
         let tok = eat_expect!(self, TK_SET, TK_CASCADE, TK_RESTRICT, TK_NO);
 
         match tok.token_type.unwrap() {
@@ -3083,7 +3082,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_ref_args(&mut self) -> Result<Vec<RefArg>, Error> {
+    fn parse_ref_args(&mut self) -> Result<Vec<RefArg>> {
         let mut result = vec![];
 
         while let Some(tok) = self.peek()? {
@@ -3109,7 +3108,7 @@ impl<'a> Parser<'a> {
         Ok(result)
     }
 
-    fn parse_foreign_key_clause(&mut self) -> Result<ForeignKeyClause, Error> {
+    fn parse_foreign_key_clause(&mut self) -> Result<ForeignKeyClause> {
         eat_assert!(self, TK_REFERENCES);
         let name = self.parse_nm()?;
         let eid_list = self.parse_eid_list()?;
@@ -3121,7 +3120,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_defer_subclause(&mut self) -> Result<Option<DeferSubclause>, Error> {
+    fn parse_defer_subclause(&mut self) -> Result<Option<DeferSubclause>> {
         let has_not = match self.peek()? {
             Some(tok) => match tok.token_type.unwrap() {
                 TK_DEFERRABLE => false,
@@ -3158,7 +3157,7 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    fn parse_reference_column_constraint(&mut self) -> Result<ColumnConstraint, Error> {
+    fn parse_reference_column_constraint(&mut self) -> Result<ColumnConstraint> {
         let clause = self.parse_foreign_key_clause()?;
         let deref_clause = self.parse_defer_subclause()?;
         Ok(ColumnConstraint::ForeignKey {
@@ -3167,7 +3166,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_generated_column_constraint(&mut self) -> Result<ColumnConstraint, Error> {
+    fn parse_generated_column_constraint(&mut self) -> Result<ColumnConstraint> {
         let tok = eat_assert!(self, TK_GENERATED, TK_AS);
         match tok.token_type.unwrap() {
             TK_GENERATED => {
@@ -3196,7 +3195,7 @@ impl<'a> Parser<'a> {
         Ok(ColumnConstraint::Generated { expr, typ })
     }
 
-    fn parse_named_column_constraints(&mut self) -> Result<Vec<NamedColumnConstraint>, Error> {
+    fn parse_named_column_constraints(&mut self) -> Result<Vec<NamedColumnConstraint>> {
         let mut result = vec![];
 
         loop {
@@ -3288,7 +3287,7 @@ impl<'a> Parser<'a> {
         Ok(result)
     }
 
-    fn parse_column_definition(&mut self) -> Result<ColumnDefinition, Error> {
+    fn parse_column_definition(&mut self) -> Result<ColumnDefinition> {
         let col_name = self.parse_nm()?;
         let col_type = self.parse_type()?;
         let constraints = self.parse_named_column_constraints()?;
@@ -3299,7 +3298,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_alter(&mut self) -> Result<Stmt, Error> {
+    fn parse_alter(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_ALTER);
         eat_expect!(self, TK_TABLE);
         let tbl_name = self.parse_fullname(false)?;
@@ -3358,7 +3357,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_create_index(&mut self) -> Result<Stmt, Error> {
+    fn parse_create_index(&mut self) -> Result<Stmt> {
         let tok = eat_assert!(self, TK_INDEX, TK_UNIQUE);
         let has_unique = tok.token_type == Some(TK_UNIQUE);
         if has_unique {
@@ -3384,7 +3383,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_set(&mut self) -> Result<Set, Error> {
+    fn parse_set(&mut self) -> Result<Set> {
         let tok = peek_expect!(self, TK_LP, TK_ID, TK_STRING, TK_JOIN_KW, TK_INDEXED);
 
         match tok.token_type.unwrap() {
@@ -3409,7 +3408,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_set_list(&mut self) -> Result<Vec<Set>, Error> {
+    fn parse_set_list(&mut self) -> Result<Vec<Set>> {
         let mut results = vec![self.parse_set()?];
         loop {
             match self.peek()? {
@@ -3424,7 +3423,7 @@ impl<'a> Parser<'a> {
         Ok(results)
     }
 
-    fn parse_returning(&mut self) -> Result<Vec<ResultColumn>, Error> {
+    fn parse_returning(&mut self) -> Result<Vec<ResultColumn>> {
         match self.peek()? {
             Some(tok) if tok.token_type == Some(TK_RETURNING) => {
                 eat_assert!(self, TK_RETURNING);
@@ -3435,7 +3434,7 @@ impl<'a> Parser<'a> {
         self.parse_select_columns()
     }
 
-    fn parse_upsert(&mut self) -> Result<(Option<Box<Upsert>>, Vec<ResultColumn>), Error> {
+    fn parse_upsert(&mut self) -> Result<(Option<Box<Upsert>>, Vec<ResultColumn>)> {
         match self.peek()? {
             Some(tok) => match tok.token_type.unwrap() {
                 TK_ON => {
@@ -3508,7 +3507,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_trigger_insert_cmd(&mut self) -> Result<TriggerCmd, Error> {
+    fn parse_trigger_insert_cmd(&mut self) -> Result<TriggerCmd> {
         let tok = eat_assert!(self, TK_INSERT, TK_REPLACE);
         let resolve_type = match tok.token_type.unwrap() {
             TK_INSERT => self.parse_or_conflict()?,
@@ -3531,7 +3530,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_trigger_update_cmd(&mut self) -> Result<TriggerCmd, Error> {
+    fn parse_trigger_update_cmd(&mut self) -> Result<TriggerCmd> {
         eat_assert!(self, TK_UPDATE);
         let or_conflict = self.parse_or_conflict()?;
         let tbl_name = self.parse_nm()?;
@@ -3548,7 +3547,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_trigger_delete_cmd(&mut self) -> Result<TriggerCmd, Error> {
+    fn parse_trigger_delete_cmd(&mut self) -> Result<TriggerCmd> {
         eat_assert!(self, TK_DELETE);
         eat_expect!(self, TK_FROM);
         let tbl_name = self.parse_nm()?;
@@ -3559,7 +3558,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_trigger_cmd(&mut self) -> Result<TriggerCmd, Error> {
+    fn parse_trigger_cmd(&mut self) -> Result<TriggerCmd> {
         let tok = peek_expect!(
             self, TK_INSERT, TK_REPLACE, TK_UPDATE, TK_DELETE, TK_WITH, TK_SELECT, TK_VALUES,
         );
@@ -3576,7 +3575,7 @@ impl<'a> Parser<'a> {
         Ok(result)
     }
 
-    fn parse_create_trigger(&mut self, temporary: bool) -> Result<Stmt, Error> {
+    fn parse_create_trigger(&mut self, temporary: bool) -> Result<Stmt> {
         eat_assert!(self, TK_TRIGGER);
 
         let if_not_exists = self.parse_if_not_exists()?;
@@ -3658,7 +3657,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_delete_without_cte(&mut self, with: Option<With>) -> Result<Stmt, Error> {
+    fn parse_delete_without_cte(&mut self, with: Option<With>) -> Result<Stmt> {
         eat_assert!(self, TK_DELETE);
         eat_expect!(self, TK_FROM);
         let tbl_name = self.parse_fullname(true)?;
@@ -3678,12 +3677,12 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_delete(&mut self) -> Result<Stmt, Error> {
+    fn parse_delete(&mut self) -> Result<Stmt> {
         let with = self.parse_with()?;
         self.parse_delete_without_cte(with)
     }
 
-    fn parse_if_exists(&mut self) -> Result<bool, Error> {
+    fn parse_if_exists(&mut self) -> Result<bool> {
         match self.peek()? {
             Some(tok) if tok.token_type == Some(TK_IF) => {
                 eat_assert!(self, TK_IF);
@@ -3694,7 +3693,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_drop_stmt(&mut self) -> Result<Stmt, Error> {
+    fn parse_drop_stmt(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_DROP);
 
         let tok = peek_expect!(self, TK_TABLE, TK_INDEX, TK_TRIGGER, TK_VIEW);
@@ -3739,7 +3738,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_insert_without_cte(&mut self, with: Option<With>) -> Result<Stmt, Error> {
+    fn parse_insert_without_cte(&mut self, with: Option<With>) -> Result<Stmt> {
         let tok = eat_assert!(self, TK_INSERT, TK_REPLACE);
         let resolve_type = match tok.token_type.unwrap() {
             TK_INSERT => self.parse_or_conflict()?,
@@ -3773,12 +3772,12 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn parse_insert(&mut self) -> Result<Stmt, Error> {
+    fn parse_insert(&mut self) -> Result<Stmt> {
         let with = self.parse_with()?;
         self.parse_insert_without_cte(with)
     }
 
-    fn parse_update_without_cte(&mut self, with: Option<With>) -> Result<Stmt, Error> {
+    fn parse_update_without_cte(&mut self, with: Option<With>) -> Result<Stmt> {
         eat_assert!(self, TK_UPDATE);
         let resolve_type = self.parse_or_conflict()?;
         let tbl_name = self.parse_fullname(true)?;
@@ -3804,12 +3803,12 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    fn parse_update(&mut self) -> Result<Stmt, Error> {
+    fn parse_update(&mut self) -> Result<Stmt> {
         let with = self.parse_with()?;
         self.parse_update_without_cte(with)
     }
 
-    fn parse_reindex(&mut self) -> Result<Stmt, Error> {
+    fn parse_reindex(&mut self) -> Result<Stmt> {
         eat_assert!(self, TK_REINDEX);
         match self.peek()? {
             Some(tok) => match tok.token_type.unwrap().fallback_id_if_ok() {